/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per-configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto", CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS, CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS, CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
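// Note: ANDROID_CONTROL_AF_MODE_OFF intentionally appears twice above, paired with both
// CAM_FOCUS_MODE_OFF and CAM_FOCUS_MODE_FIXED. As described in the traversal-order note that
// precedes REFERENCE_ILLUMINANT_MAP below, these tables are scanned from index 0 and the first
// match wins, so a fixed-focus HAL mode still reports back to the framework as AF_MODE_OFF.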

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
};

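// (width, height) pairs advertised to the framework as available JPEG thumbnail sizes;
// the leading (0, 0) entry indicates that thumbnail generation can be disabled.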
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important: when mapping from HAL to Android, the lookup
 * traverses from lower to higher index, so for HAL values that map to multiple
 * Android values, the first match found is the one selected.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

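// The QCameraMap tables above are plain {framework enum, HAL enum} pair arrays that are
// translated with linear scans elsewhere in this class. A minimal illustrative sketch of such a
// lookup (assuming QCameraMap exposes fwk_name/hal_name members; the actual helpers in this HAL
// may differ) is:
//
//     template <typename fwkType, typename halType, size_t N>
//     int fwkToHal(const QCamera3HardwareInterface::QCameraMap<fwkType, halType> (&map)[N],
//             fwkType fwk, halType *hal) {
//         for (size_t i = 0; i < N; i++) {       // first match wins, so table order matters
//             if (map[i].fwk_name == fwk) { *hal = map[i].hal_name; return 0; }
//         }
//         return -1;                             // value not advertised / not supported
//     }
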
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize = QCamera3HardwareInterface::initialize,
    .configure_streams = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops = NULL,
    .dump = QCamera3HardwareInterface::dump,
    .flush = QCamera3HardwareInterface::flush,
    .reserved = {0},
};

// Initialize session IDs to an invalid default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize framework callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the requested stream sizes are among those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find the input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                // As per spec, the depth point cloud size should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from frameworks is always the full active array size,
                 * but it is not clear from the spec if the framework will always
                 * follow that; also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has an unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateUsageFlags
 *
 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *   NO_ERROR if the usage flags are supported
 *   error code if usage flags are not supported
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateUsageFlags(
        const camera3_stream_configuration_t* streamList)
{
    for (size_t j = 0; j < streamList->num_streams; j++) {
        const camera3_stream_t *newStream = streamList->streams[j];

        if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
                (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
                 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
            continue;
        }

        bool isVideo = IS_USAGE_VIDEO(newStream->usage);
        bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
        bool isZSL = IS_USAGE_ZSL(newStream->usage);
        bool forcePreviewUBWC = true;
        if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
            forcePreviewUBWC = false;
        }
        cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);

        // Color space for this camera device is guaranteed to be ITU_R_601_FR.
        // So color spaces will always match.

        // Check whether underlying formats of shared streams match.
        if (isVideo && isPreview && videoFormat != previewFormat) {
            LOGE("Combined video and preview usage flag is not supported");
            return -EINVAL;
        }
        if (isPreview && isZSL && previewFormat != zslFormat) {
            LOGE("Combined preview and zsl usage flag is not supported");
            return -EINVAL;
        }
        if (isVideo && isZSL && videoFormat != zslFormat) {
            LOGE("Combined video and zsl usage flag is not supported");
            return -EINVAL;
        }
    }
    return NO_ERROR;
}

1357/*===========================================================================
1358 * FUNCTION : validateUsageFlagsForEis
1359 *
1360 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1361 *
1362 * PARAMETERS :
1363 * @stream_list : streams to be configured
1364 *
1365 * RETURN :
1366 * NO_ERROR if the usage flags are supported
1367 * error code if usage flags are not supported
1368 *
1369 *==========================================================================*/
1370int QCamera3HardwareInterface::validateUsageFlagsForEis(
1371 const camera3_stream_configuration_t* streamList)
1372{
1373 for (size_t j = 0; j < streamList->num_streams; j++) {
1374 const camera3_stream_t *newStream = streamList->streams[j];
1375
1376 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1377 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1378
1379        // Because EIS is "hard-coded" for certain use cases, and the current
1380        // implementation doesn't support shared preview and video on the same
1381        // stream, return failure if EIS is forced on.
1382 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1383 LOGE("Combined video and preview usage flag is not supported due to EIS");
1384 return -EINVAL;
1385 }
1386 }
1387 return NO_ERROR;
1388}
1389
Thierry Strudel3d639192016-09-09 11:52:26 -07001390/*==============================================================================
1391 * FUNCTION : isSupportChannelNeeded
1392 *
1393 * DESCRIPTION: Simple heuristic func to determine if a support channel is needed
1394 *
1395 * PARAMETERS :
1396 * @stream_list : streams to be configured
1397 * @stream_config_info : the config info for streams to be configured
1398 *
1399 * RETURN : Boolean true/false decision
1400 *
1401 *==========================================================================*/
1402bool QCamera3HardwareInterface::isSupportChannelNeeded(
1403 camera3_stream_configuration_t *streamList,
1404 cam_stream_size_info_t stream_config_info)
1405{
1406 uint32_t i;
1407 bool pprocRequested = false;
1408 /* Check for conditions where PProc pipeline does not have any streams*/
1409 for (i = 0; i < stream_config_info.num_streams; i++) {
1410 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1411 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1412 pprocRequested = true;
1413 break;
1414 }
1415 }
1416
1417 if (pprocRequested == false )
1418 return true;
1419
1420 /* Dummy stream needed if only raw or jpeg streams present */
1421 for (i = 0; i < streamList->num_streams; i++) {
1422 switch(streamList->streams[i]->format) {
1423 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1424 case HAL_PIXEL_FORMAT_RAW10:
1425 case HAL_PIXEL_FORMAT_RAW16:
1426 case HAL_PIXEL_FORMAT_BLOB:
1427 break;
1428 default:
1429 return false;
1430 }
1431 }
1432 return true;
1433}
1434
1435/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001436 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001437 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001438 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001439 *
1440 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001441 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001442 *
1443 * RETURN : int32_t type of status
1444 * NO_ERROR -- success
1445 *              non-zero failure code
1446 *
1447 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001448int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001449{
1450 int32_t rc = NO_ERROR;
1451
1452 cam_dimension_t max_dim = {0, 0};
1453 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1454 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1455 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1456 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1457 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1458 }
1459
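    /* Two-step query: first publish the largest configured stream dimension to
     * the backend via CAM_INTF_PARM_MAX_DIMENSION (set_parms), then read back
     * CAM_INTF_PARM_SENSOR_MODE_INFO (get_parms) so that the reported sensor
     * mode matches the current stream configuration. */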
1460 clear_metadata_buffer(mParameters);
1461
1462 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1463 max_dim);
1464 if (rc != NO_ERROR) {
1465 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1466 return rc;
1467 }
1468
1469 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1470 if (rc != NO_ERROR) {
1471 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1472 return rc;
1473 }
1474
1475 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001476 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001477
1478 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1479 mParameters);
1480 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001481 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001482 return rc;
1483 }
1484
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001485 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001486 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1487 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1488 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1489 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1490 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001491
1492 return rc;
1493}
1494
1495/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001496 * FUNCTION : addToPPFeatureMask
1497 *
1498 * DESCRIPTION: add additional features to pp feature mask based on
1499 * stream type and usecase
1500 *
1501 * PARAMETERS :
1502 * @stream_format : stream type for feature mask
1503 * @stream_idx : stream idx within postprocess_mask list to change
1504 *
1505 * RETURN     : None
1506 *
1507 *==========================================================================*/
1508void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1509 uint32_t stream_idx)
1510{
1511 char feature_mask_value[PROPERTY_VALUE_MAX];
1512 cam_feature_mask_t feature_mask;
1513 int args_converted;
1514 int property_len;
1515
1516 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001517#ifdef _LE_CAMERA_
1518 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1519 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1520 property_len = property_get("persist.camera.hal3.feature",
1521 feature_mask_value, swtnr_feature_mask_value);
1522#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001523 property_len = property_get("persist.camera.hal3.feature",
1524 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001525#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001526 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1527 (feature_mask_value[1] == 'x')) {
1528 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1529 } else {
1530 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1531 }
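    /* The property accepts either a hex string ("0x...") or a decimal string;
     * e.g. "0x10000" and "65536" (illustrative values only) parse to the same
     * 64-bit cam_feature_mask_t value. */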
1532 if (1 != args_converted) {
1533 feature_mask = 0;
1534 LOGE("Wrong feature mask %s", feature_mask_value);
1535 return;
1536 }
1537
1538 switch (stream_format) {
1539 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1540 /* Add LLVD to pp feature mask only if video hint is enabled */
1541 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1542 mStreamConfigInfo.postprocess_mask[stream_idx]
1543 |= CAM_QTI_FEATURE_SW_TNR;
1544 LOGH("Added SW TNR to pp feature mask");
1545 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1546 mStreamConfigInfo.postprocess_mask[stream_idx]
1547 |= CAM_QCOM_FEATURE_LLVD;
1548 LOGH("Added LLVD SeeMore to pp feature mask");
1549 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001550 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1551 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1552 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1553 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001554 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1555 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1556 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1557 CAM_QTI_FEATURE_BINNING_CORRECTION;
1558 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001559 break;
1560 }
1561 default:
1562 break;
1563 }
1564 LOGD("PP feature mask %llx",
1565 mStreamConfigInfo.postprocess_mask[stream_idx]);
1566}
1567
1568/*==============================================================================
1569 * FUNCTION : updateFpsInPreviewBuffer
1570 *
1571 * DESCRIPTION: update FPS information in preview buffer.
1572 *
1573 * PARAMETERS :
1574 * @metadata : pointer to metadata buffer
1575 * @frame_number: frame_number to look for in pending buffer list
1576 *
1577 * RETURN : None
1578 *
1579 *==========================================================================*/
1580void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1581 uint32_t frame_number)
1582{
1583 // Mark all pending buffers for this particular request
1584 // with corresponding framerate information
1585 for (List<PendingBuffersInRequest>::iterator req =
1586 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1587 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1588 for(List<PendingBufferInfo>::iterator j =
1589 req->mPendingBufferList.begin();
1590 j != req->mPendingBufferList.end(); j++) {
1591 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1592 if ((req->frame_number == frame_number) &&
1593 (channel->getStreamTypeMask() &
1594 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1595 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1596 CAM_INTF_PARM_FPS_RANGE, metadata) {
1597 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1598 struct private_handle_t *priv_handle =
1599 (struct private_handle_t *)(*(j->buffer));
1600 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1601 }
1602 }
1603 }
1604 }
1605}
1606
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001607/*==============================================================================
1608 * FUNCTION : updateTimeStampInPendingBuffers
1609 *
1610 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1611 * of a frame number
1612 *
1613 * PARAMETERS :
1614 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1615 * @timestamp : timestamp to be set
1616 *
1617 * RETURN : None
1618 *
1619 *==========================================================================*/
1620void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1621 uint32_t frameNumber, nsecs_t timestamp)
1622{
1623 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1624 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1625 if (req->frame_number != frameNumber)
1626 continue;
1627
1628 for (auto k = req->mPendingBufferList.begin();
1629 k != req->mPendingBufferList.end(); k++ ) {
1630 struct private_handle_t *priv_handle =
1631 (struct private_handle_t *) (*(k->buffer));
1632 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1633 }
1634 }
1635 return;
1636}
1637
Thierry Strudel3d639192016-09-09 11:52:26 -07001638/*===========================================================================
1639 * FUNCTION : configureStreams
1640 *
1641 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1642 * and output streams.
1643 *
1644 * PARAMETERS :
1645 * @stream_list : streams to be configured
1646 *
1647 * RETURN :
1648 *
1649 *==========================================================================*/
1650int QCamera3HardwareInterface::configureStreams(
1651 camera3_stream_configuration_t *streamList)
1652{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001653 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001654 int rc = 0;
1655
1656 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001657 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001658 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001659 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001660
1661 return rc;
1662}
1663
1664/*===========================================================================
1665 * FUNCTION : configureStreamsPerfLocked
1666 *
1667 * DESCRIPTION: configureStreams while perfLock is held.
1668 *
1669 * PARAMETERS :
1670 * @stream_list : streams to be configured
1671 *
1672 * RETURN : int32_t type of status
1673 * NO_ERROR -- success
1674 *              non-zero failure code
1675 *==========================================================================*/
1676int QCamera3HardwareInterface::configureStreamsPerfLocked(
1677 camera3_stream_configuration_t *streamList)
1678{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001679 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001680 int rc = 0;
1681
1682 // Sanity check stream_list
1683 if (streamList == NULL) {
1684 LOGE("NULL stream configuration");
1685 return BAD_VALUE;
1686 }
1687 if (streamList->streams == NULL) {
1688 LOGE("NULL stream list");
1689 return BAD_VALUE;
1690 }
1691
1692 if (streamList->num_streams < 1) {
1693 LOGE("Bad number of streams requested: %d",
1694 streamList->num_streams);
1695 return BAD_VALUE;
1696 }
1697
1698 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1699 LOGE("Maximum number of streams %d exceeded: %d",
1700 MAX_NUM_STREAMS, streamList->num_streams);
1701 return BAD_VALUE;
1702 }
1703
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001704 rc = validateUsageFlags(streamList);
1705 if (rc != NO_ERROR) {
1706 return rc;
1707 }
1708
Thierry Strudel3d639192016-09-09 11:52:26 -07001709 mOpMode = streamList->operation_mode;
1710 LOGD("mOpMode: %d", mOpMode);
1711
1712    /* first invalidate all the streams in the mStreamList
1713 * if they appear again, they will be validated */
1714 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1715 it != mStreamInfo.end(); it++) {
1716 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1717 if (channel) {
1718 channel->stop();
1719 }
1720 (*it)->status = INVALID;
1721 }
1722
1723 if (mRawDumpChannel) {
1724 mRawDumpChannel->stop();
1725 delete mRawDumpChannel;
1726 mRawDumpChannel = NULL;
1727 }
1728
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001729 if (mHdrPlusRawSrcChannel) {
1730 mHdrPlusRawSrcChannel->stop();
1731 delete mHdrPlusRawSrcChannel;
1732 mHdrPlusRawSrcChannel = NULL;
1733 }
1734
Thierry Strudel3d639192016-09-09 11:52:26 -07001735 if (mSupportChannel)
1736 mSupportChannel->stop();
1737
1738 if (mAnalysisChannel) {
1739 mAnalysisChannel->stop();
1740 }
1741 if (mMetadataChannel) {
1742        /* If mStreamInfo is not empty, there is a metadata stream */
1743 mMetadataChannel->stop();
1744 }
1745 if (mChannelHandle) {
1746 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1747 mChannelHandle);
1748 LOGD("stopping channel %d", mChannelHandle);
1749 }
1750
1751 pthread_mutex_lock(&mMutex);
1752
1753 // Check state
1754 switch (mState) {
1755 case INITIALIZED:
1756 case CONFIGURED:
1757 case STARTED:
1758 /* valid state */
1759 break;
1760 default:
1761 LOGE("Invalid state %d", mState);
1762 pthread_mutex_unlock(&mMutex);
1763 return -ENODEV;
1764 }
1765
1766 /* Check whether we have video stream */
1767 m_bIs4KVideo = false;
1768 m_bIsVideo = false;
1769 m_bEisSupportedSize = false;
1770 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001771 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001772 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001773 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001774 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001775 uint32_t videoWidth = 0U;
1776 uint32_t videoHeight = 0U;
1777 size_t rawStreamCnt = 0;
1778 size_t stallStreamCnt = 0;
1779 size_t processedStreamCnt = 0;
1780 // Number of streams on ISP encoder path
1781 size_t numStreamsOnEncoder = 0;
1782 size_t numYuv888OnEncoder = 0;
1783 bool bYuv888OverrideJpeg = false;
1784 cam_dimension_t largeYuv888Size = {0, 0};
1785 cam_dimension_t maxViewfinderSize = {0, 0};
1786 bool bJpegExceeds4K = false;
1787 bool bJpegOnEncoder = false;
1788 bool bUseCommonFeatureMask = false;
1789 cam_feature_mask_t commonFeatureMask = 0;
1790 bool bSmallJpegSize = false;
1791 uint32_t width_ratio;
1792 uint32_t height_ratio;
1793 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1794 camera3_stream_t *inputStream = NULL;
1795 bool isJpeg = false;
1796 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001797 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001798 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001799
1800 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1801
1802 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001803 uint8_t eis_prop_set;
1804 uint32_t maxEisWidth = 0;
1805 uint32_t maxEisHeight = 0;
1806
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001807 // Initialize all instant AEC related variables
1808 mInstantAEC = false;
1809 mResetInstantAEC = false;
1810 mInstantAECSettledFrameNumber = 0;
1811 mAecSkipDisplayFrameBound = 0;
1812 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001813 mCurrFeatureState = 0;
1814 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001815
Thierry Strudel3d639192016-09-09 11:52:26 -07001816 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1817
1818 size_t count = IS_TYPE_MAX;
1819 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1820 for (size_t i = 0; i < count; i++) {
1821 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001822 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1823 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001824 break;
1825 }
1826 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001827
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001828 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001829 maxEisWidth = MAX_EIS_WIDTH;
1830 maxEisHeight = MAX_EIS_HEIGHT;
1831 }
1832
1833 /* EIS setprop control */
1834 char eis_prop[PROPERTY_VALUE_MAX];
1835 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001836 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001837 eis_prop_set = (uint8_t)atoi(eis_prop);
1838
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001839 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001840 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1841
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001842 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1843 m_bEisEnable, eis_prop_set, m_bEisSupported);
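    /* Note: m_bEisEnable may still be cleared further below for front/aux
     * facing sensors or when no video stream is configured. */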
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001844
Thierry Strudel3d639192016-09-09 11:52:26 -07001845 /* stream configurations */
1846 for (size_t i = 0; i < streamList->num_streams; i++) {
1847 camera3_stream_t *newStream = streamList->streams[i];
1848 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1849 "height = %d, rotation = %d, usage = 0x%x",
1850 i, newStream->stream_type, newStream->format,
1851 newStream->width, newStream->height, newStream->rotation,
1852 newStream->usage);
1853 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1854 newStream->stream_type == CAMERA3_STREAM_INPUT){
1855 isZsl = true;
1856 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001857 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1858 IS_USAGE_PREVIEW(newStream->usage)) {
1859 isPreview = true;
1860 }
1861
Thierry Strudel3d639192016-09-09 11:52:26 -07001862 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1863 inputStream = newStream;
1864 }
1865
Emilian Peev7650c122017-01-19 08:24:33 -08001866 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1867 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001868 isJpeg = true;
1869 jpegSize.width = newStream->width;
1870 jpegSize.height = newStream->height;
1871 if (newStream->width > VIDEO_4K_WIDTH ||
1872 newStream->height > VIDEO_4K_HEIGHT)
1873 bJpegExceeds4K = true;
1874 }
1875
1876 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1877 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1878 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001879 // In HAL3 we can have multiple different video streams.
1880 // The variables video width and height are used below as
1881 // dimensions of the biggest of them
1882 if (videoWidth < newStream->width ||
1883 videoHeight < newStream->height) {
1884 videoWidth = newStream->width;
1885 videoHeight = newStream->height;
1886 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001887 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1888 (VIDEO_4K_HEIGHT <= newStream->height)) {
1889 m_bIs4KVideo = true;
1890 }
1891 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1892 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001893
Thierry Strudel3d639192016-09-09 11:52:26 -07001894 }
1895 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1896 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1897 switch (newStream->format) {
1898 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001899 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1900 depthPresent = true;
1901 break;
1902 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001903 stallStreamCnt++;
1904 if (isOnEncoder(maxViewfinderSize, newStream->width,
1905 newStream->height)) {
1906 numStreamsOnEncoder++;
1907 bJpegOnEncoder = true;
1908 }
1909 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1910 newStream->width);
1911 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1912                newStream->height);
1913 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1914 "FATAL: max_downscale_factor cannot be zero and so assert");
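            /* A "small" JPEG here means the requested JPEG is more than
             * max_downscale_factor smaller than the active array; such
             * streams are routed through the PP superset path below. */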
1915 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1916 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1917 LOGH("Setting small jpeg size flag to true");
1918 bSmallJpegSize = true;
1919 }
1920 break;
1921 case HAL_PIXEL_FORMAT_RAW10:
1922 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1923 case HAL_PIXEL_FORMAT_RAW16:
1924 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001925 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1926 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1927 pdStatCount++;
1928 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001929 break;
1930 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1931 processedStreamCnt++;
1932 if (isOnEncoder(maxViewfinderSize, newStream->width,
1933 newStream->height)) {
1934 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1935 !IS_USAGE_ZSL(newStream->usage)) {
1936 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1937 }
1938 numStreamsOnEncoder++;
1939 }
1940 break;
1941 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1942 processedStreamCnt++;
1943 if (isOnEncoder(maxViewfinderSize, newStream->width,
1944 newStream->height)) {
1945 // If Yuv888 size is not greater than 4K, set feature mask
1946 // to SUPERSET so that it support concurrent request on
1947 // YUV and JPEG.
1948 if (newStream->width <= VIDEO_4K_WIDTH &&
1949 newStream->height <= VIDEO_4K_HEIGHT) {
1950 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1951 }
1952 numStreamsOnEncoder++;
1953 numYuv888OnEncoder++;
1954 largeYuv888Size.width = newStream->width;
1955 largeYuv888Size.height = newStream->height;
1956 }
1957 break;
1958 default:
1959 processedStreamCnt++;
1960 if (isOnEncoder(maxViewfinderSize, newStream->width,
1961 newStream->height)) {
1962 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1963 numStreamsOnEncoder++;
1964 }
1965 break;
1966 }
1967
1968 }
1969 }
1970
1971 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1972 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1973 !m_bIsVideo) {
1974 m_bEisEnable = false;
1975 }
1976
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001977 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1978 pthread_mutex_unlock(&mMutex);
1979 return -EINVAL;
1980 }
1981
Thierry Strudel54dc9782017-02-15 12:12:10 -08001982 uint8_t forceEnableTnr = 0;
1983 char tnr_prop[PROPERTY_VALUE_MAX];
1984 memset(tnr_prop, 0, sizeof(tnr_prop));
1985 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1986 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1987
Thierry Strudel3d639192016-09-09 11:52:26 -07001988 /* Logic to enable/disable TNR based on specific config size/etc.*/
1989 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001990 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1991 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001992 else if (forceEnableTnr)
1993 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001994
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001995 char videoHdrProp[PROPERTY_VALUE_MAX];
1996 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1997 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1998 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
1999
2000 if (hdr_mode_prop == 1 && m_bIsVideo &&
2001 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2002 m_bVideoHdrEnabled = true;
2003 else
2004 m_bVideoHdrEnabled = false;
2005
2006
Thierry Strudel3d639192016-09-09 11:52:26 -07002007 /* Check if num_streams is sane */
2008 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2009 rawStreamCnt > MAX_RAW_STREAMS ||
2010 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2011        LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2012 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2013 pthread_mutex_unlock(&mMutex);
2014 return -EINVAL;
2015 }
2016 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002017 if (isZsl && m_bIs4KVideo) {
2018 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002019 pthread_mutex_unlock(&mMutex);
2020 return -EINVAL;
2021 }
2022 /* Check if stream sizes are sane */
2023 if (numStreamsOnEncoder > 2) {
2024 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2025 pthread_mutex_unlock(&mMutex);
2026 return -EINVAL;
2027 } else if (1 < numStreamsOnEncoder){
2028 bUseCommonFeatureMask = true;
2029 LOGH("Multiple streams above max viewfinder size, common mask needed");
2030 }
2031
2032 /* Check if BLOB size is greater than 4k in 4k recording case */
2033 if (m_bIs4KVideo && bJpegExceeds4K) {
2034 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2035 pthread_mutex_unlock(&mMutex);
2036 return -EINVAL;
2037 }
2038
Emilian Peev7650c122017-01-19 08:24:33 -08002039 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2040 depthPresent) {
2041 LOGE("HAL doesn't support depth streams in HFR mode!");
2042 pthread_mutex_unlock(&mMutex);
2043 return -EINVAL;
2044 }
2045
Thierry Strudel3d639192016-09-09 11:52:26 -07002046 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2047 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2048 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2049 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2050 // configurations:
2051 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2052 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2053 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2054 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2055 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2056 __func__);
2057 pthread_mutex_unlock(&mMutex);
2058 return -EINVAL;
2059 }
2060
2061 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2062 // the YUV stream's size is greater or equal to the JPEG size, set common
2063 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2064 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2065 jpegSize.width, jpegSize.height) &&
2066 largeYuv888Size.width > jpegSize.width &&
2067 largeYuv888Size.height > jpegSize.height) {
2068 bYuv888OverrideJpeg = true;
2069 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2070 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2071 }
2072
2073 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2074 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2075 commonFeatureMask);
2076 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2077 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2078
2079 rc = validateStreamDimensions(streamList);
2080 if (rc == NO_ERROR) {
2081 rc = validateStreamRotations(streamList);
2082 }
2083 if (rc != NO_ERROR) {
2084 LOGE("Invalid stream configuration requested!");
2085 pthread_mutex_unlock(&mMutex);
2086 return rc;
2087 }
2088
Emilian Peev0f3c3162017-03-15 12:57:46 +00002089 if (1 < pdStatCount) {
2090 LOGE("HAL doesn't support multiple PD streams");
2091 pthread_mutex_unlock(&mMutex);
2092 return -EINVAL;
2093 }
2094
2095 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2096 (1 == pdStatCount)) {
2097 LOGE("HAL doesn't support PD streams in HFR mode!");
2098 pthread_mutex_unlock(&mMutex);
2099 return -EINVAL;
2100 }
2101
Thierry Strudel3d639192016-09-09 11:52:26 -07002102 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2103 for (size_t i = 0; i < streamList->num_streams; i++) {
2104 camera3_stream_t *newStream = streamList->streams[i];
2105 LOGH("newStream type = %d, stream format = %d "
2106 "stream size : %d x %d, stream rotation = %d",
2107 newStream->stream_type, newStream->format,
2108 newStream->width, newStream->height, newStream->rotation);
2109 //if the stream is in the mStreamList validate it
2110 bool stream_exists = false;
2111 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2112 it != mStreamInfo.end(); it++) {
2113 if ((*it)->stream == newStream) {
2114 QCamera3ProcessingChannel *channel =
2115 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2116 stream_exists = true;
2117 if (channel)
2118 delete channel;
2119 (*it)->status = VALID;
2120 (*it)->stream->priv = NULL;
2121 (*it)->channel = NULL;
2122 }
2123 }
2124 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2125 //new stream
2126 stream_info_t* stream_info;
2127 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2128 if (!stream_info) {
2129 LOGE("Could not allocate stream info");
2130 rc = -ENOMEM;
2131 pthread_mutex_unlock(&mMutex);
2132 return rc;
2133 }
2134 stream_info->stream = newStream;
2135 stream_info->status = VALID;
2136 stream_info->channel = NULL;
2137 mStreamInfo.push_back(stream_info);
2138 }
2139 /* Covers Opaque ZSL and API1 F/W ZSL */
2140 if (IS_USAGE_ZSL(newStream->usage)
2141 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2142 if (zslStream != NULL) {
2143 LOGE("Multiple input/reprocess streams requested!");
2144 pthread_mutex_unlock(&mMutex);
2145 return BAD_VALUE;
2146 }
2147 zslStream = newStream;
2148 }
2149 /* Covers YUV reprocess */
2150 if (inputStream != NULL) {
2151 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2152 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2153 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2154 && inputStream->width == newStream->width
2155 && inputStream->height == newStream->height) {
2156 if (zslStream != NULL) {
2157                /* This scenario indicates multiple YUV streams with the same size
2158                 * as the input stream have been requested. Since the zsl stream handle
2159                 * is solely used to override the size of streams which share h/w
2160                 * streams, we will just make a guess here as to which of the streams
2161                 * is the ZSL stream. This will be refactored once we add generic
2162                 * logic for streams sharing encoder output
2163 */
2164 LOGH("Warning, Multiple ip/reprocess streams requested!");
2165 }
2166 zslStream = newStream;
2167 }
2168 }
2169 }
2170
2171 /* If a zsl stream is set, we know that we have configured at least one input or
2172 bidirectional stream */
2173 if (NULL != zslStream) {
2174 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2175 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2176 mInputStreamInfo.format = zslStream->format;
2177 mInputStreamInfo.usage = zslStream->usage;
2178 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2179 mInputStreamInfo.dim.width,
2180 mInputStreamInfo.dim.height,
2181 mInputStreamInfo.format, mInputStreamInfo.usage);
2182 }
2183
2184 cleanAndSortStreamInfo();
2185 if (mMetadataChannel) {
2186 delete mMetadataChannel;
2187 mMetadataChannel = NULL;
2188 }
2189 if (mSupportChannel) {
2190 delete mSupportChannel;
2191 mSupportChannel = NULL;
2192 }
2193
2194 if (mAnalysisChannel) {
2195 delete mAnalysisChannel;
2196 mAnalysisChannel = NULL;
2197 }
2198
2199 if (mDummyBatchChannel) {
2200 delete mDummyBatchChannel;
2201 mDummyBatchChannel = NULL;
2202 }
2203
Emilian Peev7650c122017-01-19 08:24:33 -08002204 if (mDepthChannel) {
2205 mDepthChannel = NULL;
2206 }
2207
Thierry Strudel2896d122017-02-23 19:18:03 -08002208 char is_type_value[PROPERTY_VALUE_MAX];
2209 property_get("persist.camera.is_type", is_type_value, "4");
2210 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
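    /* When persist.camera.is_type selects EIS 3.0, the video stream configured
     * below additionally gets the CAM_QTI_FEATURE_PPEISCORE post-processing
     * flag and a larger buffer count (MAX_VIDEO_BUFFERS). */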
2211
Binhao Line406f062017-05-03 14:39:44 -07002212 char property_value[PROPERTY_VALUE_MAX];
2213 property_get("persist.camera.gzoom.at", property_value, "0");
2214 int goog_zoom_at = atoi(property_value);
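    /* persist.camera.gzoom.at is treated as a bitmask: bit 0 enables Google
     * zoom on the video stream, bit 1 on the preview stream (e.g. "3" enables
     * both). */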
2215 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0);
2216 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0);
2217
2218 property_get("persist.camera.gzoom.4k", property_value, "0");
2219 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2220
Thierry Strudel3d639192016-09-09 11:52:26 -07002221 //Create metadata channel and initialize it
2222 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2223 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2224 gCamCapability[mCameraId]->color_arrangement);
2225 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2226 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002227 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002228 if (mMetadataChannel == NULL) {
2229 LOGE("failed to allocate metadata channel");
2230 rc = -ENOMEM;
2231 pthread_mutex_unlock(&mMutex);
2232 return rc;
2233 }
2234 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2235 if (rc < 0) {
2236 LOGE("metadata channel initialization failed");
2237 delete mMetadataChannel;
2238 mMetadataChannel = NULL;
2239 pthread_mutex_unlock(&mMutex);
2240 return rc;
2241 }
2242
Thierry Strudel2896d122017-02-23 19:18:03 -08002243 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002244 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002245 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002246 // Keep track of preview/video streams indices.
2247    // There could be more than one preview stream, but only one video stream.
2248 int32_t video_stream_idx = -1;
2249 int32_t preview_stream_idx[streamList->num_streams];
2250 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002251 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2252 /* Allocate channel objects for the requested streams */
2253 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002254
Thierry Strudel3d639192016-09-09 11:52:26 -07002255 camera3_stream_t *newStream = streamList->streams[i];
2256 uint32_t stream_usage = newStream->usage;
2257 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2258 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2259 struct camera_info *p_info = NULL;
2260 pthread_mutex_lock(&gCamLock);
2261 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2262 pthread_mutex_unlock(&gCamLock);
2263 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2264 || IS_USAGE_ZSL(newStream->usage)) &&
2265 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002266 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002267 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002268 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2269 if (bUseCommonFeatureMask)
2270 zsl_ppmask = commonFeatureMask;
2271 else
2272 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002273 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002274 if (numStreamsOnEncoder > 0)
2275 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2276 else
2277 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002278 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002279 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002280 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002281 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002282 LOGH("Input stream configured, reprocess config");
2283 } else {
2284 //for non zsl streams find out the format
2285 switch (newStream->format) {
2286 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2287 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002288 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002289 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2290 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2291 /* add additional features to pp feature mask */
2292 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2293 mStreamConfigInfo.num_streams);
2294
2295 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2296 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2297 CAM_STREAM_TYPE_VIDEO;
2298 if (m_bTnrEnabled && m_bTnrVideo) {
2299 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2300 CAM_QCOM_FEATURE_CPP_TNR;
2301 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2302 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2303 ~CAM_QCOM_FEATURE_CDS;
2304 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002305 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2306 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2307 CAM_QTI_FEATURE_PPEISCORE;
2308 }
Binhao Line406f062017-05-03 14:39:44 -07002309 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2310 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2311 CAM_QCOM_FEATURE_GOOG_ZOOM;
2312 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002313 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002314 } else {
2315 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2316 CAM_STREAM_TYPE_PREVIEW;
2317 if (m_bTnrEnabled && m_bTnrPreview) {
2318 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2319 CAM_QCOM_FEATURE_CPP_TNR;
2320 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2321 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2322 ~CAM_QCOM_FEATURE_CDS;
2323 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002324 if(!m_bSwTnrPreview) {
2325 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2326 ~CAM_QTI_FEATURE_SW_TNR;
2327 }
Binhao Line406f062017-05-03 14:39:44 -07002328 if (is_goog_zoom_preview_enabled) {
2329 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2330 CAM_QCOM_FEATURE_GOOG_ZOOM;
2331 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002332 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002333 padding_info.width_padding = mSurfaceStridePadding;
2334 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002335 previewSize.width = (int32_t)newStream->width;
2336 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002337 }
2338 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2339 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2340 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2341 newStream->height;
2342 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2343 newStream->width;
2344 }
2345 }
2346 break;
2347 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002348 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002349 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2350 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2351 if (bUseCommonFeatureMask)
2352 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2353 commonFeatureMask;
2354 else
2355 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2356 CAM_QCOM_FEATURE_NONE;
2357 } else {
2358 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2359 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2360 }
2361 break;
2362 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002363 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002364 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2365 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2366 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2367 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2368 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002369 /* Remove rotation if it is not supported
2370 for 4K LiveVideo snapshot case (online processing) */
2371 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2372 CAM_QCOM_FEATURE_ROTATION)) {
2373 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2374 &= ~CAM_QCOM_FEATURE_ROTATION;
2375 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002376 } else {
2377 if (bUseCommonFeatureMask &&
2378 isOnEncoder(maxViewfinderSize, newStream->width,
2379 newStream->height)) {
2380 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2381 } else {
2382 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2383 }
2384 }
2385 if (isZsl) {
2386 if (zslStream) {
2387 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2388 (int32_t)zslStream->width;
2389 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2390 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002391 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2392 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002393 } else {
2394 LOGE("Error, No ZSL stream identified");
2395 pthread_mutex_unlock(&mMutex);
2396 return -EINVAL;
2397 }
2398 } else if (m_bIs4KVideo) {
2399 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2400 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2401 } else if (bYuv888OverrideJpeg) {
2402 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2403 (int32_t)largeYuv888Size.width;
2404 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2405 (int32_t)largeYuv888Size.height;
2406 }
2407 break;
2408 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2409 case HAL_PIXEL_FORMAT_RAW16:
2410 case HAL_PIXEL_FORMAT_RAW10:
2411 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2412 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2413 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002414 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2415 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2416 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2417 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2418 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2419 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2420 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2421 gCamCapability[mCameraId]->dt[mPDIndex];
2422 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2423 gCamCapability[mCameraId]->vc[mPDIndex];
2424 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002425 break;
2426 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002427 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002428 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2429 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2430 break;
2431 }
2432 }
2433
2434 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2435 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2436 gCamCapability[mCameraId]->color_arrangement);
2437
2438 if (newStream->priv == NULL) {
2439 //New stream, construct channel
2440 switch (newStream->stream_type) {
2441 case CAMERA3_STREAM_INPUT:
2442 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2443 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2444 break;
2445 case CAMERA3_STREAM_BIDIRECTIONAL:
2446 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2447 GRALLOC_USAGE_HW_CAMERA_WRITE;
2448 break;
2449 case CAMERA3_STREAM_OUTPUT:
2450                /* For video encoding streams, set the read/write rarely
2451                 * flags so that the buffers may be allocated un-cached */
2452 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2453 newStream->usage |=
2454 (GRALLOC_USAGE_SW_READ_RARELY |
2455 GRALLOC_USAGE_SW_WRITE_RARELY |
2456 GRALLOC_USAGE_HW_CAMERA_WRITE);
2457 else if (IS_USAGE_ZSL(newStream->usage))
2458 {
2459 LOGD("ZSL usage flag skipping");
2460 }
2461 else if (newStream == zslStream
2462 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2463 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2464 } else
2465 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2466 break;
2467 default:
2468 LOGE("Invalid stream_type %d", newStream->stream_type);
2469 break;
2470 }
2471
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002472 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002473 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2474 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2475 QCamera3ProcessingChannel *channel = NULL;
2476 switch (newStream->format) {
2477 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2478 if ((newStream->usage &
2479 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2480 (streamList->operation_mode ==
2481 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2482 ) {
2483 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2484 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002485 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002486 this,
2487 newStream,
2488 (cam_stream_type_t)
2489 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2490 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2491 mMetadataChannel,
2492 0); //heap buffers are not required for HFR video channel
2493 if (channel == NULL) {
2494 LOGE("allocation of channel failed");
2495 pthread_mutex_unlock(&mMutex);
2496 return -ENOMEM;
2497 }
2498 //channel->getNumBuffers() will return 0 here so use
2499                        //MAX_INFLIGHT_HFR_REQUESTS
2500 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2501 newStream->priv = channel;
2502 LOGI("num video buffers in HFR mode: %d",
2503 MAX_INFLIGHT_HFR_REQUESTS);
2504 } else {
2505 /* Copy stream contents in HFR preview only case to create
2506 * dummy batch channel so that sensor streaming is in
2507 * HFR mode */
2508 if (!m_bIsVideo && (streamList->operation_mode ==
2509 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2510 mDummyBatchStream = *newStream;
2511 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002512 int bufferCount = MAX_INFLIGHT_REQUESTS;
2513 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2514 CAM_STREAM_TYPE_VIDEO) {
2515 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2516 bufferCount = MAX_VIDEO_BUFFERS;
2517 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002518 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2519 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002520 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002521 this,
2522 newStream,
2523 (cam_stream_type_t)
2524 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2525 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2526 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002527 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002528 if (channel == NULL) {
2529 LOGE("allocation of channel failed");
2530 pthread_mutex_unlock(&mMutex);
2531 return -ENOMEM;
2532 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002533 /* disable UBWC for preview, though supported,
2534 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002535 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002536 (previewSize.width == (int32_t)videoWidth)&&
2537 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002538 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002539 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002540 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002541 /* When goog_zoom is linked to the preview or video stream,
2542 * disable ubwc to the linked stream */
2543 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2544 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2545 channel->setUBWCEnabled(false);
2546 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002547 newStream->max_buffers = channel->getNumBuffers();
2548 newStream->priv = channel;
2549 }
2550 break;
2551 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2552 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2553 mChannelHandle,
2554 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002555 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002556 this,
2557 newStream,
2558 (cam_stream_type_t)
2559 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2560 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2561 mMetadataChannel);
2562 if (channel == NULL) {
2563 LOGE("allocation of YUV channel failed");
2564 pthread_mutex_unlock(&mMutex);
2565 return -ENOMEM;
2566 }
2567 newStream->max_buffers = channel->getNumBuffers();
2568 newStream->priv = channel;
2569 break;
2570 }
2571 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2572 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002573 case HAL_PIXEL_FORMAT_RAW10: {
2574 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2575 (HAL_DATASPACE_DEPTH != newStream->data_space))
2576 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002577 mRawChannel = new QCamera3RawChannel(
2578 mCameraHandle->camera_handle, mChannelHandle,
2579 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002580 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002581 this, newStream,
2582 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002583 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002584 if (mRawChannel == NULL) {
2585 LOGE("allocation of raw channel failed");
2586 pthread_mutex_unlock(&mMutex);
2587 return -ENOMEM;
2588 }
2589 newStream->max_buffers = mRawChannel->getNumBuffers();
2590 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2591 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002592 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002593 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002594 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2595 mDepthChannel = new QCamera3DepthChannel(
2596 mCameraHandle->camera_handle, mChannelHandle,
2597 mCameraHandle->ops, NULL, NULL, &padding_info,
2598 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2599 mMetadataChannel);
2600 if (NULL == mDepthChannel) {
2601 LOGE("Allocation of depth channel failed");
2602 pthread_mutex_unlock(&mMutex);
2603 return NO_MEMORY;
2604 }
2605 newStream->priv = mDepthChannel;
2606 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2607 } else {
2608 // Max live snapshot inflight buffer is 1. This is to mitigate
2609 // frame drop issues for video snapshot. The more buffers being
2610 // allocated, the more frame drops there are.
2611 mPictureChannel = new QCamera3PicChannel(
2612 mCameraHandle->camera_handle, mChannelHandle,
2613 mCameraHandle->ops, captureResultCb,
2614 setBufferErrorStatus, &padding_info, this, newStream,
2615 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2616 m_bIs4KVideo, isZsl, mMetadataChannel,
2617 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2618 if (mPictureChannel == NULL) {
2619 LOGE("allocation of channel failed");
2620 pthread_mutex_unlock(&mMutex);
2621 return -ENOMEM;
2622 }
2623 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2624 newStream->max_buffers = mPictureChannel->getNumBuffers();
2625 mPictureChannel->overrideYuvSize(
2626 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2627 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002628 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002629 break;
2630
2631 default:
2632 LOGE("not a supported format 0x%x", newStream->format);
2633 break;
2634 }
2635 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2636 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2637 } else {
2638 LOGE("Error, Unknown stream type");
2639 pthread_mutex_unlock(&mMutex);
2640 return -EINVAL;
2641 }
2642
2643 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002644 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2645 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002646 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002647 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002648 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2649 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2650 }
2651 }
2652
2653 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2654 it != mStreamInfo.end(); it++) {
2655 if ((*it)->stream == newStream) {
2656 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2657 break;
2658 }
2659 }
2660 } else {
2661 // Channel already exists for this stream
2662 // Do nothing for now
2663 }
2664 padding_info = gCamCapability[mCameraId]->padding_info;
2665
Emilian Peev7650c122017-01-19 08:24:33 -08002666        /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002667         * since there is no real stream associated with them
2668         */
Emilian Peev7650c122017-01-19 08:24:33 -08002669 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002670 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2671 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002672 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002673 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002674 }
2675
Binhao Lincdb362a2017-04-20 13:31:54 -07002676 // By default, preview stream TNR is disabled.
2677    // Enable TNR for the preview stream if all conditions below are satisfied:
2678    // 1. video resolution <= 1080p.
2679 // 2. preview resolution == video resolution.
2680 // 3. video stream TNR is enabled.
2681 // 4. EIS2.0
2682 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2683 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2684 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2685 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2686 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2687 video_stream->width == preview_stream->width &&
2688 video_stream->height == preview_stream->height) {
2689 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2690 CAM_QCOM_FEATURE_CPP_TNR;
2691 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2692 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2693 ~CAM_QCOM_FEATURE_CDS;
2694 }
2695 }
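    // Illustrative example (hypothetical configuration, not from any particular device):
    // a 1920x1080 preview paired with a 1920x1080 video stream, video TNR enabled
    // (m_bTnrEnabled && m_bTnrVideo) and EIS 2.0 selected satisfies all four conditions
    // above, so the preview stream's postprocess mask gains CAM_QCOM_FEATURE_CPP_TNR
    // and drops CAM_QCOM_FEATURE_CDS.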
2696
Thierry Strudel2896d122017-02-23 19:18:03 -08002697 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2698 onlyRaw = false;
2699 }
2700
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002701 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002702 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002703 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002704 cam_analysis_info_t analysisInfo;
2705 int32_t ret = NO_ERROR;
2706 ret = mCommon.getAnalysisInfo(
2707 FALSE,
2708 analysisFeatureMask,
2709 &analysisInfo);
2710 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002711 cam_color_filter_arrangement_t analysis_color_arrangement =
2712 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2713 CAM_FILTER_ARRANGEMENT_Y :
2714 gCamCapability[mCameraId]->color_arrangement);
2715 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2716 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002717 cam_dimension_t analysisDim;
2718 analysisDim = mCommon.getMatchingDimension(previewSize,
2719 analysisInfo.analysis_recommended_res);
2720
2721 mAnalysisChannel = new QCamera3SupportChannel(
2722 mCameraHandle->camera_handle,
2723 mChannelHandle,
2724 mCameraHandle->ops,
2725 &analysisInfo.analysis_padding_info,
2726 analysisFeatureMask,
2727 CAM_STREAM_TYPE_ANALYSIS,
2728 &analysisDim,
2729 (analysisInfo.analysis_format
2730 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2731 : CAM_FORMAT_YUV_420_NV21),
2732 analysisInfo.hw_analysis_supported,
2733 gCamCapability[mCameraId]->color_arrangement,
2734 this,
2735 0); // force buffer count to 0
2736 } else {
2737 LOGW("getAnalysisInfo failed, ret = %d", ret);
2738 }
2739 if (!mAnalysisChannel) {
2740 LOGW("Analysis channel cannot be created");
2741 }
2742 }
2743
Thierry Strudel3d639192016-09-09 11:52:26 -07002744 //RAW DUMP channel
2745 if (mEnableRawDump && isRawStreamRequested == false){
2746 cam_dimension_t rawDumpSize;
2747 rawDumpSize = getMaxRawSize(mCameraId);
2748 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2749 setPAAFSupport(rawDumpFeatureMask,
2750 CAM_STREAM_TYPE_RAW,
2751 gCamCapability[mCameraId]->color_arrangement);
2752 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2753 mChannelHandle,
2754 mCameraHandle->ops,
2755 rawDumpSize,
2756 &padding_info,
2757 this, rawDumpFeatureMask);
2758 if (!mRawDumpChannel) {
2759 LOGE("Raw Dump channel cannot be created");
2760 pthread_mutex_unlock(&mMutex);
2761 return -ENOMEM;
2762 }
2763 }
2764
Thierry Strudel3d639192016-09-09 11:52:26 -07002765 if (mAnalysisChannel) {
2766 cam_analysis_info_t analysisInfo;
2767 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2768 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2769 CAM_STREAM_TYPE_ANALYSIS;
2770 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2771 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002772 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002773 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2774 &analysisInfo);
2775 if (rc != NO_ERROR) {
2776 LOGE("getAnalysisInfo failed, ret = %d", rc);
2777 pthread_mutex_unlock(&mMutex);
2778 return rc;
2779 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002780 cam_color_filter_arrangement_t analysis_color_arrangement =
2781 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2782 CAM_FILTER_ARRANGEMENT_Y :
2783 gCamCapability[mCameraId]->color_arrangement);
2784 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2785 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2786 analysis_color_arrangement);
2787
Thierry Strudel3d639192016-09-09 11:52:26 -07002788 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002789 mCommon.getMatchingDimension(previewSize,
2790 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002791 mStreamConfigInfo.num_streams++;
2792 }
2793
Thierry Strudel2896d122017-02-23 19:18:03 -08002794 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002795 cam_analysis_info_t supportInfo;
2796 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2797 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2798 setPAAFSupport(callbackFeatureMask,
2799 CAM_STREAM_TYPE_CALLBACK,
2800 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002801 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002802 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002803 if (ret != NO_ERROR) {
2804 /* Ignore the error for Mono camera
2805 * because the PAAF bit mask is only set
2806 * for CAM_STREAM_TYPE_ANALYSIS stream type
2807 */
2808 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2809 LOGW("getAnalysisInfo failed, ret = %d", ret);
2810 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002811 }
2812 mSupportChannel = new QCamera3SupportChannel(
2813 mCameraHandle->camera_handle,
2814 mChannelHandle,
2815 mCameraHandle->ops,
2816 &gCamCapability[mCameraId]->padding_info,
2817 callbackFeatureMask,
2818 CAM_STREAM_TYPE_CALLBACK,
2819 &QCamera3SupportChannel::kDim,
2820 CAM_FORMAT_YUV_420_NV21,
2821 supportInfo.hw_analysis_supported,
2822 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002823 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002824 if (!mSupportChannel) {
2825 LOGE("dummy channel cannot be created");
2826 pthread_mutex_unlock(&mMutex);
2827 return -ENOMEM;
2828 }
2829 }
2830
2831 if (mSupportChannel) {
2832 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2833 QCamera3SupportChannel::kDim;
2834 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2835 CAM_STREAM_TYPE_CALLBACK;
2836 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2837 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2838 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2839 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2840 gCamCapability[mCameraId]->color_arrangement);
2841 mStreamConfigInfo.num_streams++;
2842 }
2843
2844 if (mRawDumpChannel) {
2845 cam_dimension_t rawSize;
2846 rawSize = getMaxRawSize(mCameraId);
2847 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2848 rawSize;
2849 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2850 CAM_STREAM_TYPE_RAW;
2851 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2852 CAM_QCOM_FEATURE_NONE;
2853 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2854 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2855 gCamCapability[mCameraId]->color_arrangement);
2856 mStreamConfigInfo.num_streams++;
2857 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002858
2859 if (mHdrPlusRawSrcChannel) {
2860 cam_dimension_t rawSize;
2861 rawSize = getMaxRawSize(mCameraId);
2862 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2863 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2864 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2865 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2866 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2867 gCamCapability[mCameraId]->color_arrangement);
2868 mStreamConfigInfo.num_streams++;
2869 }
2870
Thierry Strudel3d639192016-09-09 11:52:26 -07002871    /* In HFR mode, if no video stream is added, create a dummy channel so that
2872     * the ISP can still set up batch mode even for the preview-only case. This channel
2873     * is never 'start'ed (no stream-on); it is only 'initialized'. */
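    // Illustrative example (assumed session, for explanation only): a constrained
    // high-speed (HFR) configuration that contains only a preview stream still gets a
    // CAM_STREAM_TYPE_VIDEO entry in mStreamConfigInfo via mDummyBatchStream below, so
    // the ISP can program its HFR batch; no buffers ever flow through the dummy channel.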
2874 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2875 !m_bIsVideo) {
2876 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2877 setPAAFSupport(dummyFeatureMask,
2878 CAM_STREAM_TYPE_VIDEO,
2879 gCamCapability[mCameraId]->color_arrangement);
2880 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2881 mChannelHandle,
2882 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002883 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002884 this,
2885 &mDummyBatchStream,
2886 CAM_STREAM_TYPE_VIDEO,
2887 dummyFeatureMask,
2888 mMetadataChannel);
2889 if (NULL == mDummyBatchChannel) {
2890 LOGE("creation of mDummyBatchChannel failed."
2891 "Preview will use non-hfr sensor mode ");
2892 }
2893 }
2894 if (mDummyBatchChannel) {
2895 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2896 mDummyBatchStream.width;
2897 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2898 mDummyBatchStream.height;
2899 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2900 CAM_STREAM_TYPE_VIDEO;
2901 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2902 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2903 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2904 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2905 gCamCapability[mCameraId]->color_arrangement);
2906 mStreamConfigInfo.num_streams++;
2907 }
2908
2909 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
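    // Reading of the nested ternary below (descriptive of this code only):
    // m_bIs4KVideo => 0, otherwise m_bEis3PropertyEnabled => MAX_VIDEO_BUFFERS,
    // otherwise MAX_INFLIGHT_REQUESTS.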
2910 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002911 m_bIs4KVideo ? 0 :
2912 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002913
2914 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2915 for (pendingRequestIterator i = mPendingRequestsList.begin();
2916 i != mPendingRequestsList.end();) {
2917 i = erasePendingRequest(i);
2918 }
2919 mPendingFrameDropList.clear();
2920 // Initialize/Reset the pending buffers list
2921 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2922 req.mPendingBufferList.clear();
2923 }
2924 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2925
Thierry Strudel3d639192016-09-09 11:52:26 -07002926 mCurJpegMeta.clear();
2927 //Get min frame duration for this streams configuration
2928 deriveMinFrameDuration();
2929
Chien-Yu Chenee335912017-02-09 17:53:20 -08002930 mFirstPreviewIntentSeen = false;
2931
2932    // Disable HDR+ if it's enabled
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002933 {
2934 Mutex::Autolock l(gHdrPlusClientLock);
2935 disableHdrPlusModeLocked();
2936 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002937
Thierry Strudel3d639192016-09-09 11:52:26 -07002938 // Update state
2939 mState = CONFIGURED;
2940
2941 pthread_mutex_unlock(&mMutex);
2942
2943 return rc;
2944}
2945
2946/*===========================================================================
2947 * FUNCTION : validateCaptureRequest
2948 *
2949 * DESCRIPTION: validate a capture request from camera service
2950 *
2951 * PARAMETERS :
2952 * @request : request from framework to process
2953 *
2954 * RETURN :
2955 *
2956 *==========================================================================*/
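// Summary of the checks below (descriptive only): a request is rejected with BAD_VALUE
// if it is NULL, has NULL settings while mState == CONFIGURED (the first request),
// provides no output buffers (and no internally requested streams), exceeds
// MAX_NUM_STREAMS, or carries any buffer with a bad status, a release fence, a NULL
// buffer handle, or an unconfigured stream.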
2957int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002958 camera3_capture_request_t *request,
2959 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002960{
2961 ssize_t idx = 0;
2962 const camera3_stream_buffer_t *b;
2963 CameraMetadata meta;
2964
2965 /* Sanity check the request */
2966 if (request == NULL) {
2967 LOGE("NULL capture request");
2968 return BAD_VALUE;
2969 }
2970
2971 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2972 /*settings cannot be null for the first request*/
2973 return BAD_VALUE;
2974 }
2975
2976 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002977 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2978 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002979        LOGE("%s: Request %d: No output buffers provided!",
2980                __FUNCTION__, frameNumber);
2981 return BAD_VALUE;
2982 }
2983 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2984 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2985 request->num_output_buffers, MAX_NUM_STREAMS);
2986 return BAD_VALUE;
2987 }
2988 if (request->input_buffer != NULL) {
2989 b = request->input_buffer;
2990 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2991 LOGE("Request %d: Buffer %ld: Status not OK!",
2992 frameNumber, (long)idx);
2993 return BAD_VALUE;
2994 }
2995 if (b->release_fence != -1) {
2996 LOGE("Request %d: Buffer %ld: Has a release fence!",
2997 frameNumber, (long)idx);
2998 return BAD_VALUE;
2999 }
3000 if (b->buffer == NULL) {
3001 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3002 frameNumber, (long)idx);
3003 return BAD_VALUE;
3004 }
3005 }
3006
3007 // Validate all buffers
3008 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003009 if (b == NULL) {
3010 return BAD_VALUE;
3011 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003012 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003013 QCamera3ProcessingChannel *channel =
3014 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3015 if (channel == NULL) {
3016 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3017 frameNumber, (long)idx);
3018 return BAD_VALUE;
3019 }
3020 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3021 LOGE("Request %d: Buffer %ld: Status not OK!",
3022 frameNumber, (long)idx);
3023 return BAD_VALUE;
3024 }
3025 if (b->release_fence != -1) {
3026 LOGE("Request %d: Buffer %ld: Has a release fence!",
3027 frameNumber, (long)idx);
3028 return BAD_VALUE;
3029 }
3030 if (b->buffer == NULL) {
3031 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3032 frameNumber, (long)idx);
3033 return BAD_VALUE;
3034 }
3035 if (*(b->buffer) == NULL) {
3036 LOGE("Request %d: Buffer %ld: NULL private handle!",
3037 frameNumber, (long)idx);
3038 return BAD_VALUE;
3039 }
3040 idx++;
3041 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003042 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003043 return NO_ERROR;
3044}
3045
3046/*===========================================================================
3047 * FUNCTION : deriveMinFrameDuration
3048 *
3049 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3050 * on currently configured streams.
3051 *
3052 * PARAMETERS : NONE
3053 *
3054 * RETURN : NONE
3055 *
3056 *==========================================================================*/
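// Worked example (hypothetical stream set, for illustration only): with a 1080p preview
// and a 12MP JPEG stream and no RAW stream, maxProcessedDim becomes the 12MP JPEG area,
// maxRawDim is then raised to the smallest sensor RAW size whose area is >= that value,
// and the minimum processed/JPEG/RAW durations are looked up from the matching entries
// in picture_min_duration[] and raw_min_duration[].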
3057void QCamera3HardwareInterface::deriveMinFrameDuration()
3058{
3059 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3060
3061 maxJpegDim = 0;
3062 maxProcessedDim = 0;
3063 maxRawDim = 0;
3064
3065 // Figure out maximum jpeg, processed, and raw dimensions
3066 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3067 it != mStreamInfo.end(); it++) {
3068
3069 // Input stream doesn't have valid stream_type
3070 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3071 continue;
3072
3073 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3074 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3075 if (dimension > maxJpegDim)
3076 maxJpegDim = dimension;
3077 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3078 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3079 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3080 if (dimension > maxRawDim)
3081 maxRawDim = dimension;
3082 } else {
3083 if (dimension > maxProcessedDim)
3084 maxProcessedDim = dimension;
3085 }
3086 }
3087
3088 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3089 MAX_SIZES_CNT);
3090
3091 //Assume all jpeg dimensions are in processed dimensions.
3092 if (maxJpegDim > maxProcessedDim)
3093 maxProcessedDim = maxJpegDim;
3094 //Find the smallest raw dimension that is greater or equal to jpeg dimension
3095 if (maxProcessedDim > maxRawDim) {
3096 maxRawDim = INT32_MAX;
3097
3098 for (size_t i = 0; i < count; i++) {
3099 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3100 gCamCapability[mCameraId]->raw_dim[i].height;
3101 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3102 maxRawDim = dimension;
3103 }
3104 }
3105
3106 //Find minimum durations for processed, jpeg, and raw
3107 for (size_t i = 0; i < count; i++) {
3108 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3109 gCamCapability[mCameraId]->raw_dim[i].height) {
3110 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3111 break;
3112 }
3113 }
3114 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3115 for (size_t i = 0; i < count; i++) {
3116 if (maxProcessedDim ==
3117 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3118 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3119 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3120 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3121 break;
3122 }
3123 }
3124}
3125
3126/*===========================================================================
3127 * FUNCTION : getMinFrameDuration
3128 *
3129 * DESCRIPTION: get minimum frame duration based on the currently derived minimum
3130 *              frame durations and the current request configuration.
3131 *
3132 * PARAMETERS : @request: request sent by the framework
3133 *
3134 * RETURN     : min frame duration for a particular request
3135 *
3136 *==========================================================================*/
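// Worked example (hypothetical durations, for illustration only): with
// mMinProcessedFrameDuration = 33ms, mMinRawFrameDuration = 50ms and
// mMinJpegFrameDuration = 100ms, a request without a BLOB/JPEG stream returns
// 50ms (the max of the raw and processed durations), while a request that also
// includes a BLOB/JPEG stream returns 100ms.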
3137int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3138{
3139 bool hasJpegStream = false;
3140 bool hasRawStream = false;
3141 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3142 const camera3_stream_t *stream = request->output_buffers[i].stream;
3143 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3144 hasJpegStream = true;
3145 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3146 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3147 stream->format == HAL_PIXEL_FORMAT_RAW16)
3148 hasRawStream = true;
3149 }
3150
3151 if (!hasJpegStream)
3152 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3153 else
3154 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3155}
3156
3157/*===========================================================================
3158 * FUNCTION : handleBuffersDuringFlushLock
3159 *
3160 * DESCRIPTION: Account for buffers returned from back-end during flush
3161 * This function is executed while mMutex is held by the caller.
3162 *
3163 * PARAMETERS :
3164 * @buffer: image buffer for the callback
3165 *
3166 * RETURN :
3167 *==========================================================================*/
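// Example of the accounting below (derived from this code): if flush() recorded
// numPendingBufsAtFlush = 3, each buffer returned by the backend decrements the count;
// once it reaches 0, mBuffersCond is signalled so the waiting flush() can continue.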
3168void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3169{
3170 bool buffer_found = false;
3171 for (List<PendingBuffersInRequest>::iterator req =
3172 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3173 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3174 for (List<PendingBufferInfo>::iterator i =
3175 req->mPendingBufferList.begin();
3176 i != req->mPendingBufferList.end(); i++) {
3177 if (i->buffer == buffer->buffer) {
3178 mPendingBuffersMap.numPendingBufsAtFlush--;
3179 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3180 buffer->buffer, req->frame_number,
3181 mPendingBuffersMap.numPendingBufsAtFlush);
3182 buffer_found = true;
3183 break;
3184 }
3185 }
3186 if (buffer_found) {
3187 break;
3188 }
3189 }
3190 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3191 //signal the flush()
3192 LOGD("All buffers returned to HAL. Continue flush");
3193 pthread_cond_signal(&mBuffersCond);
3194 }
3195}
3196
Thierry Strudel3d639192016-09-09 11:52:26 -07003197/*===========================================================================
3198 * FUNCTION : handleBatchMetadata
3199 *
3200 * DESCRIPTION: Handles metadata buffer callback in batch mode
3201 *
3202 * PARAMETERS : @metadata_buf: metadata buffer
3203 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3204 * the meta buf in this method
3205 *
3206 * RETURN :
3207 *
3208 *==========================================================================*/
3209void QCamera3HardwareInterface::handleBatchMetadata(
3210 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3211{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003212 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003213
3214 if (NULL == metadata_buf) {
3215 LOGE("metadata_buf is NULL");
3216 return;
3217 }
3218    /* In batch mode, the metadata will contain the frame number and timestamp of
3219     * the last frame in the batch. E.g.: a batch containing buffers from requests
3220     * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8.
3221     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3222     * multiple process_capture_results */
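    /* Worked example of the frame number interpolation below (illustrative numbers):
     * for the request 5..8 batch above, last_frame_number = 8 and
     * frameNumDiff = 8 + 1 - 5 = 4, so the per-iteration code infers frame numbers
     * 5, 6, 7 and 8 from the single batch metadata buffer. */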
3223 metadata_buffer_t *metadata =
3224 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3225 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3226 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3227 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3228 uint32_t frame_number = 0, urgent_frame_number = 0;
3229 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3230 bool invalid_metadata = false;
3231 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3232 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003233 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003234
3235 int32_t *p_frame_number_valid =
3236 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3237 uint32_t *p_frame_number =
3238 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3239 int64_t *p_capture_time =
3240 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3241 int32_t *p_urgent_frame_number_valid =
3242 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3243 uint32_t *p_urgent_frame_number =
3244 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3245
3246 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3247 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3248 (NULL == p_urgent_frame_number)) {
3249 LOGE("Invalid metadata");
3250 invalid_metadata = true;
3251 } else {
3252 frame_number_valid = *p_frame_number_valid;
3253 last_frame_number = *p_frame_number;
3254 last_frame_capture_time = *p_capture_time;
3255 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3256 last_urgent_frame_number = *p_urgent_frame_number;
3257 }
3258
3259    /* In batch mode, when no video buffers are requested, set_parms are sent
3260     * for every capture_request. The difference between consecutive urgent
3261     * frame numbers and frame numbers should be used to interpolate the
3262     * corresponding frame numbers and timestamps */
3263 pthread_mutex_lock(&mMutex);
3264 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003265 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3266 if(idx < 0) {
3267 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3268 last_urgent_frame_number);
3269 mState = ERROR;
3270 pthread_mutex_unlock(&mMutex);
3271 return;
3272 }
3273 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003274 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3275 first_urgent_frame_number;
3276
3277 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3278 urgent_frame_number_valid,
3279 first_urgent_frame_number, last_urgent_frame_number);
3280 }
3281
3282 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003283 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3284 if(idx < 0) {
3285 LOGE("Invalid frame number received: %d. Irrecoverable error",
3286 last_frame_number);
3287 mState = ERROR;
3288 pthread_mutex_unlock(&mMutex);
3289 return;
3290 }
3291 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003292 frameNumDiff = last_frame_number + 1 -
3293 first_frame_number;
3294 mPendingBatchMap.removeItem(last_frame_number);
3295
3296 LOGD("frm: valid: %d frm_num: %d - %d",
3297 frame_number_valid,
3298 first_frame_number, last_frame_number);
3299
3300 }
3301 pthread_mutex_unlock(&mMutex);
3302
3303 if (urgent_frame_number_valid || frame_number_valid) {
3304 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3305 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3306 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3307 urgentFrameNumDiff, last_urgent_frame_number);
3308 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3309 LOGE("frameNumDiff: %d frameNum: %d",
3310 frameNumDiff, last_frame_number);
3311 }
3312
3313 for (size_t i = 0; i < loopCount; i++) {
3314 /* handleMetadataWithLock is called even for invalid_metadata for
3315 * pipeline depth calculation */
3316 if (!invalid_metadata) {
3317 /* Infer frame number. Batch metadata contains frame number of the
3318 * last frame */
3319 if (urgent_frame_number_valid) {
3320 if (i < urgentFrameNumDiff) {
3321 urgent_frame_number =
3322 first_urgent_frame_number + i;
3323 LOGD("inferred urgent frame_number: %d",
3324 urgent_frame_number);
3325 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3326 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3327 } else {
3328 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3329 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3330 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3331 }
3332 }
3333
3334 /* Infer frame number. Batch metadata contains frame number of the
3335 * last frame */
3336 if (frame_number_valid) {
3337 if (i < frameNumDiff) {
3338 frame_number = first_frame_number + i;
3339 LOGD("inferred frame_number: %d", frame_number);
3340 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3341 CAM_INTF_META_FRAME_NUMBER, frame_number);
3342 } else {
3343 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3344 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3345 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3346 }
3347 }
3348
3349 if (last_frame_capture_time) {
3350 //Infer timestamp
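                // Timestamp arithmetic, with assumed values for illustration: for a
                // 4-entry batch (loopCount = 4) at mHFRVideoFps = 120,
                // first_frame_capture_time = last_frame_capture_time - 3 * (NSEC_PER_SEC / 120)
                // and entry i is stamped first_frame_capture_time + i * (NSEC_PER_SEC / 120).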
3351 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003352 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003353 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003354 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003355 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3356 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3357 LOGD("batch capture_time: %lld, capture_time: %lld",
3358 last_frame_capture_time, capture_time);
3359 }
3360 }
3361 pthread_mutex_lock(&mMutex);
3362 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003363 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003364 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3365 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003366                &is_metabuf_queued /* if metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003367 pthread_mutex_unlock(&mMutex);
3368 }
3369
3370 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003371 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003372 mMetadataChannel->bufDone(metadata_buf);
3373 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003374 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003375 }
3376}
3377
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003378void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3379 camera3_error_msg_code_t errorCode)
3380{
3381 camera3_notify_msg_t notify_msg;
3382 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3383 notify_msg.type = CAMERA3_MSG_ERROR;
3384 notify_msg.message.error.error_code = errorCode;
3385 notify_msg.message.error.error_stream = NULL;
3386 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003387 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003388
3389 return;
3390}
Thierry Strudel3d639192016-09-09 11:52:26 -07003391/*===========================================================================
3392 * FUNCTION : handleMetadataWithLock
3393 *
3394 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3395 *
3396 * PARAMETERS : @metadata_buf: metadata buffer
3397 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3398 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003399 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3400 * last urgent metadata in a batch. Always true for non-batch mode
3401 * @lastMetadataInBatch: Boolean to indicate whether this is the
3402 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003403 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3404 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003405 *
3406 * RETURN :
3407 *
3408 *==========================================================================*/
3409void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003410 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003411 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3412 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003413{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003414 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003415 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3416 //during flush do not send metadata from this thread
3417 LOGD("not sending metadata during flush or when mState is error");
3418 if (free_and_bufdone_meta_buf) {
3419 mMetadataChannel->bufDone(metadata_buf);
3420 free(metadata_buf);
3421 }
3422 return;
3423 }
3424
3425 //not in flush
3426 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3427 int32_t frame_number_valid, urgent_frame_number_valid;
3428 uint32_t frame_number, urgent_frame_number;
3429 int64_t capture_time;
3430 nsecs_t currentSysTime;
3431
3432 int32_t *p_frame_number_valid =
3433 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3434 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3435 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3436 int32_t *p_urgent_frame_number_valid =
3437 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3438 uint32_t *p_urgent_frame_number =
3439 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3440 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3441 metadata) {
3442 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3443 *p_frame_number_valid, *p_frame_number);
3444 }
3445
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003446 camera_metadata_t *resultMetadata = nullptr;
3447
Thierry Strudel3d639192016-09-09 11:52:26 -07003448 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3449 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3450 LOGE("Invalid metadata");
3451 if (free_and_bufdone_meta_buf) {
3452 mMetadataChannel->bufDone(metadata_buf);
3453 free(metadata_buf);
3454 }
3455 goto done_metadata;
3456 }
3457 frame_number_valid = *p_frame_number_valid;
3458 frame_number = *p_frame_number;
3459 capture_time = *p_capture_time;
3460 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3461 urgent_frame_number = *p_urgent_frame_number;
3462 currentSysTime = systemTime(CLOCK_MONOTONIC);
3463
3464 // Detect if buffers from any requests are overdue
3465 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003466 int64_t timeout;
3467 {
3468 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3469 // If there is a pending HDR+ request, the following requests may be blocked until the
3470 // HDR+ request is done. So allow a longer timeout.
3471 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3472 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3473 }
3474
3475 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003476 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003477 assert(missed.stream->priv);
3478 if (missed.stream->priv) {
3479 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3480 assert(ch->mStreams[0]);
3481 if (ch->mStreams[0]) {
3482 LOGE("Cancel missing frame = %d, buffer = %p,"
3483 "stream type = %d, stream format = %d",
3484 req.frame_number, missed.buffer,
3485 ch->mStreams[0]->getMyType(), missed.stream->format);
3486 ch->timeoutFrame(req.frame_number);
3487 }
3488 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003489 }
3490 }
3491 }
3492 //Partial result on process_capture_result for timestamp
3493 if (urgent_frame_number_valid) {
3494 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3495 urgent_frame_number, capture_time);
3496
3497        //Received an urgent frame number, handle it
3498 //using partial results
3499 for (pendingRequestIterator i =
3500 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3501 LOGD("Iterator Frame = %d urgent frame = %d",
3502 i->frame_number, urgent_frame_number);
3503
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003504 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003505 (i->partial_result_cnt == 0)) {
3506 LOGE("Error: HAL missed urgent metadata for frame number %d",
3507 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003508 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003509 }
3510
3511 if (i->frame_number == urgent_frame_number &&
3512 i->bUrgentReceived == 0) {
3513
3514 camera3_capture_result_t result;
3515 memset(&result, 0, sizeof(camera3_capture_result_t));
3516
3517 i->partial_result_cnt++;
3518 i->bUrgentReceived = 1;
3519 // Extract 3A metadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003520 result.result = translateCbUrgentMetadataToResultMetadata(
3521 metadata, lastUrgentMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003522 // Populate metadata result
3523 result.frame_number = urgent_frame_number;
3524 result.num_output_buffers = 0;
3525 result.output_buffers = NULL;
3526 result.partial_result = i->partial_result_cnt;
3527
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003528 {
3529 Mutex::Autolock l(gHdrPlusClientLock);
3530 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3531 // Notify HDR+ client about the partial metadata.
3532 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3533 result.partial_result == PARTIAL_RESULT_COUNT);
3534 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003535 }
3536
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003537 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003538 LOGD("urgent frame_number = %u, capture_time = %lld",
3539 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003540 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3541 // Instant AEC settled for this frame.
3542 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3543 mInstantAECSettledFrameNumber = urgent_frame_number;
3544 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003545 free_camera_metadata((camera_metadata_t *)result.result);
3546 break;
3547 }
3548 }
3549 }
3550
3551 if (!frame_number_valid) {
3552 LOGD("Not a valid normal frame number, used as SOF only");
3553 if (free_and_bufdone_meta_buf) {
3554 mMetadataChannel->bufDone(metadata_buf);
3555 free(metadata_buf);
3556 }
3557 goto done_metadata;
3558 }
3559 LOGH("valid frame_number = %u, capture_time = %lld",
3560 frame_number, capture_time);
3561
Emilian Peev7650c122017-01-19 08:24:33 -08003562 if (metadata->is_depth_data_valid) {
3563 handleDepthDataLocked(metadata->depth_data, frame_number);
3564 }
3565
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003566    // Check whether any stream buffer corresponding to this frame is dropped or not.
3567    // If dropped, then send an ERROR_BUFFER for the corresponding stream.
3568    // OR, if instant AEC is enabled, drop frames until AEC has settled.
3569 for (auto & pendingRequest : mPendingRequestsList) {
3570 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3571 mInstantAECSettledFrameNumber)) {
3572 camera3_notify_msg_t notify_msg = {};
3573 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003574 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003575 QCamera3ProcessingChannel *channel =
3576 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003577 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003578 if (p_cam_frame_drop) {
3579 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003580 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003581 // Got the stream ID for drop frame.
3582 dropFrame = true;
3583 break;
3584 }
3585 }
3586 } else {
3587 // This is instant AEC case.
3588                    // For instant AEC, drop the stream until AEC has settled.
3589 dropFrame = true;
3590 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003591
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003592 if (dropFrame) {
3593 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3594 if (p_cam_frame_drop) {
3595 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003596 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003597 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003598 } else {
3599 // For instant AEC, inform frame drop and frame number
3600 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3601 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003602 pendingRequest.frame_number, streamID,
3603 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003604 }
3605 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003606 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003607 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003608 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003609 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003610 if (p_cam_frame_drop) {
3611 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003612 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003613 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003614 } else {
3615 // For instant AEC, inform frame drop and frame number
3616 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3617 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003618 pendingRequest.frame_number, streamID,
3619 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003620 }
3621 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003622 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003623 PendingFrameDrop.stream_ID = streamID;
3624 // Add the Frame drop info to mPendingFrameDropList
3625 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003626 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003627 }
3628 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003629 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003630
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003631 for (auto & pendingRequest : mPendingRequestsList) {
3632 // Find the pending request with the frame number.
3633 if (pendingRequest.frame_number == frame_number) {
3634 // Update the sensor timestamp.
3635 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003636
Thierry Strudel3d639192016-09-09 11:52:26 -07003637
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003638            /* Set the timestamp in display metadata so that clients aware of
3639               private_handle, such as VT, can use these unmodified timestamps.
3640               The camera framework is unaware of this timestamp and cannot change it */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003641 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003642
Thierry Strudel3d639192016-09-09 11:52:26 -07003643 // Find channel requiring metadata, meaning internal offline postprocess
3644 // is needed.
3645 //TODO: for now, we don't support two streams requiring metadata at the same time.
3646            // (because we are not making copies, and the metadata buffer is not reference counted.)
3647 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003648 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3649 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003650 if (iter->need_metadata) {
3651 internalPproc = true;
3652 QCamera3ProcessingChannel *channel =
3653 (QCamera3ProcessingChannel *)iter->stream->priv;
3654 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003655 if(p_is_metabuf_queued != NULL) {
3656 *p_is_metabuf_queued = true;
3657 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003658 break;
3659 }
3660 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003661 for (auto itr = pendingRequest.internalRequestList.begin();
3662 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003663 if (itr->need_metadata) {
3664 internalPproc = true;
3665 QCamera3ProcessingChannel *channel =
3666 (QCamera3ProcessingChannel *)itr->stream->priv;
3667 channel->queueReprocMetadata(metadata_buf);
3668 break;
3669 }
3670 }
3671
Thierry Strudel54dc9782017-02-15 12:12:10 -08003672 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003673
3674 bool *enableZsl = nullptr;
3675 if (gExposeEnableZslKey) {
3676 enableZsl = &pendingRequest.enableZsl;
3677 }
3678
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003679 resultMetadata = translateFromHalMetadata(metadata,
3680 pendingRequest.timestamp, pendingRequest.request_id,
3681 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3682 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003683 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003684 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003685 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003686 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003687 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003688 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003689
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003690 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003691
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003692 if (pendingRequest.blob_request) {
3693 //Dump tuning metadata if enabled and available
3694 char prop[PROPERTY_VALUE_MAX];
3695 memset(prop, 0, sizeof(prop));
3696 property_get("persist.camera.dumpmetadata", prop, "0");
3697 int32_t enabled = atoi(prop);
3698 if (enabled && metadata->is_tuning_params_valid) {
3699 dumpMetadataToFile(metadata->tuning_params,
3700 mMetaFrameCount,
3701 enabled,
3702 "Snapshot",
3703 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003704 }
3705 }
3706
3707 if (!internalPproc) {
3708 LOGD("couldn't find need_metadata for this metadata");
3709 // Return metadata buffer
3710 if (free_and_bufdone_meta_buf) {
3711 mMetadataChannel->bufDone(metadata_buf);
3712 free(metadata_buf);
3713 }
3714 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003715
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003716 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003717 }
3718 }
3719
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003720 // Try to send out shutter callbacks and capture results.
3721 handlePendingResultsWithLock(frame_number, resultMetadata);
3722 return;
3723
Thierry Strudel3d639192016-09-09 11:52:26 -07003724done_metadata:
3725 for (pendingRequestIterator i = mPendingRequestsList.begin();
3726 i != mPendingRequestsList.end() ;i++) {
3727 i->pipeline_depth++;
3728 }
3729 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3730 unblockRequestIfNecessary();
3731}
3732
3733/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003734 * FUNCTION   : handleDepthDataLocked
3735 *
3736 * DESCRIPTION: Handles incoming depth data
3737 *
3738 * PARAMETERS : @depthData : Depth data
3739 * @frameNumber: Frame number of the incoming depth data
3740 *
3741 * RETURN :
3742 *
3743 *==========================================================================*/
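// Behavior sketch (derived from the loop below, frame numbers are hypothetical): if
// depth buffers are pending for frames 10, 11 and 12 and depthData arrives for frame 12,
// frames 10 and 11 are returned with CAMERA3_BUFFER_STATUS_ERROR plus an error notify,
// while frame 12 is populated from depthData and returned with CAMERA3_BUFFER_STATUS_OK.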
3744void QCamera3HardwareInterface::handleDepthDataLocked(
3745 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3746 uint32_t currentFrameNumber;
3747 buffer_handle_t *depthBuffer;
3748
3749 if (nullptr == mDepthChannel) {
3750 LOGE("Depth channel not present!");
3751 return;
3752 }
3753
3754 camera3_stream_buffer_t resultBuffer =
3755 {.acquire_fence = -1,
3756 .release_fence = -1,
3757 .status = CAMERA3_BUFFER_STATUS_OK,
3758 .buffer = nullptr,
3759 .stream = mDepthChannel->getStream()};
3760 camera3_capture_result_t result =
3761 {.result = nullptr,
3762 .num_output_buffers = 1,
3763 .output_buffers = &resultBuffer,
3764 .partial_result = 0,
3765 .frame_number = 0};
3766
3767 do {
3768 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3769 if (nullptr == depthBuffer) {
3770 break;
3771 }
3772
3773 result.frame_number = currentFrameNumber;
3774 resultBuffer.buffer = depthBuffer;
3775 if (currentFrameNumber == frameNumber) {
3776 int32_t rc = mDepthChannel->populateDepthData(depthData,
3777 frameNumber);
3778 if (NO_ERROR != rc) {
3779 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3780 } else {
3781 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3782 }
3783 } else if (currentFrameNumber > frameNumber) {
3784 break;
3785 } else {
3786 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3787 {{currentFrameNumber, mDepthChannel->getStream(),
3788 CAMERA3_MSG_ERROR_BUFFER}}};
3789 orchestrateNotify(&notify_msg);
3790
3791 LOGE("Depth buffer for frame number: %d is missing "
3792 "returning back!", currentFrameNumber);
3793 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3794 }
3795 mDepthChannel->unmapBuffer(currentFrameNumber);
3796
3797 orchestrateResult(&result);
3798 } while (currentFrameNumber < frameNumber);
3799}
3800
3801/*===========================================================================
3802 * FUNCTION : notifyErrorFoPendingDepthData
3803 *
3804 * DESCRIPTION: Returns error for any pending depth buffers
3805 *
3806 * PARAMETERS : depthCh - depth channel that needs to get flushed
3807 *
3808 * RETURN :
3809 *
3810 *==========================================================================*/
3811void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3812 QCamera3DepthChannel *depthCh) {
3813 uint32_t currentFrameNumber;
3814 buffer_handle_t *depthBuffer;
3815
3816 if (nullptr == depthCh) {
3817 return;
3818 }
3819
3820 camera3_notify_msg_t notify_msg =
3821 {.type = CAMERA3_MSG_ERROR,
3822 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3823 camera3_stream_buffer_t resultBuffer =
3824 {.acquire_fence = -1,
3825 .release_fence = -1,
3826 .buffer = nullptr,
3827 .stream = depthCh->getStream(),
3828 .status = CAMERA3_BUFFER_STATUS_ERROR};
3829 camera3_capture_result_t result =
3830 {.result = nullptr,
3831 .frame_number = 0,
3832 .num_output_buffers = 1,
3833 .partial_result = 0,
3834 .output_buffers = &resultBuffer};
3835
3836 while (nullptr !=
3837 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3838 depthCh->unmapBuffer(currentFrameNumber);
3839
3840 notify_msg.message.error.frame_number = currentFrameNumber;
3841 orchestrateNotify(&notify_msg);
3842
3843 resultBuffer.buffer = depthBuffer;
3844 result.frame_number = currentFrameNumber;
3845 orchestrateResult(&result);
3846 };
3847}
3848
3849/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003850 * FUNCTION : hdrPlusPerfLock
3851 *
3852 * DESCRIPTION: perf lock for HDR+ using custom intent
3853 *
3854 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3855 *
3856 * RETURN : None
3857 *
3858 *==========================================================================*/
3859void QCamera3HardwareInterface::hdrPlusPerfLock(
3860 mm_camera_super_buf_t *metadata_buf)
3861{
3862 if (NULL == metadata_buf) {
3863 LOGE("metadata_buf is NULL");
3864 return;
3865 }
3866 metadata_buffer_t *metadata =
3867 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3868 int32_t *p_frame_number_valid =
3869 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3870 uint32_t *p_frame_number =
3871 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3872
3873 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3874 LOGE("%s: Invalid metadata", __func__);
3875 return;
3876 }
3877
3878 //acquire perf lock for 5 sec after the last HDR frame is captured
3879 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3880 if ((p_frame_number != NULL) &&
3881 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003882 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003883 }
3884 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003885}
3886
3887/*===========================================================================
3888 * FUNCTION : handleInputBufferWithLock
3889 *
3890 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3891 *
3892 * PARAMETERS : @frame_number: frame number of the input buffer
3893 *
3894 * RETURN :
3895 *
3896 *==========================================================================*/
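// Behavior sketch (derived from the code below): for a reprocess request, the shutter
// timestamp is taken from ANDROID_SENSOR_TIMESTAMP in the request settings when present
// (otherwise the current CLOCK_MONOTONIC time is used), and the capture result carries
// the echoed settings plus the input buffer with partial_result = PARTIAL_RESULT_COUNT.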
3897void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3898{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003899 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003900 pendingRequestIterator i = mPendingRequestsList.begin();
3901 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3902 i++;
3903 }
3904 if (i != mPendingRequestsList.end() && i->input_buffer) {
3905 //found the right request
3906 if (!i->shutter_notified) {
3907 CameraMetadata settings;
3908 camera3_notify_msg_t notify_msg;
3909 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3910 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3911 if(i->settings) {
3912 settings = i->settings;
3913 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3914 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3915 } else {
3916 LOGE("No timestamp in input settings! Using current one.");
3917 }
3918 } else {
3919 LOGE("Input settings missing!");
3920 }
3921
3922 notify_msg.type = CAMERA3_MSG_SHUTTER;
3923 notify_msg.message.shutter.frame_number = frame_number;
3924 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003925 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003926 i->shutter_notified = true;
3927 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3928 i->frame_number, notify_msg.message.shutter.timestamp);
3929 }
3930
3931 if (i->input_buffer->release_fence != -1) {
3932 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3933 close(i->input_buffer->release_fence);
3934 if (rc != OK) {
3935 LOGE("input buffer sync wait failed %d", rc);
3936 }
3937 }
3938
3939 camera3_capture_result result;
3940 memset(&result, 0, sizeof(camera3_capture_result));
3941 result.frame_number = frame_number;
3942 result.result = i->settings;
3943 result.input_buffer = i->input_buffer;
3944 result.partial_result = PARTIAL_RESULT_COUNT;
3945
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003946 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003947 LOGD("Input request metadata and input buffer frame_number = %u",
3948 i->frame_number);
3949 i = erasePendingRequest(i);
3950 } else {
3951 LOGE("Could not find input request for frame number %d", frame_number);
3952 }
3953}
3954
3955/*===========================================================================
3956 * FUNCTION : handleBufferWithLock
3957 *
3958 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3959 *
3960 * PARAMETERS : @buffer: image buffer for the callback
3961 * @frame_number: frame number of the image buffer
3962 *
3963 * RETURN :
3964 *
3965 *==========================================================================*/
3966void QCamera3HardwareInterface::handleBufferWithLock(
3967 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3968{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003969 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003970
3971 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3972 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3973 }
3974
Thierry Strudel3d639192016-09-09 11:52:26 -07003975 /* Nothing to be done during error state */
3976 if ((ERROR == mState) || (DEINIT == mState)) {
3977 return;
3978 }
3979 if (mFlushPerf) {
3980 handleBuffersDuringFlushLock(buffer);
3981 return;
3982 }
3983 //not in flush
 3984    // If the frame number doesn't exist in the pending request list,
 3985    // directly send the buffer to the framework and update the pending buffers map.
 3986    // Otherwise, book-keep the buffer.
3987 pendingRequestIterator i = mPendingRequestsList.begin();
3988 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3989 i++;
3990 }
3991 if (i == mPendingRequestsList.end()) {
3992 // Verify all pending requests frame_numbers are greater
3993 for (pendingRequestIterator j = mPendingRequestsList.begin();
3994 j != mPendingRequestsList.end(); j++) {
3995 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3996 LOGW("Error: pending live frame number %d is smaller than %d",
3997 j->frame_number, frame_number);
3998 }
3999 }
4000 camera3_capture_result_t result;
4001 memset(&result, 0, sizeof(camera3_capture_result_t));
4002 result.result = NULL;
4003 result.frame_number = frame_number;
4004 result.num_output_buffers = 1;
4005 result.partial_result = 0;
4006 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4007 m != mPendingFrameDropList.end(); m++) {
4008 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4009 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4010 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4011 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4012 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4013 frame_number, streamID);
4014 m = mPendingFrameDropList.erase(m);
4015 break;
4016 }
4017 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004018 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07004019 result.output_buffers = buffer;
4020 LOGH("result frame_number = %d, buffer = %p",
4021 frame_number, buffer->buffer);
4022
4023 mPendingBuffersMap.removeBuf(buffer->buffer);
4024
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004025 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004026 } else {
4027 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004028 if (i->input_buffer->release_fence != -1) {
4029 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
4030 close(i->input_buffer->release_fence);
4031 if (rc != OK) {
4032 LOGE("input buffer sync wait failed %d", rc);
4033 }
4034 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004035 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004036
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004037 // Put buffer into the pending request
4038 for (auto &requestedBuffer : i->buffers) {
4039 if (requestedBuffer.stream == buffer->stream) {
4040 if (requestedBuffer.buffer != nullptr) {
4041 LOGE("Error: buffer is already set");
4042 } else {
4043 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
4044 sizeof(camera3_stream_buffer_t));
4045 *(requestedBuffer.buffer) = *buffer;
4046 LOGH("cache buffer %p at result frame_number %u",
4047 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07004048 }
4049 }
4050 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004051
4052 if (i->input_buffer) {
4053 // For a reprocessing request, try to send out shutter callback and result metadata.
4054 handlePendingResultsWithLock(frame_number, nullptr);
4055 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004056 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004057
4058 if (mPreviewStarted == false) {
4059 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4060 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004061 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4062
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004063 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4064 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4065 mPreviewStarted = true;
4066
4067 // Set power hint for preview
4068 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4069 }
4070 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004071}
4072
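/*===========================================================================
 * FUNCTION   : handlePendingResultsWithLock
 *
 * DESCRIPTION: Updates the pending request matching frameNumber with the given
 *              result metadata, then walks the pending request list in order,
 *              sending out any shutter callbacks, result metadata and output
 *              buffers that have become ready. Called with mMutex held.
 *
 * PARAMETERS : @frameNumber   : frame number the result metadata belongs to
 *              @resultMetadata: result metadata for the request (may be nullptr)
 *
 * RETURN     :
 *
 *==========================================================================*/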
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004073void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
4074 const camera_metadata_t *resultMetadata)
4075{
4076 // Find the pending request for this result metadata.
4077 auto requestIter = mPendingRequestsList.begin();
4078 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4079 requestIter++;
4080 }
4081
4082 if (requestIter == mPendingRequestsList.end()) {
4083 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4084 return;
4085 }
4086
4087 // Update the result metadata
4088 requestIter->resultMetadata = resultMetadata;
4089
4090 // Check what type of request this is.
4091 bool liveRequest = false;
4092 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004093 // HDR+ request doesn't have partial results.
4094 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004095 } else if (requestIter->input_buffer != nullptr) {
4096 // Reprocessing request result is the same as settings.
4097 requestIter->resultMetadata = requestIter->settings;
4098 // Reprocessing request doesn't have partial results.
4099 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4100 } else {
4101 liveRequest = true;
4102 requestIter->partial_result_cnt++;
4103 mPendingLiveRequest--;
4104
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004105 {
4106 Mutex::Autolock l(gHdrPlusClientLock);
4107 // For a live request, send the metadata to HDR+ client.
4108 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4109 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4110 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4111 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004112 }
4113 }
4114
4115 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4116 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
4117 bool readyToSend = true;
4118
4119 // Iterate through the pending requests to send out shutter callbacks and results that are
4120 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4121 // live requests that don't have result metadata yet.
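    // Example (illustrative): with pending frames {10, 11, 12} and metadata arriving
    // for 11 first, nothing is sent until 10's metadata arrives; once it does, 10 and
    // 11 are sent in order while 12 keeps waiting.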
4122 auto iter = mPendingRequestsList.begin();
4123 while (iter != mPendingRequestsList.end()) {
4124 // Check if current pending request is ready. If it's not ready, the following pending
4125 // requests are also not ready.
4126 if (readyToSend && iter->resultMetadata == nullptr) {
4127 readyToSend = false;
4128 }
4129
4130 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4131
4132 std::vector<camera3_stream_buffer_t> outputBuffers;
4133
4134 camera3_capture_result_t result = {};
4135 result.frame_number = iter->frame_number;
4136 result.result = iter->resultMetadata;
4137 result.partial_result = iter->partial_result_cnt;
4138
4139 // If this pending buffer has result metadata, we may be able to send out shutter callback
4140 // and result metadata.
4141 if (iter->resultMetadata != nullptr) {
4142 if (!readyToSend) {
 4143            // If any of the previous pending requests is not ready, this pending
 4144            // request is also not ready to send, so that shutter callbacks and
 4145            // result metadata stay in order.
4146 iter++;
4147 continue;
4148 }
4149
4150 // Invoke shutter callback if not yet.
4151 if (!iter->shutter_notified) {
4152 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4153
4154 // Find the timestamp in HDR+ result metadata
4155 camera_metadata_ro_entry_t entry;
4156 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4157 ANDROID_SENSOR_TIMESTAMP, &entry);
4158 if (res != OK) {
4159 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4160 __FUNCTION__, iter->frame_number, strerror(-res), res);
4161 } else {
4162 timestamp = entry.data.i64[0];
4163 }
4164
4165 camera3_notify_msg_t notify_msg = {};
4166 notify_msg.type = CAMERA3_MSG_SHUTTER;
4167 notify_msg.message.shutter.frame_number = iter->frame_number;
4168 notify_msg.message.shutter.timestamp = timestamp;
4169 orchestrateNotify(&notify_msg);
4170 iter->shutter_notified = true;
4171 }
4172
4173 result.input_buffer = iter->input_buffer;
4174
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004175 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4176 // If the result metadata belongs to a live request, notify errors for previous pending
4177 // live requests.
4178 mPendingLiveRequest--;
4179
4180 CameraMetadata dummyMetadata;
4181 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4182 result.result = dummyMetadata.release();
4183
4184 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004185
 4186            // partial_result should be PARTIAL_RESULT_COUNT in case of
 4187            // ERROR_RESULT.
4188 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4189 result.partial_result = PARTIAL_RESULT_COUNT;
4190
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004191 } else {
4192 iter++;
4193 continue;
4194 }
4195
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004196 // Prepare output buffer array
4197 for (auto bufferInfoIter = iter->buffers.begin();
4198 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4199 if (bufferInfoIter->buffer != nullptr) {
4200
4201 QCamera3Channel *channel =
4202 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4203 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4204
4205 // Check if this buffer is a dropped frame.
4206 auto frameDropIter = mPendingFrameDropList.begin();
4207 while (frameDropIter != mPendingFrameDropList.end()) {
4208 if((frameDropIter->stream_ID == streamID) &&
4209 (frameDropIter->frame_number == frameNumber)) {
4210 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4211 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4212 streamID);
4213 mPendingFrameDropList.erase(frameDropIter);
4214 break;
4215 } else {
4216 frameDropIter++;
4217 }
4218 }
4219
4220 // Check buffer error status
4221 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4222 bufferInfoIter->buffer->buffer);
4223 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4224
4225 outputBuffers.push_back(*(bufferInfoIter->buffer));
4226 free(bufferInfoIter->buffer);
4227 bufferInfoIter->buffer = NULL;
4228 }
4229 }
4230
4231 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4232 result.num_output_buffers = outputBuffers.size();
4233
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004234 orchestrateResult(&result);
4235
4236 // For reprocessing, result metadata is the same as settings so do not free it here to
4237 // avoid double free.
4238 if (result.result != iter->settings) {
4239 free_camera_metadata((camera_metadata_t *)result.result);
4240 }
4241 iter->resultMetadata = nullptr;
4242 iter = erasePendingRequest(iter);
4243 }
4244
4245 if (liveRequest) {
4246 for (auto &iter : mPendingRequestsList) {
4247 // Increment pipeline depth for the following pending requests.
4248 if (iter.frame_number > frameNumber) {
4249 iter.pipeline_depth++;
4250 }
4251 }
4252 }
4253
4254 unblockRequestIfNecessary();
4255}
4256
Thierry Strudel3d639192016-09-09 11:52:26 -07004257/*===========================================================================
4258 * FUNCTION : unblockRequestIfNecessary
4259 *
4260 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4261 * that mMutex is held when this function is called.
4262 *
4263 * PARAMETERS :
4264 *
4265 * RETURN :
4266 *
4267 *==========================================================================*/
4268void QCamera3HardwareInterface::unblockRequestIfNecessary()
4269{
4270 // Unblock process_capture_request
4271 pthread_cond_signal(&mRequestCond);
4272}
4273
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004274/*===========================================================================
4275 * FUNCTION : isHdrSnapshotRequest
4276 *
4277 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4278 *
4279 * PARAMETERS : camera3 request structure
4280 *
4281 * RETURN : boolean decision variable
4282 *
4283 *==========================================================================*/
4284bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4285{
4286 if (request == NULL) {
4287 LOGE("Invalid request handle");
4288 assert(0);
4289 return false;
4290 }
4291
4292 if (!mForceHdrSnapshot) {
4293 CameraMetadata frame_settings;
4294 frame_settings = request->settings;
4295
4296 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4297 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4298 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4299 return false;
4300 }
4301 } else {
4302 return false;
4303 }
4304
4305 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4306 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4307 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4308 return false;
4309 }
4310 } else {
4311 return false;
4312 }
4313 }
4314
4315 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4316 if (request->output_buffers[i].stream->format
4317 == HAL_PIXEL_FORMAT_BLOB) {
4318 return true;
4319 }
4320 }
4321
4322 return false;
4323}
4324/*===========================================================================
4325 * FUNCTION : orchestrateRequest
4326 *
4327 * DESCRIPTION: Orchestrates a capture request from camera service
4328 *
4329 * PARAMETERS :
4330 * @request : request from framework to process
4331 *
4332 * RETURN : Error status codes
4333 *
4334 *==========================================================================*/
4335int32_t QCamera3HardwareInterface::orchestrateRequest(
4336 camera3_capture_request_t *request)
4337{
4338
4339 uint32_t originalFrameNumber = request->frame_number;
4340 uint32_t originalOutputCount = request->num_output_buffers;
4341 const camera_metadata_t *original_settings = request->settings;
4342 List<InternalRequest> internallyRequestedStreams;
4343 List<InternalRequest> emptyInternalList;
4344
4345 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4346 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4347 uint32_t internalFrameNumber;
4348 CameraMetadata modified_meta;
4349
4350
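        /*
         * Illustrative summary of the HDR bracketing sequence below:
         *   1. A metering-only settling capture at GB_HDR_HALF_STEP_EV, followed by
         *      the framework's own request captured at that compensation and mapped
         *      back to the original framework frame number.
         *   2. A settling capture and then a metadata capture on the blob stream at 0 EV.
         *   3. A settling capture and then a metadata capture at GB_HDR_2X_STEP_EV.
         * Every internal capture gets a freshly generated internal frame number so its
         * results are dropped instead of being reported to the framework.
         */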
4351 /* Add Blob channel to list of internally requested streams */
4352 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4353 if (request->output_buffers[i].stream->format
4354 == HAL_PIXEL_FORMAT_BLOB) {
4355 InternalRequest streamRequested;
4356 streamRequested.meteringOnly = 1;
4357 streamRequested.need_metadata = 0;
4358 streamRequested.stream = request->output_buffers[i].stream;
4359 internallyRequestedStreams.push_back(streamRequested);
4360 }
4361 }
4362 request->num_output_buffers = 0;
4363 auto itr = internallyRequestedStreams.begin();
4364
4365 /* Modify setting to set compensation */
4366 modified_meta = request->settings;
4367 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4368 uint8_t aeLock = 1;
4369 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4370 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4371 camera_metadata_t *modified_settings = modified_meta.release();
4372 request->settings = modified_settings;
4373
4374 /* Capture Settling & -2x frame */
4375 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4376 request->frame_number = internalFrameNumber;
4377 processCaptureRequest(request, internallyRequestedStreams);
4378
4379 request->num_output_buffers = originalOutputCount;
4380 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4381 request->frame_number = internalFrameNumber;
4382 processCaptureRequest(request, emptyInternalList);
4383 request->num_output_buffers = 0;
4384
4385 modified_meta = modified_settings;
4386 expCompensation = 0;
4387 aeLock = 1;
4388 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4389 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4390 modified_settings = modified_meta.release();
4391 request->settings = modified_settings;
4392
4393 /* Capture Settling & 0X frame */
4394
4395 itr = internallyRequestedStreams.begin();
4396 if (itr == internallyRequestedStreams.end()) {
4397 LOGE("Error Internally Requested Stream list is empty");
4398 assert(0);
4399 } else {
4400 itr->need_metadata = 0;
4401 itr->meteringOnly = 1;
4402 }
4403
4404 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4405 request->frame_number = internalFrameNumber;
4406 processCaptureRequest(request, internallyRequestedStreams);
4407
4408 itr = internallyRequestedStreams.begin();
4409 if (itr == internallyRequestedStreams.end()) {
4410 ALOGE("Error Internally Requested Stream list is empty");
4411 assert(0);
4412 } else {
4413 itr->need_metadata = 1;
4414 itr->meteringOnly = 0;
4415 }
4416
4417 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4418 request->frame_number = internalFrameNumber;
4419 processCaptureRequest(request, internallyRequestedStreams);
4420
4421 /* Capture 2X frame*/
4422 modified_meta = modified_settings;
4423 expCompensation = GB_HDR_2X_STEP_EV;
4424 aeLock = 1;
4425 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4426 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4427 modified_settings = modified_meta.release();
4428 request->settings = modified_settings;
4429
4430 itr = internallyRequestedStreams.begin();
4431 if (itr == internallyRequestedStreams.end()) {
4432 ALOGE("Error Internally Requested Stream list is empty");
4433 assert(0);
4434 } else {
4435 itr->need_metadata = 0;
4436 itr->meteringOnly = 1;
4437 }
4438 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4439 request->frame_number = internalFrameNumber;
4440 processCaptureRequest(request, internallyRequestedStreams);
4441
4442 itr = internallyRequestedStreams.begin();
4443 if (itr == internallyRequestedStreams.end()) {
4444 ALOGE("Error Internally Requested Stream list is empty");
4445 assert(0);
4446 } else {
4447 itr->need_metadata = 1;
4448 itr->meteringOnly = 0;
4449 }
4450
4451 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4452 request->frame_number = internalFrameNumber;
4453 processCaptureRequest(request, internallyRequestedStreams);
4454
4455
4456 /* Capture 2X on original streaming config*/
4457 internallyRequestedStreams.clear();
4458
4459 /* Restore original settings pointer */
4460 request->settings = original_settings;
4461 } else {
4462 uint32_t internalFrameNumber;
4463 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4464 request->frame_number = internalFrameNumber;
4465 return processCaptureRequest(request, internallyRequestedStreams);
4466 }
4467
4468 return NO_ERROR;
4469}
4470
4471/*===========================================================================
4472 * FUNCTION : orchestrateResult
4473 *
4474 * DESCRIPTION: Orchestrates a capture result to camera service
4475 *
4476 * PARAMETERS :
 4477 *   @result : capture result to be sent to the framework
4478 *
4479 * RETURN :
4480 *
4481 *==========================================================================*/
4482void QCamera3HardwareInterface::orchestrateResult(
4483 camera3_capture_result_t *result)
4484{
4485 uint32_t frameworkFrameNumber;
4486 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4487 frameworkFrameNumber);
4488 if (rc != NO_ERROR) {
4489 LOGE("Cannot find translated frameworkFrameNumber");
4490 assert(0);
4491 } else {
4492 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004493 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004494 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004495 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004496 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4497 camera_metadata_entry_t entry;
4498 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4499 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004500 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004501 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4502 if (ret != OK)
4503 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004504 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004505 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004506 result->frame_number = frameworkFrameNumber;
4507 mCallbackOps->process_capture_result(mCallbackOps, result);
4508 }
4509 }
4510}
4511
4512/*===========================================================================
4513 * FUNCTION : orchestrateNotify
4514 *
4515 * DESCRIPTION: Orchestrates a notify to camera service
4516 *
4517 * PARAMETERS :
 4518 *   @notify_msg : notify message to be sent to the framework
4519 *
4520 * RETURN :
4521 *
4522 *==========================================================================*/
4523void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4524{
4525 uint32_t frameworkFrameNumber;
4526 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004527 int32_t rc = NO_ERROR;
4528
4529 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004530 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004531
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004532 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004533 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4534 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4535 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004536 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004537 LOGE("Cannot find translated frameworkFrameNumber");
4538 assert(0);
4539 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004540 }
4541 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004542
4543 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4544 LOGD("Internal Request drop the notifyCb");
4545 } else {
4546 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4547 mCallbackOps->notify(mCallbackOps, notify_msg);
4548 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004549}
4550
4551/*===========================================================================
4552 * FUNCTION : FrameNumberRegistry
4553 *
4554 * DESCRIPTION: Constructor
4555 *
4556 * PARAMETERS :
4557 *
4558 * RETURN :
4559 *
4560 *==========================================================================*/
4561FrameNumberRegistry::FrameNumberRegistry()
4562{
4563 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4564}
4565
4566/*===========================================================================
4567 * FUNCTION : ~FrameNumberRegistry
4568 *
4569 * DESCRIPTION: Destructor
4570 *
4571 * PARAMETERS :
4572 *
4573 * RETURN :
4574 *
4575 *==========================================================================*/
4576FrameNumberRegistry::~FrameNumberRegistry()
4577{
4578}
4579
4580/*===========================================================================
4581 * FUNCTION : PurgeOldEntriesLocked
4582 *
 4583 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4584 *
4585 * PARAMETERS :
4586 *
4587 * RETURN : NONE
4588 *
4589 *==========================================================================*/
4590void FrameNumberRegistry::purgeOldEntriesLocked()
4591{
4592 while (_register.begin() != _register.end()) {
4593 auto itr = _register.begin();
4594 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4595 _register.erase(itr);
4596 } else {
4597 return;
4598 }
4599 }
4600}
4601
4602/*===========================================================================
4603 * FUNCTION : allocStoreInternalFrameNumber
4604 *
 4605 * DESCRIPTION: Method to record a framework frame number and associate a newly
 4606 *              generated internal frame number with it
 4607 *
 4608 * PARAMETERS :
 4609 *   @frameworkFrameNumber: Identifier given by the framework
 4610 *   @internalFrameNumber : Output parameter receiving the newly generated
 4611 *                          internal frame number
4612 *
4613 * RETURN : Error code
4614 *
4615 *==========================================================================*/
4616int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4617 uint32_t &internalFrameNumber)
4618{
4619 Mutex::Autolock lock(mRegistryLock);
4620 internalFrameNumber = _nextFreeInternalNumber++;
4621 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4622 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4623 purgeOldEntriesLocked();
4624 return NO_ERROR;
4625}
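// Usage sketch (illustrative, mirroring orchestrateRequest/orchestrateResult above):
//   uint32_t internalFN;
//   _orchestrationDb.allocStoreInternalFrameNumber(fwkFrameNumber, internalFN); // fwk <-> internal
//   _orchestrationDb.generateStoreInternalFrameNumber(internalFN);              // internal-only capture
// A result carrying an internal-only number resolves to EMPTY_FRAMEWORK_FRAME_NUMBER
// and is dropped instead of being forwarded to the framework.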
4626
4627/*===========================================================================
4628 * FUNCTION : generateStoreInternalFrameNumber
4629 *
 4630 * DESCRIPTION: Method to generate a new internal frame number that is not
 4631 *              associated with any framework request
 4632 *
 4633 * PARAMETERS :
 4634 *   @internalFrameNumber: Output parameter receiving the newly generated
 4635 *                         internal frame number
 4636 *
4637 * RETURN : Error code
4638 *
4639 *==========================================================================*/
4640int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4641{
4642 Mutex::Autolock lock(mRegistryLock);
4643 internalFrameNumber = _nextFreeInternalNumber++;
4644 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4645 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4646 purgeOldEntriesLocked();
4647 return NO_ERROR;
4648}
4649
4650/*===========================================================================
4651 * FUNCTION : getFrameworkFrameNumber
4652 *
4653 * DESCRIPTION: Method to query the framework framenumber given an internal #
4654 *
4655 * PARAMETERS :
 4656 *   @internalFrameNumber: Internal frame number to look up
 4657 *   @frameworkFrameNumber: Output parameter holding the framework frame number
4658 *
4659 * RETURN : Error code
4660 *
4661 *==========================================================================*/
4662int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4663 uint32_t &frameworkFrameNumber)
4664{
4665 Mutex::Autolock lock(mRegistryLock);
4666 auto itr = _register.find(internalFrameNumber);
4667 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004668 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004669 return -ENOENT;
4670 }
4671
4672 frameworkFrameNumber = itr->second;
4673 purgeOldEntriesLocked();
4674 return NO_ERROR;
4675}
Thierry Strudel3d639192016-09-09 11:52:26 -07004676
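/*===========================================================================
 * FUNCTION   : fillPbStreamConfig
 *
 * DESCRIPTION: Fills an HDR+ (pbcamera) stream configuration from a channel's
 *              stream info: dimensions, format, per-plane stride/scanline and
 *              the padding implied by the frame length.
 *
 * PARAMETERS : @config        : output pbcamera stream configuration
 *              @pbStreamId    : HDR+ stream ID to assign
 *              @pbStreamFormat: HDR+ pixel format to assign
 *              @channel       : channel that owns the stream
 *              @streamIndex   : index of the stream within the channel
 *
 * RETURN     : OK on success, error code otherwise
 *
 *==========================================================================*/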
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004677status_t QCamera3HardwareInterface::fillPbStreamConfig(
4678 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4679 QCamera3Channel *channel, uint32_t streamIndex) {
4680 if (config == nullptr) {
4681 LOGE("%s: config is null", __FUNCTION__);
4682 return BAD_VALUE;
4683 }
4684
4685 if (channel == nullptr) {
4686 LOGE("%s: channel is null", __FUNCTION__);
4687 return BAD_VALUE;
4688 }
4689
4690 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4691 if (stream == nullptr) {
4692 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4693 return NAME_NOT_FOUND;
4694 }
4695
4696 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4697 if (streamInfo == nullptr) {
4698 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4699 return NAME_NOT_FOUND;
4700 }
4701
4702 config->id = pbStreamId;
4703 config->image.width = streamInfo->dim.width;
4704 config->image.height = streamInfo->dim.height;
4705 config->image.padding = 0;
4706 config->image.format = pbStreamFormat;
4707
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004708 uint32_t totalPlaneSize = 0;
4709
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004710 // Fill plane information.
4711 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4712 pbcamera::PlaneConfiguration plane;
4713 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4714 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4715 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004716
4717 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004718 }
4719
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004720 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004721 return OK;
4722}
4723
Thierry Strudel3d639192016-09-09 11:52:26 -07004724/*===========================================================================
4725 * FUNCTION : processCaptureRequest
4726 *
4727 * DESCRIPTION: process a capture request from camera service
4728 *
4729 * PARAMETERS :
4730 * @request : request from framework to process
4731 *
4732 * RETURN :
4733 *
4734 *==========================================================================*/
4735int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004736 camera3_capture_request_t *request,
4737 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004738{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004739 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004740 int rc = NO_ERROR;
4741 int32_t request_id;
4742 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004743 bool isVidBufRequested = false;
4744 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004745 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004746
4747 pthread_mutex_lock(&mMutex);
4748
4749 // Validate current state
4750 switch (mState) {
4751 case CONFIGURED:
4752 case STARTED:
4753 /* valid state */
4754 break;
4755
4756 case ERROR:
4757 pthread_mutex_unlock(&mMutex);
4758 handleCameraDeviceError();
4759 return -ENODEV;
4760
4761 default:
4762 LOGE("Invalid state %d", mState);
4763 pthread_mutex_unlock(&mMutex);
4764 return -ENODEV;
4765 }
4766
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004767 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004768 if (rc != NO_ERROR) {
4769 LOGE("incoming request is not valid");
4770 pthread_mutex_unlock(&mMutex);
4771 return rc;
4772 }
4773
4774 meta = request->settings;
4775
4776 // For first capture request, send capture intent, and
4777 // stream on all streams
4778 if (mState == CONFIGURED) {
4779 // send an unconfigure to the backend so that the isp
4780 // resources are deallocated
4781 if (!mFirstConfiguration) {
4782 cam_stream_size_info_t stream_config_info;
4783 int32_t hal_version = CAM_HAL_V3;
4784 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4785 stream_config_info.buffer_info.min_buffers =
4786 MIN_INFLIGHT_REQUESTS;
4787 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004788 m_bIs4KVideo ? 0 :
4789 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004790 clear_metadata_buffer(mParameters);
4791 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4792 CAM_INTF_PARM_HAL_VERSION, hal_version);
4793 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4794 CAM_INTF_META_STREAM_INFO, stream_config_info);
4795 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4796 mParameters);
4797 if (rc < 0) {
4798 LOGE("set_parms for unconfigure failed");
4799 pthread_mutex_unlock(&mMutex);
4800 return rc;
4801 }
4802 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004803 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004804 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004805 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004806 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004807 property_get("persist.camera.is_type", is_type_value, "4");
4808 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4809 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4810 property_get("persist.camera.is_type_preview", is_type_value, "4");
4811 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4812 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004813
4814 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4815 int32_t hal_version = CAM_HAL_V3;
4816 uint8_t captureIntent =
4817 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4818 mCaptureIntent = captureIntent;
4819 clear_metadata_buffer(mParameters);
4820 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4821 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4822 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004823 if (mFirstConfiguration) {
4824 // configure instant AEC
4825 // Instant AEC is a session based parameter and it is needed only
4826 // once per complete session after open camera.
4827 // i.e. This is set only once for the first capture request, after open camera.
4828 setInstantAEC(meta);
4829 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004830 uint8_t fwkVideoStabMode=0;
4831 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4832 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4833 }
4834
Xue Tuecac74e2017-04-17 13:58:15 -07004835        // If the EIS setprop is enabled, turn EIS on only for video/preview streams
4836 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004837 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004838 int32_t vsMode;
4839 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4840 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4841 rc = BAD_VALUE;
4842 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004843 LOGD("setEis %d", setEis);
4844 bool eis3Supported = false;
4845 size_t count = IS_TYPE_MAX;
4846 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4847 for (size_t i = 0; i < count; i++) {
4848 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4849 eis3Supported = true;
4850 break;
4851 }
4852 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004853
4854 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004855 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004856 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4857 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004858 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4859 is_type = isTypePreview;
4860 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4861 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4862 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004863 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004864 } else {
4865 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004866 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004867 } else {
4868 is_type = IS_TYPE_NONE;
4869 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004870 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004871 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004872 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4873 }
4874 }
4875
4876 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4877 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4878
Thierry Strudel54dc9782017-02-15 12:12:10 -08004879 //Disable tintless only if the property is set to 0
4880 memset(prop, 0, sizeof(prop));
4881 property_get("persist.camera.tintless.enable", prop, "1");
4882 int32_t tintless_value = atoi(prop);
4883
Thierry Strudel3d639192016-09-09 11:52:26 -07004884 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4885 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004886
Thierry Strudel3d639192016-09-09 11:52:26 -07004887 //Disable CDS for HFR mode or if DIS/EIS is on.
4888 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4889 //after every configure_stream
4890 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4891 (m_bIsVideo)) {
4892 int32_t cds = CAM_CDS_MODE_OFF;
4893 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4894 CAM_INTF_PARM_CDS_MODE, cds))
4895 LOGE("Failed to disable CDS for HFR mode");
4896
4897 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004898
4899 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4900 uint8_t* use_av_timer = NULL;
4901
4902 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004903 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004904 use_av_timer = &m_debug_avtimer;
4905 }
4906 else{
4907 use_av_timer =
4908 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004909 if (use_av_timer) {
4910 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4911 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004912 }
4913
4914 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4915 rc = BAD_VALUE;
4916 }
4917 }
4918
Thierry Strudel3d639192016-09-09 11:52:26 -07004919 setMobicat();
4920
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004921 uint8_t nrMode = 0;
4922 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4923 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4924 }
4925
Thierry Strudel3d639192016-09-09 11:52:26 -07004926 /* Set fps and hfr mode while sending meta stream info so that sensor
4927 * can configure appropriate streaming mode */
4928 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004929 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4930 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004931 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4932 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004933 if (rc == NO_ERROR) {
4934 int32_t max_fps =
4935 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004936 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004937 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4938 }
4939 /* For HFR, more buffers are dequeued upfront to improve the performance */
4940 if (mBatchSize) {
4941 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4942 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4943 }
4944 }
4945 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004946 LOGE("setHalFpsRange failed");
4947 }
4948 }
4949 if (meta.exists(ANDROID_CONTROL_MODE)) {
4950 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4951 rc = extractSceneMode(meta, metaMode, mParameters);
4952 if (rc != NO_ERROR) {
4953 LOGE("extractSceneMode failed");
4954 }
4955 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004956 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004957
Thierry Strudel04e026f2016-10-10 11:27:36 -07004958 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4959 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4960 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4961 rc = setVideoHdrMode(mParameters, vhdr);
4962 if (rc != NO_ERROR) {
4963 LOGE("setVideoHDR is failed");
4964 }
4965 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004966
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004967 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004968 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004969 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004970 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4971 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
4972 sensorModeFullFov)) {
4973 rc = BAD_VALUE;
4974 }
4975 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004976 //TODO: validate the arguments, HSV scenemode should have only the
4977 //advertised fps ranges
4978
 4979    /*set the capture intent, hal version, tintless, stream info,
 4980     *and DIS enable parameters to the backend*/
4981 LOGD("set_parms META_STREAM_INFO " );
4982 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004983 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4984 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004985 mStreamConfigInfo.type[i],
4986 mStreamConfigInfo.stream_sizes[i].width,
4987 mStreamConfigInfo.stream_sizes[i].height,
4988 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004989 mStreamConfigInfo.format[i],
4990 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004991 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004992
Thierry Strudel3d639192016-09-09 11:52:26 -07004993 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4994 mParameters);
4995 if (rc < 0) {
4996 LOGE("set_parms failed for hal version, stream info");
4997 }
4998
Chien-Yu Chenee335912017-02-09 17:53:20 -08004999 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5000 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005001 if (rc != NO_ERROR) {
5002 LOGE("Failed to get sensor output size");
5003 pthread_mutex_unlock(&mMutex);
5004 goto error_exit;
5005 }
5006
5007 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5008 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08005009 mSensorModeInfo.active_array_size.width,
5010 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005011
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005012 {
5013 Mutex::Autolock l(gHdrPlusClientLock);
5014 if (EaselManagerClientOpened) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07005015 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005016 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk);
5017 if (rc != OK) {
5018 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5019 mCameraId, mSensorModeInfo.op_pixel_clk);
5020 pthread_mutex_unlock(&mMutex);
5021 goto error_exit;
5022 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08005023 }
5024 }
5025
Thierry Strudel3d639192016-09-09 11:52:26 -07005026 /* Set batchmode before initializing channel. Since registerBuffer
5027 * internally initializes some of the channels, better set batchmode
5028 * even before first register buffer */
5029 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5030 it != mStreamInfo.end(); it++) {
5031 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5032 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5033 && mBatchSize) {
5034 rc = channel->setBatchSize(mBatchSize);
5035 //Disable per frame map unmap for HFR/batchmode case
5036 rc |= channel->setPerFrameMapUnmap(false);
5037 if (NO_ERROR != rc) {
5038 LOGE("Channel init failed %d", rc);
5039 pthread_mutex_unlock(&mMutex);
5040 goto error_exit;
5041 }
5042 }
5043 }
5044
5045 //First initialize all streams
5046 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5047 it != mStreamInfo.end(); it++) {
5048 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005049
5050 /* Initial value of NR mode is needed before stream on */
5051 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005052 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5053 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005054 setEis) {
5055 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5056 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5057 is_type = mStreamConfigInfo.is_type[i];
5058 break;
5059 }
5060 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005061 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005062 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005063 rc = channel->initialize(IS_TYPE_NONE);
5064 }
5065 if (NO_ERROR != rc) {
5066 LOGE("Channel initialization failed %d", rc);
5067 pthread_mutex_unlock(&mMutex);
5068 goto error_exit;
5069 }
5070 }
5071
5072 if (mRawDumpChannel) {
5073 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5074 if (rc != NO_ERROR) {
5075 LOGE("Error: Raw Dump Channel init failed");
5076 pthread_mutex_unlock(&mMutex);
5077 goto error_exit;
5078 }
5079 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005080 if (mHdrPlusRawSrcChannel) {
5081 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5082 if (rc != NO_ERROR) {
5083 LOGE("Error: HDR+ RAW Source Channel init failed");
5084 pthread_mutex_unlock(&mMutex);
5085 goto error_exit;
5086 }
5087 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005088 if (mSupportChannel) {
5089 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5090 if (rc < 0) {
5091 LOGE("Support channel initialization failed");
5092 pthread_mutex_unlock(&mMutex);
5093 goto error_exit;
5094 }
5095 }
5096 if (mAnalysisChannel) {
5097 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5098 if (rc < 0) {
5099 LOGE("Analysis channel initialization failed");
5100 pthread_mutex_unlock(&mMutex);
5101 goto error_exit;
5102 }
5103 }
5104 if (mDummyBatchChannel) {
5105 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5106 if (rc < 0) {
5107 LOGE("mDummyBatchChannel setBatchSize failed");
5108 pthread_mutex_unlock(&mMutex);
5109 goto error_exit;
5110 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005111 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005112 if (rc < 0) {
5113 LOGE("mDummyBatchChannel initialization failed");
5114 pthread_mutex_unlock(&mMutex);
5115 goto error_exit;
5116 }
5117 }
5118
5119 // Set bundle info
5120 rc = setBundleInfo();
5121 if (rc < 0) {
5122 LOGE("setBundleInfo failed %d", rc);
5123 pthread_mutex_unlock(&mMutex);
5124 goto error_exit;
5125 }
5126
5127 //update settings from app here
5128 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5129 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5130 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5131 }
5132 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5133 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5134 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5135 }
5136 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5137 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5138 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5139
5140 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5141 (mLinkedCameraId != mCameraId) ) {
5142 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5143 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005144 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005145 goto error_exit;
5146 }
5147 }
5148
5149 // add bundle related cameras
5150 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5151 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005152 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5153 &m_pDualCamCmdPtr->bundle_info;
5154 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005155 if (mIsDeviceLinked)
5156 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5157 else
5158 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5159
5160 pthread_mutex_lock(&gCamLock);
5161
5162 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5163 LOGE("Dualcam: Invalid Session Id ");
5164 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005165 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005166 goto error_exit;
5167 }
5168
5169 if (mIsMainCamera == 1) {
5170 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5171 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005172 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005173 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005174 // related session id should be session id of linked session
5175 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5176 } else {
5177 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5178 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005179 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005180 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005181 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5182 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005183 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005184 pthread_mutex_unlock(&gCamLock);
5185
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005186 rc = mCameraHandle->ops->set_dual_cam_cmd(
5187 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005188 if (rc < 0) {
5189 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005190 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005191 goto error_exit;
5192 }
5193 }
5194
5195 //Then start them.
5196 LOGH("Start META Channel");
5197 rc = mMetadataChannel->start();
5198 if (rc < 0) {
5199 LOGE("META channel start failed");
5200 pthread_mutex_unlock(&mMutex);
5201 goto error_exit;
5202 }
5203
5204 if (mAnalysisChannel) {
5205 rc = mAnalysisChannel->start();
5206 if (rc < 0) {
5207 LOGE("Analysis channel start failed");
5208 mMetadataChannel->stop();
5209 pthread_mutex_unlock(&mMutex);
5210 goto error_exit;
5211 }
5212 }
5213
5214 if (mSupportChannel) {
5215 rc = mSupportChannel->start();
5216 if (rc < 0) {
5217 LOGE("Support channel start failed");
5218 mMetadataChannel->stop();
 5219            /* Although support and analysis are mutually exclusive today,
 5220               adding it in any case for future-proofing */
5221 if (mAnalysisChannel) {
5222 mAnalysisChannel->stop();
5223 }
5224 pthread_mutex_unlock(&mMutex);
5225 goto error_exit;
5226 }
5227 }
5228 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5229 it != mStreamInfo.end(); it++) {
5230 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5231 LOGH("Start Processing Channel mask=%d",
5232 channel->getStreamTypeMask());
5233 rc = channel->start();
5234 if (rc < 0) {
5235 LOGE("channel start failed");
5236 pthread_mutex_unlock(&mMutex);
5237 goto error_exit;
5238 }
5239 }
5240
5241 if (mRawDumpChannel) {
5242 LOGD("Starting raw dump stream");
5243 rc = mRawDumpChannel->start();
5244 if (rc != NO_ERROR) {
5245 LOGE("Error Starting Raw Dump Channel");
5246 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5247 it != mStreamInfo.end(); it++) {
5248 QCamera3Channel *channel =
5249 (QCamera3Channel *)(*it)->stream->priv;
5250 LOGH("Stopping Processing Channel mask=%d",
5251 channel->getStreamTypeMask());
5252 channel->stop();
5253 }
5254 if (mSupportChannel)
5255 mSupportChannel->stop();
5256 if (mAnalysisChannel) {
5257 mAnalysisChannel->stop();
5258 }
5259 mMetadataChannel->stop();
5260 pthread_mutex_unlock(&mMutex);
5261 goto error_exit;
5262 }
5263 }
5264
5265 if (mChannelHandle) {
5266
5267 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5268 mChannelHandle);
5269 if (rc != NO_ERROR) {
5270 LOGE("start_channel failed %d", rc);
5271 pthread_mutex_unlock(&mMutex);
5272 goto error_exit;
5273 }
5274 }
5275
5276 goto no_error;
5277error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005278 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005279 return rc;
5280no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005281 mWokenUpByDaemon = false;
5282 mPendingLiveRequest = 0;
5283 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005284 }
5285
Chien-Yu Chenee335912017-02-09 17:53:20 -08005286 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chened0a4c92017-05-01 18:25:03 +00005287 {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005288 Mutex::Autolock l(gHdrPlusClientLock);
5289 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5290 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5291 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5292 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5293 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5294 rc = enableHdrPlusModeLocked();
Chien-Yu Chenee335912017-02-09 17:53:20 -08005295 if (rc != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005296 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -08005297 pthread_mutex_unlock(&mMutex);
5298 return rc;
5299 }
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005300
5301 mFirstPreviewIntentSeen = true;
Chien-Yu Chenee335912017-02-09 17:53:20 -08005302 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08005303 }
5304
Thierry Strudel3d639192016-09-09 11:52:26 -07005305 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005306 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005307
5308 if (mFlushPerf) {
5309 //we cannot accept any requests during flush
5310 LOGE("process_capture_request cannot proceed during flush");
5311 pthread_mutex_unlock(&mMutex);
5312 return NO_ERROR; //should return an error
5313 }
5314
5315 if (meta.exists(ANDROID_REQUEST_ID)) {
5316 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5317 mCurrentRequestId = request_id;
5318 LOGD("Received request with id: %d", request_id);
5319 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5320 LOGE("Unable to find request id field, \
5321 & no previous id available");
5322 pthread_mutex_unlock(&mMutex);
5323 return NAME_NOT_FOUND;
5324 } else {
5325 LOGD("Re-using old request id");
5326 request_id = mCurrentRequestId;
5327 }
5328
5329 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5330 request->num_output_buffers,
5331 request->input_buffer,
5332 frameNumber);
5333 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005334 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005335 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005336 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005337 uint32_t snapshotStreamId = 0;
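    // For each output buffer: wait on its acquire fence, note whether this is a
    // blob (JPEG) or depth request, and record the owning channel's stream ID in
    // streamsArray so the backend knows which streams this request targets.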
5338 for (size_t i = 0; i < request->num_output_buffers; i++) {
5339 const camera3_stream_buffer_t& output = request->output_buffers[i];
5340 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5341
Emilian Peev7650c122017-01-19 08:24:33 -08005342 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5343 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005344 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005345 blob_request = 1;
5346 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5347 }
5348
5349 if (output.acquire_fence != -1) {
5350 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5351 close(output.acquire_fence);
5352 if (rc != OK) {
5353 LOGE("sync wait failed %d", rc);
5354 pthread_mutex_unlock(&mMutex);
5355 return rc;
5356 }
5357 }
5358
Emilian Peev0f3c3162017-03-15 12:57:46 +00005359 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5360 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005361 depthRequestPresent = true;
5362 continue;
5363 }
5364
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005365 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005366 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005367
5368 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5369 isVidBufRequested = true;
5370 }
5371 }
5372
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005373    //FIXME: Add checks in validateCaptureRequest to ensure there are no duplicate streams
5374 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5375 itr++) {
5376 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5377 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5378 channel->getStreamID(channel->getStreamTypeMask());
5379
5380 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5381 isVidBufRequested = true;
5382 }
5383 }
5384
Thierry Strudel3d639192016-09-09 11:52:26 -07005385 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005386 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005387 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005388 }
5389 if (blob_request && mRawDumpChannel) {
5390 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005391 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005392 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005393 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005394 }
5395
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005396 {
5397 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5398 // Request a RAW buffer if
5399 // 1. mHdrPlusRawSrcChannel is valid.
5400        // 2. frameNumber is a multiple of kHdrPlusRawPeriod (in order to limit the RAW capture rate.)
5401 // 3. There is no pending HDR+ request.
5402 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5403 mHdrPlusPendingRequests.size() == 0) {
5404 streamsArray.stream_request[streamsArray.num_streams].streamID =
5405 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5406 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5407 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005408 }
5409
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005410 //extract capture intent
5411 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5412 mCaptureIntent =
5413 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5414 }
5415
5416 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5417 mCacMode =
5418 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5419 }
5420
5421 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005422 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005423
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005424 {
5425 Mutex::Autolock l(gHdrPlusClientLock);
5426 // If this request has a still capture intent, try to submit an HDR+ request.
5427 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5428 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5429 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5430 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005431 }
5432
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005433 if (hdrPlusRequest) {
5434 // For a HDR+ request, just set the frame parameters.
5435 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5436 if (rc < 0) {
5437 LOGE("fail to set frame parameters");
5438 pthread_mutex_unlock(&mMutex);
5439 return rc;
5440 }
5441 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005442 /* Parse the settings:
5443 * - For every request in NORMAL MODE
5444 * - For every request in HFR mode during preview only case
5445 * - For first request of every batch in HFR mode during video
5446     * recording. In batch mode the same settings, except the frame number,
5447     * are repeated in each request of the batch.
5448 */
5449 if (!mBatchSize ||
5450 (mBatchSize && !isVidBufRequested) ||
5451 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005452 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005453 if (rc < 0) {
5454 LOGE("fail to set frame parameters");
5455 pthread_mutex_unlock(&mMutex);
5456 return rc;
5457 }
5458 }
5459        /* For batch mode HFR, setFrameParameters is not called for every
5460         * request; only the frame number of the latest request is parsed.
5461         * Keep track of the first and last frame numbers in a batch so that
5462         * metadata for all frame numbers of the batch can be duplicated in
5463         * handleBatchMetadata */
5464 if (mBatchSize) {
5465 if (!mToBeQueuedVidBufs) {
5466 //start of the batch
5467 mFirstFrameNumberInBatch = request->frame_number;
5468 }
5469 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5470 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5471 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005472 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005473 return BAD_VALUE;
5474 }
5475 }
5476 if (mNeedSensorRestart) {
5477 /* Unlock the mutex as restartSensor waits on the channels to be
5478 * stopped, which in turn calls stream callback functions -
5479 * handleBufferWithLock and handleMetadataWithLock */
5480 pthread_mutex_unlock(&mMutex);
5481 rc = dynamicUpdateMetaStreamInfo();
5482 if (rc != NO_ERROR) {
5483 LOGE("Restarting the sensor failed");
5484 return BAD_VALUE;
5485 }
5486 mNeedSensorRestart = false;
5487 pthread_mutex_lock(&mMutex);
5488 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005489 if(mResetInstantAEC) {
5490 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5491 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5492 mResetInstantAEC = false;
5493 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005494 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005495 if (request->input_buffer->acquire_fence != -1) {
5496 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5497 close(request->input_buffer->acquire_fence);
5498 if (rc != OK) {
5499 LOGE("input buffer sync wait failed %d", rc);
5500 pthread_mutex_unlock(&mMutex);
5501 return rc;
5502 }
5503 }
5504 }
5505
5506 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5507 mLastCustIntentFrmNum = frameNumber;
5508 }
5509 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005510 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005511 pendingRequestIterator latestRequest;
5512 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005513 pendingRequest.num_buffers = depthRequestPresent ?
5514 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005515 pendingRequest.request_id = request_id;
5516 pendingRequest.blob_request = blob_request;
5517 pendingRequest.timestamp = 0;
5518 pendingRequest.bUrgentReceived = 0;
5519 if (request->input_buffer) {
5520 pendingRequest.input_buffer =
5521 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5522 *(pendingRequest.input_buffer) = *(request->input_buffer);
5523 pInputBuffer = pendingRequest.input_buffer;
5524 } else {
5525 pendingRequest.input_buffer = NULL;
5526 pInputBuffer = NULL;
5527 }
5528
5529 pendingRequest.pipeline_depth = 0;
5530 pendingRequest.partial_result_cnt = 0;
5531 extractJpegMetadata(mCurJpegMeta, request);
5532 pendingRequest.jpegMetadata = mCurJpegMeta;
5533 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5534 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005535 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005536 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5537 mHybridAeEnable =
5538 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5539 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005540
5541 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5542 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005543 /* DevCamDebug metadata processCaptureRequest */
5544 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5545 mDevCamDebugMetaEnable =
5546 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5547 }
5548 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5549 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005550
5551 //extract CAC info
5552 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5553 mCacMode =
5554 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5555 }
5556 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005557 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005558
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005559 // extract enableZsl info
5560 if (gExposeEnableZslKey) {
5561 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5562 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5563 mZslEnabled = pendingRequest.enableZsl;
5564 } else {
5565 pendingRequest.enableZsl = mZslEnabled;
5566 }
5567 }
5568
Thierry Strudel3d639192016-09-09 11:52:26 -07005569 PendingBuffersInRequest bufsForCurRequest;
5570 bufsForCurRequest.frame_number = frameNumber;
5571 // Mark current timestamp for the new request
5572 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005573 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005574
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005575 if (hdrPlusRequest) {
5576 // Save settings for this request.
5577 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5578 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5579
5580 // Add to pending HDR+ request queue.
5581 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5582 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5583
5584 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5585 }
5586
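    // Track every non-depth output buffer of this request in the pending-request
    // and pending-buffer bookkeeping so it can be matched up when results return.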
Thierry Strudel3d639192016-09-09 11:52:26 -07005587 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005588 if ((request->output_buffers[i].stream->data_space ==
5589 HAL_DATASPACE_DEPTH) &&
5590 (HAL_PIXEL_FORMAT_BLOB ==
5591 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005592 continue;
5593 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005594 RequestedBufferInfo requestedBuf;
5595 memset(&requestedBuf, 0, sizeof(requestedBuf));
5596 requestedBuf.stream = request->output_buffers[i].stream;
5597 requestedBuf.buffer = NULL;
5598 pendingRequest.buffers.push_back(requestedBuf);
5599
5600 // Add to buffer handle the pending buffers list
5601 PendingBufferInfo bufferInfo;
5602 bufferInfo.buffer = request->output_buffers[i].buffer;
5603 bufferInfo.stream = request->output_buffers[i].stream;
5604 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5605 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5606 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5607 frameNumber, bufferInfo.buffer,
5608 channel->getStreamTypeMask(), bufferInfo.stream->format);
5609 }
5610 // Add this request packet into mPendingBuffersMap
5611 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5612 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5613 mPendingBuffersMap.get_num_overall_buffers());
5614
5615 latestRequest = mPendingRequestsList.insert(
5616 mPendingRequestsList.end(), pendingRequest);
5617 if(mFlush) {
5618 LOGI("mFlush is true");
5619 pthread_mutex_unlock(&mMutex);
5620 return NO_ERROR;
5621 }
5622
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005623 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5624 // channel.
5625 if (!hdrPlusRequest) {
5626 int indexUsed;
5627 // Notify metadata channel we receive a request
5628 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005629
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005630 if(request->input_buffer != NULL){
5631 LOGD("Input request, frame_number %d", frameNumber);
5632 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5633 if (NO_ERROR != rc) {
5634 LOGE("fail to set reproc parameters");
5635 pthread_mutex_unlock(&mMutex);
5636 return rc;
5637 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005638 }
5639
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005640 // Call request on other streams
5641 uint32_t streams_need_metadata = 0;
5642 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5643 for (size_t i = 0; i < request->num_output_buffers; i++) {
5644 const camera3_stream_buffer_t& output = request->output_buffers[i];
5645 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5646
5647 if (channel == NULL) {
5648 LOGW("invalid channel pointer for stream");
5649 continue;
5650 }
5651
5652 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5653 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5654 output.buffer, request->input_buffer, frameNumber);
5655 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005656 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005657 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5658 if (rc < 0) {
5659 LOGE("Fail to request on picture channel");
5660 pthread_mutex_unlock(&mMutex);
5661 return rc;
5662 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005663 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005664 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5665 assert(NULL != mDepthChannel);
5666 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005667
Emilian Peev7650c122017-01-19 08:24:33 -08005668 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5669 if (rc < 0) {
5670 LOGE("Fail to map on depth buffer");
5671 pthread_mutex_unlock(&mMutex);
5672 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005673 }
Emilian Peev7650c122017-01-19 08:24:33 -08005674 } else {
5675 LOGD("snapshot request with buffer %p, frame_number %d",
5676 output.buffer, frameNumber);
5677 if (!request->settings) {
5678 rc = channel->request(output.buffer, frameNumber,
5679 NULL, mPrevParameters, indexUsed);
5680 } else {
5681 rc = channel->request(output.buffer, frameNumber,
5682 NULL, mParameters, indexUsed);
5683 }
5684 if (rc < 0) {
5685 LOGE("Fail to request on picture channel");
5686 pthread_mutex_unlock(&mMutex);
5687 return rc;
5688 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005689
Emilian Peev7650c122017-01-19 08:24:33 -08005690 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5691 uint32_t j = 0;
5692 for (j = 0; j < streamsArray.num_streams; j++) {
5693 if (streamsArray.stream_request[j].streamID == streamId) {
5694 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5695 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5696 else
5697 streamsArray.stream_request[j].buf_index = indexUsed;
5698 break;
5699 }
5700 }
5701 if (j == streamsArray.num_streams) {
5702 LOGE("Did not find matching stream to update index");
5703 assert(0);
5704 }
5705
5706 pendingBufferIter->need_metadata = true;
5707 streams_need_metadata++;
5708 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005709 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005710 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5711 bool needMetadata = false;
5712 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5713 rc = yuvChannel->request(output.buffer, frameNumber,
5714 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5715 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005716 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005717 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005718 pthread_mutex_unlock(&mMutex);
5719 return rc;
5720 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005721
5722 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5723 uint32_t j = 0;
5724 for (j = 0; j < streamsArray.num_streams; j++) {
5725 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005726 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5727 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5728 else
5729 streamsArray.stream_request[j].buf_index = indexUsed;
5730 break;
5731 }
5732 }
5733 if (j == streamsArray.num_streams) {
5734 LOGE("Did not find matching stream to update index");
5735 assert(0);
5736 }
5737
5738 pendingBufferIter->need_metadata = needMetadata;
5739 if (needMetadata)
5740 streams_need_metadata += 1;
5741 LOGD("calling YUV channel request, need_metadata is %d",
5742 needMetadata);
5743 } else {
5744 LOGD("request with buffer %p, frame_number %d",
5745 output.buffer, frameNumber);
5746
5747 rc = channel->request(output.buffer, frameNumber, indexUsed);
5748
5749 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5750 uint32_t j = 0;
5751 for (j = 0; j < streamsArray.num_streams; j++) {
5752 if (streamsArray.stream_request[j].streamID == streamId) {
5753 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5754 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5755 else
5756 streamsArray.stream_request[j].buf_index = indexUsed;
5757 break;
5758 }
5759 }
5760 if (j == streamsArray.num_streams) {
5761 LOGE("Did not find matching stream to update index");
5762 assert(0);
5763 }
5764
5765 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5766 && mBatchSize) {
5767 mToBeQueuedVidBufs++;
5768 if (mToBeQueuedVidBufs == mBatchSize) {
5769 channel->queueBatchBuf();
5770 }
5771 }
5772 if (rc < 0) {
5773 LOGE("request failed");
5774 pthread_mutex_unlock(&mMutex);
5775 return rc;
5776 }
5777 }
5778 pendingBufferIter++;
5779 }
5780
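        // Issue requests for streams that were requested internally by the HAL
        // (no framework buffer attached), e.g. metering-only internal snapshots.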
5781 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5782 itr++) {
5783 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5784
5785 if (channel == NULL) {
5786 LOGE("invalid channel pointer for stream");
5787 assert(0);
5788 return BAD_VALUE;
5789 }
5790
5791 InternalRequest requestedStream;
5792 requestedStream = (*itr);
5793
5794
5795 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5796 LOGD("snapshot request internally input buffer %p, frame_number %d",
5797 request->input_buffer, frameNumber);
5798 if(request->input_buffer != NULL){
5799 rc = channel->request(NULL, frameNumber,
5800 pInputBuffer, &mReprocMeta, indexUsed, true,
5801 requestedStream.meteringOnly);
5802 if (rc < 0) {
5803 LOGE("Fail to request on picture channel");
5804 pthread_mutex_unlock(&mMutex);
5805 return rc;
5806 }
5807 } else {
5808 LOGD("snapshot request with frame_number %d", frameNumber);
5809 if (!request->settings) {
5810 rc = channel->request(NULL, frameNumber,
5811 NULL, mPrevParameters, indexUsed, true,
5812 requestedStream.meteringOnly);
5813 } else {
5814 rc = channel->request(NULL, frameNumber,
5815 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5816 }
5817 if (rc < 0) {
5818 LOGE("Fail to request on picture channel");
5819 pthread_mutex_unlock(&mMutex);
5820 return rc;
5821 }
5822
5823 if ((*itr).meteringOnly != 1) {
5824 requestedStream.need_metadata = 1;
5825 streams_need_metadata++;
5826 }
5827 }
5828
5829 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5830 uint32_t j = 0;
5831 for (j = 0; j < streamsArray.num_streams; j++) {
5832 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005833 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5834 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5835 else
5836 streamsArray.stream_request[j].buf_index = indexUsed;
5837 break;
5838 }
5839 }
5840 if (j == streamsArray.num_streams) {
5841 LOGE("Did not find matching stream to update index");
5842 assert(0);
5843 }
5844
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005845 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005846 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005847 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005848 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005849 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005850 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005851 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005852
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005853 //If 2 streams have need_metadata set to true, fail the request, unless
5854 //we copy/reference count the metadata buffer
5855 if (streams_need_metadata > 1) {
5856        LOGE("not supporting request in which two streams require"
5857 " 2 HAL metadata for reprocessing");
5858 pthread_mutex_unlock(&mMutex);
5859 return -EINVAL;
5860 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005861
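    // Ask the backend to emit PDAF data only when this request includes a depth output.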
Emilian Peev7650c122017-01-19 08:24:33 -08005862 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5863 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5864 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5865 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5866 pthread_mutex_unlock(&mMutex);
5867 return BAD_VALUE;
5868 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005869 if (request->input_buffer == NULL) {
5870 /* Set the parameters to backend:
5871 * - For every request in NORMAL MODE
5872 * - For every request in HFR mode during preview only case
5873 * - Once every batch in HFR mode during video recording
5874 */
5875 if (!mBatchSize ||
5876 (mBatchSize && !isVidBufRequested) ||
5877 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5878 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5879 mBatchSize, isVidBufRequested,
5880 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005881
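            // The batch is complete: merge this request's stream IDs into
            // mBatchedStreamsArray (skipping duplicates) so the single set_parms
            // call below covers every stream requested across the whole batch.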
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005882 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5883 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5884 uint32_t m = 0;
5885 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5886 if (streamsArray.stream_request[k].streamID ==
5887 mBatchedStreamsArray.stream_request[m].streamID)
5888 break;
5889 }
5890 if (m == mBatchedStreamsArray.num_streams) {
5891 mBatchedStreamsArray.stream_request\
5892 [mBatchedStreamsArray.num_streams].streamID =
5893 streamsArray.stream_request[k].streamID;
5894 mBatchedStreamsArray.stream_request\
5895 [mBatchedStreamsArray.num_streams].buf_index =
5896 streamsArray.stream_request[k].buf_index;
5897 mBatchedStreamsArray.num_streams =
5898 mBatchedStreamsArray.num_streams + 1;
5899 }
5900 }
5901 streamsArray = mBatchedStreamsArray;
5902 }
5903 /* Update stream id of all the requested buffers */
5904 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5905 streamsArray)) {
5906 LOGE("Failed to set stream type mask in the parameters");
                // Release mMutex before bailing out, matching the other error paths.
                pthread_mutex_unlock(&mMutex);
5907                return BAD_VALUE;
5908 }
5909
5910 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5911 mParameters);
5912 if (rc < 0) {
5913 LOGE("set_parms failed");
5914 }
5915 /* reset to zero coz, the batch is queued */
5916 mToBeQueuedVidBufs = 0;
5917 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5918 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5919 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
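                // The batch is not complete yet: just accumulate this request's
                // stream IDs in mBatchedStreamsArray; set_parms is deferred until
                // the batch fills up.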
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005920 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5921 uint32_t m = 0;
5922 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5923 if (streamsArray.stream_request[k].streamID ==
5924 mBatchedStreamsArray.stream_request[m].streamID)
5925 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005926 }
5927 if (m == mBatchedStreamsArray.num_streams) {
5928 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5929 streamID = streamsArray.stream_request[k].streamID;
5930 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5931 buf_index = streamsArray.stream_request[k].buf_index;
5932 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5933 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005934 }
5935 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005936 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005937 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005938 }
5939
5940 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5941
5942 mState = STARTED;
5943 // Added a timed condition wait
5944 struct timespec ts;
5945 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005946 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005947 if (rc < 0) {
5948 isValidTimeout = 0;
5949        LOGE("Error reading the monotonic clock!!");
5950 }
5951 else {
5952        // Set a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005953 int64_t timeout = 5;
5954 {
5955 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5956 // If there is a pending HDR+ request, the following requests may be blocked until the
5957 // HDR+ request is done. So allow a longer timeout.
5958 if (mHdrPlusPendingRequests.size() > 0) {
5959 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5960 }
5961 }
5962 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005963 }
5964 //Block on conditional variable
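    // Throttle the caller: block until the number of in-flight requests drops below
    // mMinInFlightRequests (reprocess requests with an input buffer are not throttled),
    // or until the timed wait above expires.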
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005965 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005966 (mState != ERROR) && (mState != DEINIT)) {
5967 if (!isValidTimeout) {
5968 LOGD("Blocking on conditional wait");
5969 pthread_cond_wait(&mRequestCond, &mMutex);
5970 }
5971 else {
5972 LOGD("Blocking on timed conditional wait");
5973 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5974 if (rc == ETIMEDOUT) {
5975 rc = -ENODEV;
5976 LOGE("Unblocked on timeout!!!!");
5977 break;
5978 }
5979 }
5980 LOGD("Unblocked");
5981 if (mWokenUpByDaemon) {
5982 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005983 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005984 break;
5985 }
5986 }
5987 pthread_mutex_unlock(&mMutex);
5988
5989 return rc;
5990}
5991
5992/*===========================================================================
5993 * FUNCTION : dump
5994 *
5995 * DESCRIPTION: Dump HAL state (pending requests, pending buffers and pending
5996 *              frame drops) to the given file descriptor
5997 * PARAMETERS :
5998 *   @fd : file descriptor to write the dump to
5999 *
6000 * RETURN : None
6001 *==========================================================================*/
6002void QCamera3HardwareInterface::dump(int fd)
6003{
6004 pthread_mutex_lock(&mMutex);
6005 dprintf(fd, "\n Camera HAL3 information Begin \n");
6006
6007 dprintf(fd, "\nNumber of pending requests: %zu \n",
6008 mPendingRequestsList.size());
6009 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6010 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6011 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6012 for(pendingRequestIterator i = mPendingRequestsList.begin();
6013 i != mPendingRequestsList.end(); i++) {
6014 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6015 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6016 i->input_buffer);
6017 }
6018 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6019 mPendingBuffersMap.get_num_overall_buffers());
6020 dprintf(fd, "-------+------------------\n");
6021 dprintf(fd, " Frame | Stream type mask \n");
6022 dprintf(fd, "-------+------------------\n");
6023 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6024 for(auto &j : req.mPendingBufferList) {
6025 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6026 dprintf(fd, " %5d | %11d \n",
6027 req.frame_number, channel->getStreamTypeMask());
6028 }
6029 }
6030 dprintf(fd, "-------+------------------\n");
6031
6032 dprintf(fd, "\nPending frame drop list: %zu\n",
6033 mPendingFrameDropList.size());
6034 dprintf(fd, "-------+-----------\n");
6035 dprintf(fd, " Frame | Stream ID \n");
6036 dprintf(fd, "-------+-----------\n");
6037 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6038 i != mPendingFrameDropList.end(); i++) {
6039 dprintf(fd, " %5d | %9d \n",
6040 i->frame_number, i->stream_ID);
6041 }
6042 dprintf(fd, "-------+-----------\n");
6043
6044 dprintf(fd, "\n Camera HAL3 information End \n");
6045
6046 /* use dumpsys media.camera as trigger to send update debug level event */
6047 mUpdateDebugLevel = true;
6048 pthread_mutex_unlock(&mMutex);
6049 return;
6050}
6051
6052/*===========================================================================
6053 * FUNCTION : flush
6054 *
6055 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6056 * conditionally restarts channels
6057 *
6058 * PARAMETERS :
6059 * @ restartChannels: re-start all channels
6060 *
6061 *
6062 * RETURN :
6063 * 0 on success
6064 * Error code on failure
6065 *==========================================================================*/
6066int QCamera3HardwareInterface::flush(bool restartChannels)
6067{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006068 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006069 int32_t rc = NO_ERROR;
6070
6071 LOGD("Unblocking Process Capture Request");
6072 pthread_mutex_lock(&mMutex);
6073 mFlush = true;
6074 pthread_mutex_unlock(&mMutex);
6075
6076 rc = stopAllChannels();
6077 // unlink of dualcam
6078 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006079 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6080 &m_pDualCamCmdPtr->bundle_info;
6081 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006082 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6083 pthread_mutex_lock(&gCamLock);
6084
6085 if (mIsMainCamera == 1) {
6086 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6087 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006088 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006089 // related session id should be session id of linked session
6090 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6091 } else {
6092 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6093 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006094 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006095 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6096 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006097 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006098 pthread_mutex_unlock(&gCamLock);
6099
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006100 rc = mCameraHandle->ops->set_dual_cam_cmd(
6101 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006102 if (rc < 0) {
6103 LOGE("Dualcam: Unlink failed, but still proceed to close");
6104 }
6105 }
6106
6107 if (rc < 0) {
6108 LOGE("stopAllChannels failed");
6109 return rc;
6110 }
6111 if (mChannelHandle) {
6112 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6113 mChannelHandle);
6114 }
6115
6116 // Reset bundle info
6117 rc = setBundleInfo();
6118 if (rc < 0) {
6119 LOGE("setBundleInfo failed %d", rc);
6120 return rc;
6121 }
6122
6123 // Mutex Lock
6124 pthread_mutex_lock(&mMutex);
6125
6126 // Unblock process_capture_request
6127 mPendingLiveRequest = 0;
6128 pthread_cond_signal(&mRequestCond);
6129
6130 rc = notifyErrorForPendingRequests();
6131 if (rc < 0) {
6132 LOGE("notifyErrorForPendingRequests failed");
6133 pthread_mutex_unlock(&mMutex);
6134 return rc;
6135 }
6136
6137 mFlush = false;
6138
6139 // Start the Streams/Channels
6140 if (restartChannels) {
6141 rc = startAllChannels();
6142 if (rc < 0) {
6143 LOGE("startAllChannels failed");
6144 pthread_mutex_unlock(&mMutex);
6145 return rc;
6146 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006147 if (mChannelHandle) {
6148            rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6149 mChannelHandle);
6150 if (rc < 0) {
6151 LOGE("start_channel failed");
6152 pthread_mutex_unlock(&mMutex);
6153 return rc;
6154 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006155 }
6156 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006157 pthread_mutex_unlock(&mMutex);
6158
6159 return 0;
6160}
6161
6162/*===========================================================================
6163 * FUNCTION : flushPerf
6164 *
6165 * DESCRIPTION: This is the performance-optimized version of flush that does
6166 *              not stream off the channels; instead it flushes the backend directly
6167 *
6168 * PARAMETERS :
6169 *
6170 *
6171 * RETURN : 0 : success
6172 * -EINVAL: input is malformed (device is not valid)
6173 * -ENODEV: if the device has encountered a serious error
6174 *==========================================================================*/
6175int QCamera3HardwareInterface::flushPerf()
6176{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006177 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006178 int32_t rc = 0;
6179 struct timespec timeout;
6180 bool timed_wait = false;
6181
6182 pthread_mutex_lock(&mMutex);
6183 mFlushPerf = true;
6184 mPendingBuffersMap.numPendingBufsAtFlush =
6185 mPendingBuffersMap.get_num_overall_buffers();
6186 LOGD("Calling flush. Wait for %d buffers to return",
6187 mPendingBuffersMap.numPendingBufsAtFlush);
6188
6189 /* send the flush event to the backend */
6190 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6191 if (rc < 0) {
6192 LOGE("Error in flush: IOCTL failure");
6193 mFlushPerf = false;
6194 pthread_mutex_unlock(&mMutex);
6195 return -ENODEV;
6196 }
6197
6198 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6199 LOGD("No pending buffers in HAL, return flush");
6200 mFlushPerf = false;
6201 pthread_mutex_unlock(&mMutex);
6202 return rc;
6203 }
6204
6205 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006206 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006207 if (rc < 0) {
6208        LOGE("Error reading the monotonic clock, cannot use timed wait");
6209 } else {
6210 timeout.tv_sec += FLUSH_TIMEOUT;
6211 timed_wait = true;
6212 }
6213
6214 //Block on conditional variable
6215 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6216 LOGD("Waiting on mBuffersCond");
6217 if (!timed_wait) {
6218 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6219 if (rc != 0) {
6220 LOGE("pthread_cond_wait failed due to rc = %s",
6221 strerror(rc));
6222 break;
6223 }
6224 } else {
6225 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6226 if (rc != 0) {
6227 LOGE("pthread_cond_timedwait failed due to rc = %s",
6228 strerror(rc));
6229 break;
6230 }
6231 }
6232 }
6233 if (rc != 0) {
6234 mFlushPerf = false;
6235 pthread_mutex_unlock(&mMutex);
6236 return -ENODEV;
6237 }
6238
6239 LOGD("Received buffers, now safe to return them");
6240
6241 //make sure the channels handle flush
6242 //currently only required for the picture channel to release snapshot resources
6243 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6244 it != mStreamInfo.end(); it++) {
6245 QCamera3Channel *channel = (*it)->channel;
6246 if (channel) {
6247 rc = channel->flush();
6248 if (rc) {
6249 LOGE("Flushing the channels failed with error %d", rc);
6250 // even though the channel flush failed we need to continue and
6251 // return the buffers we have to the framework, however the return
6252 // value will be an error
6253 rc = -ENODEV;
6254 }
6255 }
6256 }
6257
6258 /* notify the frameworks and send errored results */
6259 rc = notifyErrorForPendingRequests();
6260 if (rc < 0) {
6261 LOGE("notifyErrorForPendingRequests failed");
6262 pthread_mutex_unlock(&mMutex);
6263 return rc;
6264 }
6265
6266 //unblock process_capture_request
6267 mPendingLiveRequest = 0;
6268 unblockRequestIfNecessary();
6269
6270 mFlushPerf = false;
6271 pthread_mutex_unlock(&mMutex);
6272 LOGD ("Flush Operation complete. rc = %d", rc);
6273 return rc;
6274}
6275
6276/*===========================================================================
6277 * FUNCTION : handleCameraDeviceError
6278 *
6279 * DESCRIPTION: This function calls internal flush and notifies the error to
6280 * framework and updates the state variable.
6281 *
6282 * PARAMETERS : None
6283 *
6284 * RETURN : NO_ERROR on Success
6285 * Error code on failure
6286 *==========================================================================*/
6287int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6288{
6289 int32_t rc = NO_ERROR;
6290
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006291 {
6292 Mutex::Autolock lock(mFlushLock);
6293 pthread_mutex_lock(&mMutex);
6294 if (mState != ERROR) {
6295 //if mState != ERROR, nothing to be done
6296 pthread_mutex_unlock(&mMutex);
6297 return NO_ERROR;
6298 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006299 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006300
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006301 rc = flush(false /* restart channels */);
6302 if (NO_ERROR != rc) {
6303 LOGE("internal flush to handle mState = ERROR failed");
6304 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006305
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006306 pthread_mutex_lock(&mMutex);
6307 mState = DEINIT;
6308 pthread_mutex_unlock(&mMutex);
6309 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006310
6311 camera3_notify_msg_t notify_msg;
6312 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6313 notify_msg.type = CAMERA3_MSG_ERROR;
6314 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6315 notify_msg.message.error.error_stream = NULL;
6316 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006317 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006318
6319 return rc;
6320}
6321
6322/*===========================================================================
6323 * FUNCTION : captureResultCb
6324 *
6325 * DESCRIPTION: Callback handler for all capture result
6326 * (streams, as well as metadata)
6327 *
6328 * PARAMETERS :
6329 * @metadata : metadata information
6330 * @buffer : actual gralloc buffer to be returned to frameworks.
6331 * NULL if metadata.
6332 * @frame_number : frame number of the request this result belongs to
 * @isInputBuffer : true if this callback is for the request's input buffer
 *
6333 * RETURN : NONE
6334 *==========================================================================*/
6335void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6336 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6337{
6338 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006339 pthread_mutex_lock(&mMutex);
6340 uint8_t batchSize = mBatchSize;
6341 pthread_mutex_unlock(&mMutex);
6342 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006343 handleBatchMetadata(metadata_buf,
6344 true /* free_and_bufdone_meta_buf */);
6345 } else { /* mBatchSize = 0 */
6346 hdrPlusPerfLock(metadata_buf);
6347 pthread_mutex_lock(&mMutex);
6348 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006349 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006350 true /* last urgent frame of batch metadata */,
6351 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006352 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006353 pthread_mutex_unlock(&mMutex);
6354 }
6355 } else if (isInputBuffer) {
6356 pthread_mutex_lock(&mMutex);
6357 handleInputBufferWithLock(frame_number);
6358 pthread_mutex_unlock(&mMutex);
6359 } else {
6360 pthread_mutex_lock(&mMutex);
6361 handleBufferWithLock(buffer, frame_number);
6362 pthread_mutex_unlock(&mMutex);
6363 }
6364 return;
6365}
6366
6367/*===========================================================================
6368 * FUNCTION : getReprocessibleOutputStreamId
6369 *
6370 * DESCRIPTION: Get source output stream id for the input reprocess stream
6371 * based on size and format, which would be the largest
6372 * output stream if an input stream exists.
6373 *
6374 * PARAMETERS :
6375 * @id : return the stream id if found
6376 *
6377 * RETURN : int32_t type of status
6378 * NO_ERROR -- success
6379 *              non-zero failure code
6380 *==========================================================================*/
6381int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6382{
6383    /* check if there is any output or bidirectional stream with the same size
6384       and format, and return that stream */
6385 if ((mInputStreamInfo.dim.width > 0) &&
6386 (mInputStreamInfo.dim.height > 0)) {
6387 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6388 it != mStreamInfo.end(); it++) {
6389
6390 camera3_stream_t *stream = (*it)->stream;
6391 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6392 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6393 (stream->format == mInputStreamInfo.format)) {
6394 // Usage flag for an input stream and the source output stream
6395 // may be different.
6396 LOGD("Found reprocessible output stream! %p", *it);
6397 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6398 stream->usage, mInputStreamInfo.usage);
6399
6400 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6401 if (channel != NULL && channel->mStreams[0]) {
6402 id = channel->mStreams[0]->getMyServerID();
6403 return NO_ERROR;
6404 }
6405 }
6406 }
6407 } else {
6408 LOGD("No input stream, so no reprocessible output stream");
6409 }
6410 return NAME_NOT_FOUND;
6411}
6412
6413/*===========================================================================
6414 * FUNCTION : lookupFwkName
6415 *
6416 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
6417 *              make sure the parameter is correctly propagated
6418 *
6419 * PARAMETERS :
6420 * @arr : map between the two enums
6421 * @len : len of the map
6422 * @hal_name : name of the hal_parm to map
6423 *
6424 * RETURN : int type of status
6425 * fwk_name -- success
6426 *              non-zero failure code
6427 *==========================================================================*/
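// Illustrative usage (assumes a {fwk_name, hal_name} mapping table such as
// EFFECT_MODES_MAP and the METADATA_MAP_SIZE macro defined elsewhere in this HAL):
//   int fwkEffect = lookupFwkName(EFFECT_MODES_MAP,
//           METADATA_MAP_SIZE(EFFECT_MODES_MAP), CAM_EFFECT_MODE_MONO);
//   // fwkEffect is ANDROID_CONTROL_EFFECT_MODE_MONO, or NAME_NOT_FOUND if no entry matches.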
6428template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6429 size_t len, halType hal_name)
6430{
6431
6432 for (size_t i = 0; i < len; i++) {
6433 if (arr[i].hal_name == hal_name) {
6434 return arr[i].fwk_name;
6435 }
6436 }
6437
6438    /* Not finding a matching framework type is not necessarily
6439     * an error. This happens when mm-camera supports more attributes
6440     * than the framework does */
6441 LOGH("Cannot find matching framework type");
6442 return NAME_NOT_FOUND;
6443}
6444
6445/*===========================================================================
6446 * FUNCTION : lookupHalName
6447 *
6448 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
6449 *              make sure the parameter is correctly propagated
6450 *
6451 * PARAMETERS :
6452 * @arr : map between the two enums
6453 * @len : len of the map
6454 * @fwk_name : name of the fwk parameter to map
6455 *
6456 * RETURN : int32_t type of status
6457 * hal_name -- success
6458 *              non-zero failure code
6459 *==========================================================================*/
6460template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6461 size_t len, fwkType fwk_name)
6462{
6463 for (size_t i = 0; i < len; i++) {
6464 if (arr[i].fwk_name == fwk_name) {
6465 return arr[i].hal_name;
6466 }
6467 }
6468
6469 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6470 return NAME_NOT_FOUND;
6471}
6472
6473/*===========================================================================
6474 * FUNCTION : lookupProp
6475 *
6476 * DESCRIPTION: lookup a value by its name
6477 *
6478 * PARAMETERS :
6479 * @arr : map between the two enums
6480 * @len : size of the map
6481 * @name : name to be looked up
6482 *
6483 * RETURN : Value if found
6484 * CAM_CDS_MODE_MAX if not found
6485 *==========================================================================*/
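// Illustrative usage (assumes a {desc, val} table such as CDS_MAP, e.g. {"On", CAM_CDS_MODE_ON},
// and the property key below; both are assumptions based on usage elsewhere in this HAL):
//   char prop[PROPERTY_VALUE_MAX];
//   property_get("persist.camera.CDS", prop, "Auto");
//   cam_cds_mode_type_t cds = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);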
6486template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6487 size_t len, const char *name)
6488{
6489 if (name) {
6490 for (size_t i = 0; i < len; i++) {
6491 if (!strcmp(arr[i].desc, name)) {
6492 return arr[i].val;
6493 }
6494 }
6495 }
6496 return CAM_CDS_MODE_MAX;
6497}
6498
6499/*===========================================================================
6500 * FUNCTION   : translateFromHalMetadata
6501 * DESCRIPTION: Translate metadata from the HAL/backend format into the framework camera_metadata_t format
6502 *
6503 * PARAMETERS :
6504 * @metadata : metadata information from callback
6505 * @timestamp: metadata buffer timestamp
6506 * @request_id: request id
6507 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006508 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006509 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6510 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006511 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006512 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6513 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006514 *
6515 * RETURN : camera_metadata_t*
6516 * metadata in a format specified by fwk
6517 *==========================================================================*/
6518camera_metadata_t*
6519QCamera3HardwareInterface::translateFromHalMetadata(
6520 metadata_buffer_t *metadata,
6521 nsecs_t timestamp,
6522 int32_t request_id,
6523 const CameraMetadata& jpegMetadata,
6524 uint8_t pipeline_depth,
6525 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006526 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006527 /* DevCamDebug metadata translateFromHalMetadata argument */
6528 uint8_t DevCamDebug_meta_enable,
6529 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006530 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006531 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006532 bool lastMetadataInBatch,
6533 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006534{
6535 CameraMetadata camMetadata;
6536 camera_metadata_t *resultMetadata;
6537
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006538 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006539 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6540 * Timestamp is needed because it's used for shutter notify calculation.
6541 * */
6542 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6543 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006544 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006545 }
6546
Thierry Strudel3d639192016-09-09 11:52:26 -07006547 if (jpegMetadata.entryCount())
6548 camMetadata.append(jpegMetadata);
6549
6550 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6551 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6552 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6553 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006554 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006555 if (mBatchSize == 0) {
6556 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6557 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6558 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006559
Samuel Ha68ba5172016-12-15 18:41:12 -08006560 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6561    // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6562 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6563 // DevCamDebug metadata translateFromHalMetadata AF
6564 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6565 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6566 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6567 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6568 }
6569 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6570 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6571 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6572 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6573 }
6574 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6575 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6576 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6577 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6578 }
6579 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6580 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6581 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6582 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6583 }
6584 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6585 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6586 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6587 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6588 }
6589 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6590 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6591 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6592 *DevCamDebug_af_monitor_pdaf_target_pos;
6593 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6594 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6595 }
6596 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6597 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6598 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6599 *DevCamDebug_af_monitor_pdaf_confidence;
6600 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6601 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6602 }
6603 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6604 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6605 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6606 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6607 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6608 }
6609 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6610 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6611 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6612 *DevCamDebug_af_monitor_tof_target_pos;
6613 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6614 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6615 }
6616 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6617 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6618 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6619 *DevCamDebug_af_monitor_tof_confidence;
6620 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6621 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6622 }
6623 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6624 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6625 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6626 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6627 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6628 }
6629 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6630 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6631 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6632 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6633 &fwk_DevCamDebug_af_monitor_type_select, 1);
6634 }
6635 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6636 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6637 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6638 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6639 &fwk_DevCamDebug_af_monitor_refocus, 1);
6640 }
6641 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6642 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6643 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6644 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6645 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6646 }
6647 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6648 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6649 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6650 *DevCamDebug_af_search_pdaf_target_pos;
6651 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6652 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6653 }
6654 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6655 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6656 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6657 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6658 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6659 }
6660 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6661 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6662 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6663 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6664 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6665 }
6666 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6667 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6668 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6669 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6670 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6671 }
6672 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6673 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6674 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6675 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6676 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6677 }
6678 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6679 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6680 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6681 *DevCamDebug_af_search_tof_target_pos;
6682 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6683 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6684 }
6685 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6686 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6687 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6688 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6689 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6690 }
6691 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6692 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6693 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6694 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6695 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6696 }
6697 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6698 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6699 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6700 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6701 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6702 }
6703 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6704 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6705 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6706 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6707 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6708 }
6709 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6710 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6711 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6712 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6713 &fwk_DevCamDebug_af_search_type_select, 1);
6714 }
6715 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6716 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6717 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6718 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6719 &fwk_DevCamDebug_af_search_next_pos, 1);
6720 }
6721 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6722 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6723 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6724 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6725 &fwk_DevCamDebug_af_search_target_pos, 1);
6726 }
6727 // DevCamDebug metadata translateFromHalMetadata AEC
6728 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6729 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6730 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6731 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6732 }
6733 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6734 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6735 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6736 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6737 }
6738 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6739 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6740 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6741 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6742 }
6743 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6744 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6745 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6746 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6747 }
6748 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6749 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6750 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6751 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6752 }
6753 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6754 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6755 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6756 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6757 }
6758 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6759 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6760 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6761 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6762 }
6763 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6764 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6765 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6766 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6767 }
Samuel Ha34229982017-02-17 13:51:11 -08006768 // DevCamDebug metadata translateFromHalMetadata zzHDR
6769 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6770 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6771 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6772 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6773 }
6774 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6775 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006776 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006777 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6778 }
6779 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6780 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6781 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6782 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6783 }
6784 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6785 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006786 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006787 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6788 }
6789 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6790 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6791 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6792 *DevCamDebug_aec_hdr_sensitivity_ratio;
6793 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6794 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6795 }
6796 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6797 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6798 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6799 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6800 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6801 }
6802 // DevCamDebug metadata translateFromHalMetadata ADRC
6803 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6804 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6805 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6806 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6807 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6808 }
6809 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6810 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6811 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6812 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6813 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6814 }
6815 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6816 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6817 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6818 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6819 }
6820 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6821 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6822 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6823 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6824 }
6825 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6826 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6827 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6828 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6829 }
6830 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6831 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6832 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6833 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6834 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006835 // DevCamDebug metadata translateFromHalMetadata AWB
6836 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6837 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6838 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6839 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6840 }
6841 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6842 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6843 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6844 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6845 }
6846 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6847 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6848 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6849 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6850 }
6851 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6852 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6853 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6854 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6855 }
6856 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6857 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6858 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6859 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6860 }
6861 }
6862 // atrace_end(ATRACE_TAG_ALWAYS);
6863
Thierry Strudel3d639192016-09-09 11:52:26 -07006864 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6865 int64_t fwk_frame_number = *frame_number;
6866 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6867 }
6868
6869 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6870 int32_t fps_range[2];
6871 fps_range[0] = (int32_t)float_range->min_fps;
6872 fps_range[1] = (int32_t)float_range->max_fps;
6873 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6874 fps_range, 2);
6875 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6876 fps_range[0], fps_range[1]);
6877 }
6878
6879 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6880 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6881 }
6882
6883 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6884        int val = lookupFwkName(SCENE_MODES_MAP,
6885 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6886 *sceneMode);
6887 if (NAME_NOT_FOUND != val) {
6888 uint8_t fwkSceneMode = (uint8_t)val;
6889 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6890 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6891 fwkSceneMode);
6892 }
6893 }
6894
6895 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6896 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6897 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6898 }
6899
6900 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6901 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6902 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6903 }
6904
6905 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6906 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6907 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6908 }
6909
6910 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6911 CAM_INTF_META_EDGE_MODE, metadata) {
6912 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6913 }
6914
6915 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6916 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6917 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6918 }
6919
6920 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6921 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6922 }
6923
6924 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6925 if (0 <= *flashState) {
6926 uint8_t fwk_flashState = (uint8_t) *flashState;
6927 if (!gCamCapability[mCameraId]->flash_available) {
6928 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6929 }
6930 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6931 }
6932 }
6933
6934 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6935 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6936 if (NAME_NOT_FOUND != val) {
6937 uint8_t fwk_flashMode = (uint8_t)val;
6938 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6939 }
6940 }
6941
6942 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6943 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6944 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6945 }
6946
6947 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6948 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6949 }
6950
6951 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6952 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6953 }
6954
6955 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6956 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6957 }
6958
6959 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6960 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6961 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6962 }
6963
6964 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6965 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6966 LOGD("fwk_videoStab = %d", fwk_videoStab);
6967 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6968 } else {
6969        // Regardless of whether video stabilization is supported, CTS expects the EIS result
6970        // to be non-NULL, so hardcode the video stabilization result to OFF mode.
6971 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6972 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006973 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006974 }
6975
6976 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6977 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6978 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6979 }
6980
6981 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6982 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6983 }
6984
Thierry Strudel3d639192016-09-09 11:52:26 -07006985 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6986 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006987 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006988
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006989 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6990 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006991
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006992 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006993 blackLevelAppliedPattern->cam_black_level[0],
6994 blackLevelAppliedPattern->cam_black_level[1],
6995 blackLevelAppliedPattern->cam_black_level[2],
6996 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006997 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6998 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006999
7000#ifndef USE_HAL_3_3
7001 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307002        // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007003 // depth space.
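        // (dividing by 16.0 = 2^(14-10) rescales a 14-bit black level value into the
        // 10-bit range reported to the framework)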
Jason Lee4f3d96e2017-02-28 19:24:14 +05307004 fwk_blackLevelInd[0] /= 16.0;
7005 fwk_blackLevelInd[1] /= 16.0;
7006 fwk_blackLevelInd[2] /= 16.0;
7007 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007008 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7009 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007010#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007011 }
7012
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007013#ifndef USE_HAL_3_3
7014 // Fixed whitelevel is used by ISP/Sensor
7015 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7016 &gCamCapability[mCameraId]->white_level, 1);
7017#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007018
7019 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7020 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7021 int32_t scalerCropRegion[4];
7022 scalerCropRegion[0] = hScalerCropRegion->left;
7023 scalerCropRegion[1] = hScalerCropRegion->top;
7024 scalerCropRegion[2] = hScalerCropRegion->width;
7025 scalerCropRegion[3] = hScalerCropRegion->height;
7026
7027 // Adjust crop region from sensor output coordinate system to active
7028 // array coordinate system.
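        // (the sensor output may be binned or cropped relative to the full active pixel
        // array, so HAL coordinates are rescaled before being reported to the framework)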
7029 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7030 scalerCropRegion[2], scalerCropRegion[3]);
7031
7032 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7033 }
7034
7035 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7036 LOGD("sensorExpTime = %lld", *sensorExpTime);
7037 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7038 }
7039
7040    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7041            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7042        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7043        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7044 }
7045
7046 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7047 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7048 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7049 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7050 sensorRollingShutterSkew, 1);
7051 }
7052
7053 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7054 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7055 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7056
7057 //calculate the noise profile based on sensitivity
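        // ANDROID_SENSOR_NOISE_PROFILE is reported as one (S, O) pair per color channel,
        // modelling the noise variance of a pixel value x as approximately S * x + O;
        // S and O are derived from the per-frame sensitivity by the helpers below.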
7058 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7059 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7060 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7061 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7062 noise_profile[i] = noise_profile_S;
7063 noise_profile[i+1] = noise_profile_O;
7064 }
7065 LOGD("noise model entry (S, O) is (%f, %f)",
7066 noise_profile_S, noise_profile_O);
7067 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7068 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7069 }
7070
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007071#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007072 int32_t fwk_ispSensitivity = 100;
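    // ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST uses 100 to mean no boost; start from
    // that default, replace it with the ISP sensitivity when available, and scale it by
    // the post-stats sensitivity factor when the ISP reports one.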
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007073 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007074 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007075 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007076 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7077 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7078 }
7079 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007080#endif
7081
Thierry Strudel3d639192016-09-09 11:52:26 -07007082 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7083 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7084 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7085 }
7086
7087 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7088 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7089 *faceDetectMode);
7090 if (NAME_NOT_FOUND != val) {
7091 uint8_t fwk_faceDetectMode = (uint8_t)val;
7092 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7093
7094 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7095 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7096 CAM_INTF_META_FACE_DETECTION, metadata) {
7097 uint8_t numFaces = MIN(
7098 faceDetectionInfo->num_faces_detected, MAX_ROI);
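                    // Clamp to MAX_ROI so the fixed-size result arrays below cannot be overrun.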
7099 int32_t faceIds[MAX_ROI];
7100 uint8_t faceScores[MAX_ROI];
7101 int32_t faceRectangles[MAX_ROI * 4];
7102 int32_t faceLandmarks[MAX_ROI * 6];
7103 size_t j = 0, k = 0;
7104
7105 for (size_t i = 0; i < numFaces; i++) {
7106 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7107 // Adjust crop region from sensor output coordinate system to active
7108 // array coordinate system.
7109 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7110 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7111 rect.width, rect.height);
7112
7113 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7114 faceRectangles+j, -1);
7115
Jason Lee8ce36fa2017-04-19 19:40:37 -07007116 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7117 "bottom-right (%d, %d)",
7118 faceDetectionInfo->frame_id, i,
7119 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7120 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7121
Thierry Strudel3d639192016-09-09 11:52:26 -07007122 j+= 4;
7123 }
7124 if (numFaces <= 0) {
7125 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7126 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7127 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7128 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7129 }
7130
7131 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7132 numFaces);
7133 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7134 faceRectangles, numFaces * 4U);
7135 if (fwk_faceDetectMode ==
7136 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7137 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7138 CAM_INTF_META_FACE_LANDMARK, metadata) {
7139
7140 for (size_t i = 0; i < numFaces; i++) {
7141 // Map the co-ordinate sensor output coordinate system to active
7142 // array coordinate system.
7143 mCropRegionMapper.toActiveArray(
7144 landmarks->face_landmarks[i].left_eye_center.x,
7145 landmarks->face_landmarks[i].left_eye_center.y);
7146 mCropRegionMapper.toActiveArray(
7147 landmarks->face_landmarks[i].right_eye_center.x,
7148 landmarks->face_landmarks[i].right_eye_center.y);
7149 mCropRegionMapper.toActiveArray(
7150 landmarks->face_landmarks[i].mouth_center.x,
7151 landmarks->face_landmarks[i].mouth_center.y);
7152
7153 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007154
7155 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7156 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7157 faceDetectionInfo->frame_id, i,
7158 faceLandmarks[k + LEFT_EYE_X],
7159 faceLandmarks[k + LEFT_EYE_Y],
7160 faceLandmarks[k + RIGHT_EYE_X],
7161 faceLandmarks[k + RIGHT_EYE_Y],
7162 faceLandmarks[k + MOUTH_X],
7163 faceLandmarks[k + MOUTH_Y]);
7164
Thierry Strudel04e026f2016-10-10 11:27:36 -07007165 k+= TOTAL_LANDMARK_INDICES;
7166 }
7167 } else {
7168 for (size_t i = 0; i < numFaces; i++) {
7169 setInvalidLandmarks(faceLandmarks+k);
7170 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007171 }
7172 }
7173
Jason Lee49619db2017-04-13 12:07:22 -07007174 for (size_t i = 0; i < numFaces; i++) {
7175 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7176
7177 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7178 faceDetectionInfo->frame_id, i, faceIds[i]);
7179 }
7180
Thierry Strudel3d639192016-09-09 11:52:26 -07007181 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7182 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7183 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007184 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007185 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7186 CAM_INTF_META_FACE_BLINK, metadata) {
7187 uint8_t detected[MAX_ROI];
7188 uint8_t degree[MAX_ROI * 2];
7189 for (size_t i = 0; i < numFaces; i++) {
7190 detected[i] = blinks->blink[i].blink_detected;
7191 degree[2 * i] = blinks->blink[i].left_blink;
7192 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007193
Jason Lee49619db2017-04-13 12:07:22 -07007194 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7195 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7196 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7197 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007198 }
7199 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7200 detected, numFaces);
7201 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7202 degree, numFaces * 2);
7203 }
7204 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7205 CAM_INTF_META_FACE_SMILE, metadata) {
7206 uint8_t degree[MAX_ROI];
7207 uint8_t confidence[MAX_ROI];
7208 for (size_t i = 0; i < numFaces; i++) {
7209 degree[i] = smiles->smile[i].smile_degree;
7210 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007211
Jason Lee49619db2017-04-13 12:07:22 -07007212 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7213 "smile_degree=%d, smile_score=%d",
7214 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007215 }
7216 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7217 degree, numFaces);
7218 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7219 confidence, numFaces);
7220 }
7221 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7222 CAM_INTF_META_FACE_GAZE, metadata) {
7223 int8_t angle[MAX_ROI];
7224 int32_t direction[MAX_ROI * 3];
7225 int8_t degree[MAX_ROI * 2];
7226 for (size_t i = 0; i < numFaces; i++) {
7227 angle[i] = gazes->gaze[i].gaze_angle;
7228 direction[3 * i] = gazes->gaze[i].updown_dir;
7229 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7230 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7231 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7232 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007233
7234 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7235                            "updown_dir=%d, leftright_dir=%d, roll_dir=%d, "
7236 "left_right_gaze=%d, top_bottom_gaze=%d",
7237 faceDetectionInfo->frame_id, i, angle[i],
7238 direction[3 * i], direction[3 * i + 1],
7239 direction[3 * i + 2],
7240 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007241 }
7242 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7243 (uint8_t *)angle, numFaces);
7244 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7245 direction, numFaces * 3);
7246 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7247 (uint8_t *)degree, numFaces * 2);
7248 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007249 }
7250 }
7251 }
7252 }
7253
7254 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7255 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007256 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007257 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007258 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007259
Shuzhen Wang14415f52016-11-16 18:26:18 -08007260 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7261 histogramBins = *histBins;
7262 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7263 }
7264
7265 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007266 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7267 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007268 int32_t* histogramData = NULL;
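                // Only one histogram buffer is reported: the GR/GB/B Bayer channels map
                // directly, while Y, ALL, R and any unrecognized channel fall back to the
                // R-channel buffer.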
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007269
7270 switch (stats_data->type) {
7271 case CAM_HISTOGRAM_TYPE_BAYER:
7272 switch (stats_data->bayer_stats.data_type) {
7273 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007274 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7275 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007276 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007277 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7278 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007279 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007280 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7281 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007282 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007283 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007284 case CAM_STATS_CHANNEL_R:
7285 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007286 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7287 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007288 }
7289 break;
7290 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007291 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007292 break;
7293 }
7294
Shuzhen Wang14415f52016-11-16 18:26:18 -08007295 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007296 }
7297 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007298 }
7299
7300 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7301 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7302 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7303 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7304 }
7305
7306 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7307 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7308 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7309 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7310 }
7311
7312 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7313 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7314 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7315 CAM_MAX_SHADING_MAP_HEIGHT);
7316 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7317 CAM_MAX_SHADING_MAP_WIDTH);
7318 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7319 lensShadingMap->lens_shading, 4U * map_width * map_height);
7320 }
7321
7322 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7323 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7324 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7325 }
7326
7327 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7328 //Populate CAM_INTF_META_TONEMAP_CURVES
7329 /* ch0 = G, ch 1 = B, ch 2 = R*/
7330 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7331 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7332 tonemap->tonemap_points_cnt,
7333 CAM_MAX_TONEMAP_CURVE_SIZE);
7334 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7335 }
7336
7337 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7338 &tonemap->curves[0].tonemap_points[0][0],
7339 tonemap->tonemap_points_cnt * 2);
7340
7341 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7342 &tonemap->curves[1].tonemap_points[0][0],
7343 tonemap->tonemap_points_cnt * 2);
7344
7345 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7346 &tonemap->curves[2].tonemap_points[0][0],
7347 tonemap->tonemap_points_cnt * 2);
7348 }
7349
7350 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7351 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7352 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7353 CC_GAIN_MAX);
7354 }
7355
7356 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7357 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7358 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7359 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7360 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7361 }
7362
7363 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7364 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7365 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7366 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7367 toneCurve->tonemap_points_cnt,
7368 CAM_MAX_TONEMAP_CURVE_SIZE);
7369 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7370 }
7371 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7372 (float*)toneCurve->curve.tonemap_points,
7373 toneCurve->tonemap_points_cnt * 2);
7374 }
7375
7376 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7377 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7378 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7379 predColorCorrectionGains->gains, 4);
7380 }
7381
7382 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7383 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7384 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7385 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7386 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7387 }
7388
7389 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7390 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7391 }
7392
7393 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7394 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7395 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7396 }
7397
7398 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7399 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7400 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7401 }
7402
7403 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7404 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7405 *effectMode);
7406 if (NAME_NOT_FOUND != val) {
7407 uint8_t fwk_effectMode = (uint8_t)val;
7408 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7409 }
7410 }
7411
7412 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7413 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7414 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7415 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7416 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7417 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7418 }
7419 int32_t fwk_testPatternData[4];
7420 fwk_testPatternData[0] = testPatternData->r;
7421 fwk_testPatternData[3] = testPatternData->b;
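        // The framework expects test pattern data in [R, Geven, Godd, B] order; which of
        // the HAL's gr/gb values lands in the even-row green slot depends on the color
        // filter arrangement, hence the switch below.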
7422 switch (gCamCapability[mCameraId]->color_arrangement) {
7423 case CAM_FILTER_ARRANGEMENT_RGGB:
7424 case CAM_FILTER_ARRANGEMENT_GRBG:
7425 fwk_testPatternData[1] = testPatternData->gr;
7426 fwk_testPatternData[2] = testPatternData->gb;
7427 break;
7428 case CAM_FILTER_ARRANGEMENT_GBRG:
7429 case CAM_FILTER_ARRANGEMENT_BGGR:
7430 fwk_testPatternData[2] = testPatternData->gr;
7431 fwk_testPatternData[1] = testPatternData->gb;
7432 break;
7433 default:
7434 LOGE("color arrangement %d is not supported",
7435 gCamCapability[mCameraId]->color_arrangement);
7436 break;
7437 }
7438 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7439 }
7440
7441 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7442 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7443 }
7444
7445 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7446 String8 str((const char *)gps_methods);
7447 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7448 }
7449
7450 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7451 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7452 }
7453
7454 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7455 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7456 }
7457
7458 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7459 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7460 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7461 }
7462
7463 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7464 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7465 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7466 }
7467
7468 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7469 int32_t fwk_thumb_size[2];
7470 fwk_thumb_size[0] = thumb_size->width;
7471 fwk_thumb_size[1] = thumb_size->height;
7472 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7473 }
7474
7475 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7476 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7477 privateData,
7478 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7479 }
7480
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007481 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007482 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007483 meteringMode, 1);
7484 }
7485
Thierry Strudel54dc9782017-02-15 12:12:10 -08007486 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7487 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7488 LOGD("hdr_scene_data: %d %f\n",
7489 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7490 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7491 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7492 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7493 &isHdr, 1);
7494 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7495 &isHdrConfidence, 1);
7496 }
7497
7498
7499
Thierry Strudel3d639192016-09-09 11:52:26 -07007500 if (metadata->is_tuning_params_valid) {
7501 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7502 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7503 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7504
7505
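        // Blob layout: six uint32 header words (data version followed by the sensor, VFE,
        // CPP, CAC and mod3 section sizes), then the sensor, VFE, CPP and CAC payloads,
        // each clamped to its TUNING_*_DATA_MAX limit.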
7506 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7507 sizeof(uint32_t));
7508 data += sizeof(uint32_t);
7509
7510 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7511 sizeof(uint32_t));
7512 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7513 data += sizeof(uint32_t);
7514
7515 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7516 sizeof(uint32_t));
7517 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7518 data += sizeof(uint32_t);
7519
7520 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7521 sizeof(uint32_t));
7522 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7523 data += sizeof(uint32_t);
7524
7525 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7526 sizeof(uint32_t));
7527 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7528 data += sizeof(uint32_t);
7529
7530 metadata->tuning_params.tuning_mod3_data_size = 0;
7531 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7532 sizeof(uint32_t));
7533 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7534 data += sizeof(uint32_t);
7535
7536 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7537 TUNING_SENSOR_DATA_MAX);
7538 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7539 count);
7540 data += count;
7541
7542 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7543 TUNING_VFE_DATA_MAX);
7544 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7545 count);
7546 data += count;
7547
7548 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7549 TUNING_CPP_DATA_MAX);
7550 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7551 count);
7552 data += count;
7553
7554 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7555 TUNING_CAC_DATA_MAX);
7556 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7557 count);
7558 data += count;
7559
7560 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7561 (int32_t *)(void *)tuning_meta_data_blob,
7562 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7563 }
7564
7565 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7566 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7567 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7568 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7569 NEUTRAL_COL_POINTS);
7570 }
7571
7572 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7573 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7574 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7575 }
7576
7577 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7578 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7579 // Adjust crop region from sensor output coordinate system to active
7580 // array coordinate system.
7581 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7582 hAeRegions->rect.width, hAeRegions->rect.height);
7583
7584 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7585 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7586 REGIONS_TUPLE_COUNT);
7587 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7588 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7589 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7590 hAeRegions->rect.height);
7591 }
7592
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007593 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7594 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7595 if (NAME_NOT_FOUND != val) {
7596 uint8_t fwkAfMode = (uint8_t)val;
7597 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7598 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7599 } else {
7600 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7601 val);
7602 }
7603 }
7604
Thierry Strudel3d639192016-09-09 11:52:26 -07007605 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7606 uint8_t fwk_afState = (uint8_t) *afState;
7607 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007608 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007609 }
7610
7611 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7612 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7613 }
7614
7615 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7616 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7617 }
7618
7619 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7620 uint8_t fwk_lensState = *lensState;
7621 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7622 }
7623
Thierry Strudel3d639192016-09-09 11:52:26 -07007624
7625 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007626 uint32_t ab_mode = *hal_ab_mode;
7627 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7628 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7629 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7630 }
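        // The framework defines only a single AUTO antibanding mode, so the HAL's
        // region-specific 50Hz/60Hz auto variants are collapsed to AUTO before the lookup.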
Thierry Strudel3d639192016-09-09 11:52:26 -07007631 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007632 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007633 if (NAME_NOT_FOUND != val) {
7634 uint8_t fwk_ab_mode = (uint8_t)val;
7635 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7636 }
7637 }
7638
7639 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7640 int val = lookupFwkName(SCENE_MODES_MAP,
7641 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7642 if (NAME_NOT_FOUND != val) {
7643 uint8_t fwkBestshotMode = (uint8_t)val;
7644 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7645 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7646 } else {
7647 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7648 }
7649 }
7650
7651 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7652 uint8_t fwk_mode = (uint8_t) *mode;
7653 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7654 }
7655
7656    /* Constant metadata values to be updated */
7657 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7658 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7659
7660 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7661 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7662
7663 int32_t hotPixelMap[2];
7664 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
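    // Publish an empty (zero-entry) hot pixel map; individual defective pixels are not
    // reported by this HAL.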
7665
7666 // CDS
7667 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7668 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7669 }
7670
Thierry Strudel04e026f2016-10-10 11:27:36 -07007671 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7672 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007673 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007674 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7675 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7676 } else {
7677 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7678 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007679
7680 if(fwk_hdr != curr_hdr_state) {
7681 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7682 if(fwk_hdr)
7683 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7684 else
7685 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7686 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007687 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7688 }
7689
Thierry Strudel54dc9782017-02-15 12:12:10 -08007690 //binning correction
7691 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7692 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7693 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7694 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7695 }
7696
Thierry Strudel04e026f2016-10-10 11:27:36 -07007697 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007698 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007699 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7700 int8_t is_ir_on = 0;
7701
7702        is_ir_on = (fwk_ir > 0) ? 1 : 0;
7703 if(is_ir_on != curr_ir_state) {
7704 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7705 if(is_ir_on)
7706 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7707 else
7708 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7709 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007710 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007711 }
7712
Thierry Strudel269c81a2016-10-12 12:13:59 -07007713 // AEC SPEED
7714 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7715 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7716 }
7717
7718 // AWB SPEED
7719 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7720 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7721 }
7722
Thierry Strudel3d639192016-09-09 11:52:26 -07007723 // TNR
7724 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7725 uint8_t tnr_enable = tnr->denoise_enable;
7726 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007727 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7728 int8_t is_tnr_on = 0;
7729
7730        is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7731 if(is_tnr_on != curr_tnr_state) {
7732 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7733 if(is_tnr_on)
7734 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7735 else
7736 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7737 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007738
7739 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7740 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7741 }
7742
7743 // Reprocess crop data
7744 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7745 uint8_t cnt = crop_data->num_of_streams;
7746 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7747 // mm-qcamera-daemon only posts crop_data for streams
7748            // not linked to pproc, so the absence of valid crop metadata is not
7749            // necessarily an error.
7750 LOGD("No valid crop metadata entries");
7751 } else {
7752 uint32_t reproc_stream_id;
7753 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7754 LOGD("No reprocessible stream found, ignore crop data");
7755 } else {
7756 int rc = NO_ERROR;
7757 Vector<int32_t> roi_map;
7758 int32_t *crop = new int32_t[cnt*4];
7759 if (NULL == crop) {
7760 rc = NO_MEMORY;
7761 }
7762 if (NO_ERROR == rc) {
7763 int32_t streams_found = 0;
7764 for (size_t i = 0; i < cnt; i++) {
7765 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7766 if (pprocDone) {
7767 // HAL already does internal reprocessing,
7768 // either via reprocessing before JPEG encoding,
7769 // or offline postprocessing for pproc bypass case.
7770 crop[0] = 0;
7771 crop[1] = 0;
7772 crop[2] = mInputStreamInfo.dim.width;
7773 crop[3] = mInputStreamInfo.dim.height;
7774 } else {
7775 crop[0] = crop_data->crop_info[i].crop.left;
7776 crop[1] = crop_data->crop_info[i].crop.top;
7777 crop[2] = crop_data->crop_info[i].crop.width;
7778 crop[3] = crop_data->crop_info[i].crop.height;
7779 }
7780 roi_map.add(crop_data->crop_info[i].roi_map.left);
7781 roi_map.add(crop_data->crop_info[i].roi_map.top);
7782 roi_map.add(crop_data->crop_info[i].roi_map.width);
7783 roi_map.add(crop_data->crop_info[i].roi_map.height);
7784 streams_found++;
7785 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7786 crop[0], crop[1], crop[2], crop[3]);
7787 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7788 crop_data->crop_info[i].roi_map.left,
7789 crop_data->crop_info[i].roi_map.top,
7790 crop_data->crop_info[i].roi_map.width,
7791 crop_data->crop_info[i].roi_map.height);
7792 break;
7793
7794 }
7795 }
7796 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7797 &streams_found, 1);
7798 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7799 crop, (size_t)(streams_found * 4));
7800 if (roi_map.array()) {
7801 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7802 roi_map.array(), roi_map.size());
7803 }
7804 }
7805 if (crop) {
7806 delete [] crop;
7807 }
7808 }
7809 }
7810 }
7811
7812 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7813        // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7814        // so hardcode the CAC result to OFF mode.
7815 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7816 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7817 } else {
7818 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7819 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7820 *cacMode);
7821 if (NAME_NOT_FOUND != val) {
7822 uint8_t resultCacMode = (uint8_t)val;
7823 // check whether CAC result from CB is equal to Framework set CAC mode
7824 // If not equal then set the CAC mode came in corresponding request
7825 if (fwk_cacMode != resultCacMode) {
7826 resultCacMode = fwk_cacMode;
7827 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007828 //Check if CAC is disabled by property
7829 if (m_cacModeDisabled) {
7830 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7831 }
7832
Thierry Strudel3d639192016-09-09 11:52:26 -07007833 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7834 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7835 } else {
7836 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7837 }
7838 }
7839 }
7840
7841 // Post blob of cam_cds_data through vendor tag.
7842 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7843 uint8_t cnt = cdsInfo->num_of_streams;
7844 cam_cds_data_t cdsDataOverride;
7845 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7846 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7847 cdsDataOverride.num_of_streams = 1;
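        // Only the CDS setting of the reprocessible output stream is forwarded, so the
        // override blob always carries exactly one stream entry.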
7848 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7849 uint32_t reproc_stream_id;
7850 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7851 LOGD("No reprocessible stream found, ignore cds data");
7852 } else {
7853 for (size_t i = 0; i < cnt; i++) {
7854 if (cdsInfo->cds_info[i].stream_id ==
7855 reproc_stream_id) {
7856 cdsDataOverride.cds_info[0].cds_enable =
7857 cdsInfo->cds_info[i].cds_enable;
7858 break;
7859 }
7860 }
7861 }
7862 } else {
7863 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7864 }
7865 camMetadata.update(QCAMERA3_CDS_INFO,
7866 (uint8_t *)&cdsDataOverride,
7867 sizeof(cam_cds_data_t));
7868 }
7869
7870 // Ldaf calibration data
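    // Captured once per session and cached in mLdafCalib; per the CAM_INTF_META_LDAF_EXIF
    // tag name, these values presumably feed the EXIF path.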
7871 if (!mLdafCalibExist) {
7872 IF_META_AVAILABLE(uint32_t, ldafCalib,
7873 CAM_INTF_META_LDAF_EXIF, metadata) {
7874 mLdafCalibExist = true;
7875 mLdafCalib[0] = ldafCalib[0];
7876 mLdafCalib[1] = ldafCalib[1];
7877 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7878 ldafCalib[0], ldafCalib[1]);
7879 }
7880 }
7881
Thierry Strudel54dc9782017-02-15 12:12:10 -08007882 // EXIF debug data through vendor tag
7883 /*
7884 * Mobicat Mask can assume 3 values:
7885 * 1 refers to Mobicat data,
7886 * 2 refers to Stats Debug and Exif Debug Data
7887 * 3 refers to Mobicat and Stats Debug Data
7888 * We want to make sure that we are sending Exif debug data
7889 * only when Mobicat Mask is 2.
7890 */
7891 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7892 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7893 (uint8_t *)(void *)mExifParams.debug_params,
7894 sizeof(mm_jpeg_debug_exif_params_t));
7895 }
7896
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007897 // Reprocess and DDM debug data through vendor tag
7898 cam_reprocess_info_t repro_info;
7899 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007900 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7901 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007902 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007903 }
7904 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7905 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007906 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007907 }
7908 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7909 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007910 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007911 }
7912 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7913 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007914 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007915 }
7916 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7917 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007918 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007919 }
7920 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007921 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007922 }
7923 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7924 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007925 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007926 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007927 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7928 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7929 }
7930 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7931 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7932 }
7933 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7934 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007935
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007936 // INSTANT AEC MODE
7937 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7938 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7939 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7940 }
7941
Shuzhen Wange763e802016-03-31 10:24:29 -07007942 // AF scene change
7943 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7944 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7945 }
7946
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07007947 // Enable ZSL
7948 if (enableZsl != nullptr) {
7949 uint8_t value = *enableZsl ?
7950 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
7951 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
7952 }
7953
Thierry Strudel3d639192016-09-09 11:52:26 -07007954 resultMetadata = camMetadata.release();
7955 return resultMetadata;
7956}
7957
7958/*===========================================================================
7959 * FUNCTION : saveExifParams
7960 *
7961 * DESCRIPTION: Cache 3A and stats EXIF debug parameters from the metadata callback into mExifParams
7962 *
7963 * PARAMETERS :
7964 * @metadata : metadata information from callback
7965 *
7966 * RETURN : none
7967 *
7968 *==========================================================================*/
7969void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7970{
7971 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7972 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7973 if (mExifParams.debug_params) {
7974 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7975 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7976 }
7977 }
7978 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7979 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7980 if (mExifParams.debug_params) {
7981 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7982 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7983 }
7984 }
7985 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7986 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7987 if (mExifParams.debug_params) {
7988 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7989 mExifParams.debug_params->af_debug_params_valid = TRUE;
7990 }
7991 }
7992 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7993 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7994 if (mExifParams.debug_params) {
7995 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7996 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7997 }
7998 }
7999 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8000 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8001 if (mExifParams.debug_params) {
8002 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8003 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8004 }
8005 }
8006 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8007 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8008 if (mExifParams.debug_params) {
8009 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8010 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8011 }
8012 }
8013 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8014 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8015 if (mExifParams.debug_params) {
8016 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8017 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8018 }
8019 }
8020 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8021 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8022 if (mExifParams.debug_params) {
8023 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8024 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8025 }
8026 }
8027}
8028
8029/*===========================================================================
8030 * FUNCTION : get3AExifParams
8031 *
8032 * DESCRIPTION: Return the cached EXIF parameters, including any 3A debug data saved by saveExifParams()
8033 *
8034 * PARAMETERS : none
8035 *
8036 *
8037 * RETURN : mm_jpeg_exif_params_t
8038 *
8039 *==========================================================================*/
8040mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8041{
8042 return mExifParams;
8043}
8044
8045/*===========================================================================
8046 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8047 *
8048 * DESCRIPTION: Translate urgent (partial result) metadata from the camera backend into framework result metadata
8049 *
8050 * PARAMETERS :
8051 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008052 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8053 * urgent metadata in a batch. Always true for
8054 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008055 *
8056 * RETURN : camera_metadata_t*
8057 * metadata in a format specified by fwk
8058 *==========================================================================*/
8059camera_metadata_t*
8060QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008061 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008062{
8063 CameraMetadata camMetadata;
8064 camera_metadata_t *resultMetadata;
8065
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008066 if (!lastUrgentMetadataInBatch) {
8067 /* In batch mode, use empty metadata if this is not the last in batch
8068 */
8069 resultMetadata = allocate_camera_metadata(0, 0);
8070 return resultMetadata;
8071 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008072
8073 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8074 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8075 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8076 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8077 }
8078
8079 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8080 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8081 &aecTrigger->trigger, 1);
8082 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8083 &aecTrigger->trigger_id, 1);
8084 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8085 aecTrigger->trigger);
8086 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8087 aecTrigger->trigger_id);
8088 }
8089
8090 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8091 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8092 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8093 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8094 }
8095
Thierry Strudel3d639192016-09-09 11:52:26 -07008096 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8097 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8098 &af_trigger->trigger, 1);
8099 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8100 af_trigger->trigger);
8101 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8102 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8103 af_trigger->trigger_id);
8104 }
8105
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008106 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8107 /*af regions*/
8108 int32_t afRegions[REGIONS_TUPLE_COUNT];
8109 // Adjust crop region from sensor output coordinate system to active
8110 // array coordinate system.
8111 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8112 hAfRegions->rect.width, hAfRegions->rect.height);
8113
8114 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8115 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8116 REGIONS_TUPLE_COUNT);
8117 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8118 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8119 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8120 hAfRegions->rect.height);
8121 }
8122
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008123 // AF region confidence
8124 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8125 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8126 }
8127
Thierry Strudel3d639192016-09-09 11:52:26 -07008128 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8129 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8130 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8131 if (NAME_NOT_FOUND != val) {
8132 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8133 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8134 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8135 } else {
8136 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8137 }
8138 }
8139
8140 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8141 uint32_t aeMode = CAM_AE_MODE_MAX;
8142 int32_t flashMode = CAM_FLASH_MODE_MAX;
8143 int32_t redeye = -1;
8144 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8145 aeMode = *pAeMode;
8146 }
8147 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8148 flashMode = *pFlashMode;
8149 }
8150 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8151 redeye = *pRedeye;
8152 }
8153
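    // Deduce the AE mode reported to the framework in priority order: red-eye
    // reduction first, then auto/on flash modes, then plain AE on/off, and finally
    // the external-flash vendor extension; log an error if nothing matches.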
8154 if (1 == redeye) {
8155 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8156 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8157 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8158 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8159 flashMode);
8160 if (NAME_NOT_FOUND != val) {
8161 fwk_aeMode = (uint8_t)val;
8162 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8163 } else {
8164 LOGE("Unsupported flash mode %d", flashMode);
8165 }
8166 } else if (aeMode == CAM_AE_MODE_ON) {
8167 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8168 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8169 } else if (aeMode == CAM_AE_MODE_OFF) {
8170 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8171 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008172 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8173 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8174 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008175 } else {
8176 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8177 "flashMode:%d, aeMode:%u!!!",
8178 redeye, flashMode, aeMode);
8179 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008180 if (mInstantAEC) {
8181        // Increment frame index count until a bound is reached for instant AEC.
8182 mInstantAecFrameIdxCount++;
8183 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8184 CAM_INTF_META_AEC_INFO, metadata) {
8185 LOGH("ae_params->settled = %d",ae_params->settled);
8186 // If AEC settled, or if number of frames reached bound value,
8187 // should reset instant AEC.
8188 if (ae_params->settled ||
8189 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8190 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8191 mInstantAEC = false;
8192 mResetInstantAEC = true;
8193 mInstantAecFrameIdxCount = 0;
8194 }
8195 }
8196 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008197 resultMetadata = camMetadata.release();
8198 return resultMetadata;
8199}
8200
8201/*===========================================================================
8202 * FUNCTION : dumpMetadataToFile
8203 *
8204 * DESCRIPTION: Dumps tuning metadata to file system
8205 *
8206 * PARAMETERS :
8207 * @meta : tuning metadata
8208 * @dumpFrameCount : current dump frame count
8209 * @enabled : Enable mask
8210 *
8211 *==========================================================================*/
8212void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8213 uint32_t &dumpFrameCount,
8214 bool enabled,
8215 const char *type,
8216 uint32_t frameNumber)
8217{
8218 //Some sanity checks
8219 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8220 LOGE("Tuning sensor data size bigger than expected %d: %d",
8221 meta.tuning_sensor_data_size,
8222 TUNING_SENSOR_DATA_MAX);
8223 return;
8224 }
8225
8226 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8227 LOGE("Tuning VFE data size bigger than expected %d: %d",
8228 meta.tuning_vfe_data_size,
8229 TUNING_VFE_DATA_MAX);
8230 return;
8231 }
8232
8233 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8234 LOGE("Tuning CPP data size bigger than expected %d: %d",
8235 meta.tuning_cpp_data_size,
8236 TUNING_CPP_DATA_MAX);
8237 return;
8238 }
8239
8240 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8241 LOGE("Tuning CAC data size bigger than expected %d: %d",
8242 meta.tuning_cac_data_size,
8243 TUNING_CAC_DATA_MAX);
8244 return;
8245 }
8246 //
8247
8248 if(enabled){
8249 char timeBuf[FILENAME_MAX];
8250 char buf[FILENAME_MAX];
8251 memset(buf, 0, sizeof(buf));
8252 memset(timeBuf, 0, sizeof(timeBuf));
8253 time_t current_time;
8254 struct tm * timeinfo;
8255 time (&current_time);
8256 timeinfo = localtime (&current_time);
8257 if (timeinfo != NULL) {
8258 strftime (timeBuf, sizeof(timeBuf),
8259 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8260 }
8261 String8 filePath(timeBuf);
8262 snprintf(buf,
8263 sizeof(buf),
8264 "%dm_%s_%d.bin",
8265 dumpFrameCount,
8266 type,
8267 frameNumber);
8268 filePath.append(buf);
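        // Resulting dump path: QCAMERA_DUMP_FRM_LOCATION<YYYYmmddHHMMSS><dumpFrameCount>m_<type>_<frameNumber>.bin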
8269 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8270 if (file_fd >= 0) {
8271 ssize_t written_len = 0;
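            // Dump layout: tuning_data_version, then the five 32-bit section sizes
            // (sensor, VFE, CPP, CAC, mod3), followed by the sensor, VFE, CPP and CAC
            // payloads copied from their fixed offsets within meta.data.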
8272 meta.tuning_data_version = TUNING_DATA_VERSION;
8273 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8274 written_len += write(file_fd, data, sizeof(uint32_t));
8275 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8276 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8277 written_len += write(file_fd, data, sizeof(uint32_t));
8278 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8279 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8280 written_len += write(file_fd, data, sizeof(uint32_t));
8281 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8282 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8283 written_len += write(file_fd, data, sizeof(uint32_t));
8284 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8285 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8286 written_len += write(file_fd, data, sizeof(uint32_t));
8287 meta.tuning_mod3_data_size = 0;
8288 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8289 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8290 written_len += write(file_fd, data, sizeof(uint32_t));
8291 size_t total_size = meta.tuning_sensor_data_size;
8292 data = (void *)((uint8_t *)&meta.data);
8293 written_len += write(file_fd, data, total_size);
8294 total_size = meta.tuning_vfe_data_size;
8295 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8296 written_len += write(file_fd, data, total_size);
8297 total_size = meta.tuning_cpp_data_size;
8298 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8299 written_len += write(file_fd, data, total_size);
8300 total_size = meta.tuning_cac_data_size;
8301 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8302 written_len += write(file_fd, data, total_size);
8303 close(file_fd);
8304        } else {
8305 LOGE("fail to open file for metadata dumping");
8306 }
8307 }
8308}
8309
8310/*===========================================================================
8311 * FUNCTION : cleanAndSortStreamInfo
8312 *
8313 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8314 *              and sort them such that the raw stream is at the end of the list.
8315 *              This is a workaround for a camera daemon constraint.
8316 *
8317 * PARAMETERS : None
8318 *
8319 *==========================================================================*/
8320void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8321{
8322 List<stream_info_t *> newStreamInfo;
8323
8324 /*clean up invalid streams*/
8325 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8326 it != mStreamInfo.end();) {
8327 if(((*it)->status) == INVALID){
8328 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8329 delete channel;
8330 free(*it);
8331 it = mStreamInfo.erase(it);
8332 } else {
8333 it++;
8334 }
8335 }
8336
8337 // Move preview/video/callback/snapshot streams into newList
8338 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8339 it != mStreamInfo.end();) {
8340 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8341 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8342 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8343 newStreamInfo.push_back(*it);
8344 it = mStreamInfo.erase(it);
8345 } else
8346 it++;
8347 }
8348 // Move raw streams into newList
8349 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8350 it != mStreamInfo.end();) {
8351 newStreamInfo.push_back(*it);
8352 it = mStreamInfo.erase(it);
8353 }
8354
8355 mStreamInfo = newStreamInfo;
8356}
8357
8358/*===========================================================================
8359 * FUNCTION : extractJpegMetadata
8360 *
8361 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8362 * JPEG metadata is cached in HAL, and return as part of capture
8363 *              JPEG metadata is cached in the HAL and returned as part of the capture
8364 *
8365 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8366 * @request: capture request
8367 *
8368 *==========================================================================*/
8369void QCamera3HardwareInterface::extractJpegMetadata(
8370 CameraMetadata& jpegMetadata,
8371 const camera3_capture_request_t *request)
8372{
8373 CameraMetadata frame_settings;
8374 frame_settings = request->settings;
8375
8376 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8377 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8378 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8379 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8380
8381 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8382 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8383 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8384 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8385
8386 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8387 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8388 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8389 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8390
8391 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8392 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8393 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8394 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8395
8396 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8397 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8398 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8399 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8400
8401 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8402 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8403 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8404 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8405
8406 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8407 int32_t thumbnail_size[2];
8408 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8409 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8410 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8411 int32_t orientation =
8412 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008413 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008414 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8415 int32_t temp;
8416 temp = thumbnail_size[0];
8417 thumbnail_size[0] = thumbnail_size[1];
8418 thumbnail_size[1] = temp;
8419 }
8420 }
8421 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8422 thumbnail_size,
8423 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8424 }
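        // Illustrative example (values hypothetical): with ANDROID_JPEG_ORIENTATION = 90
        // and a requested thumbnail of 320x240, the cached thumbnail size becomes 240x320,
        // since the encoder performs the rotation when needJpegExifRotation() is false.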
8425
8426}
8427
8428/*===========================================================================
8429 * FUNCTION : convertToRegions
8430 *
8431 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8432 *
8433 * PARAMETERS :
8434 * @rect : cam_rect_t struct to convert
8435 * @region : int32_t destination array
8436 * @weight : if we are converting from cam_area_t, weight is valid
8437 * else weight = -1
8438 *
8439 *==========================================================================*/
8440void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8441 int32_t *region, int weight)
8442{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008443 region[FACE_LEFT] = rect.left;
8444 region[FACE_TOP] = rect.top;
8445 region[FACE_RIGHT] = rect.left + rect.width;
8446 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008447 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008448 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008449 }
8450}
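// Illustrative example (values hypothetical): a rect of {left=100, top=200, width=300,
// height=400} with weight 1 produces the framework region tuple {100, 200, 400, 600, 1},
// i.e. {x_min, y_min, x_max, y_max, weight}.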
8451
8452/*===========================================================================
8453 * FUNCTION : convertFromRegions
8454 *
8455 * DESCRIPTION: helper method to convert a region tuple from frame settings into cam_area_t
8456 *
8457 * PARAMETERS :
8458 *   @roi            : cam_area_t destination to populate
8459 *   @frame_settings : capture request settings containing the region entry
8460 *   @tag            : metadata tag holding the region tuple
8461 *                     (x_min, y_min, x_max, y_max, weight)
8462 *
8463 *==========================================================================*/
8464void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008465 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008466{
Thierry Strudel3d639192016-09-09 11:52:26 -07008467 int32_t x_min = frame_settings.find(tag).data.i32[0];
8468 int32_t y_min = frame_settings.find(tag).data.i32[1];
8469 int32_t x_max = frame_settings.find(tag).data.i32[2];
8470 int32_t y_max = frame_settings.find(tag).data.i32[3];
8471 roi.weight = frame_settings.find(tag).data.i32[4];
8472 roi.rect.left = x_min;
8473 roi.rect.top = y_min;
8474 roi.rect.width = x_max - x_min;
8475 roi.rect.height = y_max - y_min;
8476}
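// Illustrative example (values hypothetical): a framework tuple {100, 200, 400, 600, 1}
// read from frame_settings yields roi.rect = {left=100, top=200, width=300, height=400}
// and roi.weight = 1, the inverse of convertToRegions() above.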
8477
8478/*===========================================================================
8479 * FUNCTION : resetIfNeededROI
8480 *
8481 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8482 * crop region
8483 *
8484 * PARAMETERS :
8485 * @roi : cam_area_t struct to resize
8486 * @scalerCropRegion : cam_crop_region_t region to compare against
8487 *
8488 *
8489 *==========================================================================*/
8490bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8491 const cam_crop_region_t* scalerCropRegion)
8492{
8493 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8494 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8495 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8496 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8497
8498    /* According to the spec, weight = 0 indicates that the roi should be disabled.
8499     * Without this check, the validation below (whether the roi lies inside the
8500     * scaler crop region) would fail, the roi would not be reset, and the
8501     * algorithm would continue to use a stale roi window
8502 */
8503 if (roi->weight == 0) {
8504 return true;
8505 }
8506
8507 if ((roi_x_max < scalerCropRegion->left) ||
8508 // right edge of roi window is left of scalar crop's left edge
8509 (roi_y_max < scalerCropRegion->top) ||
8510 // bottom edge of roi window is above scalar crop's top edge
8511 (roi->rect.left > crop_x_max) ||
8512 // left edge of roi window is beyond(right) of scalar crop's right edge
8513 (roi->rect.top > crop_y_max)){
8514        // top edge of roi window is below (beyond) scaler crop's bottom edge
8515 return false;
8516 }
8517 if (roi->rect.left < scalerCropRegion->left) {
8518 roi->rect.left = scalerCropRegion->left;
8519 }
8520 if (roi->rect.top < scalerCropRegion->top) {
8521 roi->rect.top = scalerCropRegion->top;
8522 }
8523 if (roi_x_max > crop_x_max) {
8524 roi_x_max = crop_x_max;
8525 }
8526 if (roi_y_max > crop_y_max) {
8527 roi_y_max = crop_y_max;
8528 }
8529 roi->rect.width = roi_x_max - roi->rect.left;
8530 roi->rect.height = roi_y_max - roi->rect.top;
8531 return true;
8532}
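// Illustrative example (values hypothetical): with a scaler crop of {0, 0, 2000, 1500},
// an ROI of {left=1800, top=1400, width=400, height=300} is clamped to
// {1800, 1400, 200, 100} and true is returned. An ROI that lies entirely outside the
// crop returns false, while weight == 0 returns true immediately (ROI disabled).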
8533
8534/*===========================================================================
8535 * FUNCTION : convertLandmarks
8536 *
8537 * DESCRIPTION: helper method to extract the landmarks from face detection info
8538 *
8539 * PARAMETERS :
8540 * @landmark_data : input landmark data to be converted
8541 * @landmarks : int32_t destination array
8542 *
8543 *
8544 *==========================================================================*/
8545void QCamera3HardwareInterface::convertLandmarks(
8546 cam_face_landmarks_info_t landmark_data,
8547 int32_t *landmarks)
8548{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008549 if (landmark_data.is_left_eye_valid) {
8550 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8551 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8552 } else {
8553 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8554 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8555 }
8556
8557 if (landmark_data.is_right_eye_valid) {
8558 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8559 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8560 } else {
8561 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8562 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8563 }
8564
8565 if (landmark_data.is_mouth_valid) {
8566 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8567 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8568 } else {
8569 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8570 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8571 }
8572}
8573
8574/*===========================================================================
8575 * FUNCTION : setInvalidLandmarks
8576 *
8577 * DESCRIPTION: helper method to set invalid landmarks
8578 *
8579 * PARAMETERS :
8580 * @landmarks : int32_t destination array
8581 *
8582 *
8583 *==========================================================================*/
8584void QCamera3HardwareInterface::setInvalidLandmarks(
8585 int32_t *landmarks)
8586{
8587 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8588 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8589 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8590 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8591 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8592 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008593}
8594
8595#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008596
8597/*===========================================================================
8598 * FUNCTION : getCapabilities
8599 *
8600 * DESCRIPTION: query camera capability from back-end
8601 *
8602 * PARAMETERS :
8603 * @ops : mm-interface ops structure
8604 * @cam_handle : camera handle for which we need capability
8605 *
8606 * RETURN : ptr type of capability structure
8607 * capability for success
8608 * NULL for failure
8609 *==========================================================================*/
8610cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8611 uint32_t cam_handle)
8612{
8613 int rc = NO_ERROR;
8614 QCamera3HeapMemory *capabilityHeap = NULL;
8615 cam_capability_t *cap_ptr = NULL;
8616
8617 if (ops == NULL) {
8618 LOGE("Invalid arguments");
8619 return NULL;
8620 }
8621
8622 capabilityHeap = new QCamera3HeapMemory(1);
8623 if (capabilityHeap == NULL) {
8624 LOGE("creation of capabilityHeap failed");
8625 return NULL;
8626 }
8627
8628 /* Allocate memory for capability buffer */
8629 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8630 if(rc != OK) {
8631        LOGE("No memory for capability");
8632 goto allocate_failed;
8633 }
8634
8635 /* Map memory for capability buffer */
8636 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8637
8638 rc = ops->map_buf(cam_handle,
8639 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8640 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8641 if(rc < 0) {
8642 LOGE("failed to map capability buffer");
8643 rc = FAILED_TRANSACTION;
8644 goto map_failed;
8645 }
8646
8647 /* Query Capability */
8648 rc = ops->query_capability(cam_handle);
8649 if(rc < 0) {
8650 LOGE("failed to query capability");
8651 rc = FAILED_TRANSACTION;
8652 goto query_failed;
8653 }
8654
8655 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8656 if (cap_ptr == NULL) {
8657 LOGE("out of memory");
8658 rc = NO_MEMORY;
8659 goto query_failed;
8660 }
8661
8662 memset(cap_ptr, 0, sizeof(cam_capability_t));
8663 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8664
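    // Clear the per-stream analysis padding offsets in the local copy of the
    // capability structure before handing it back to the caller.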
8665 int index;
8666 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8667 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8668 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8669 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8670 }
8671
8672query_failed:
8673 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8674map_failed:
8675 capabilityHeap->deallocate();
8676allocate_failed:
8677 delete capabilityHeap;
8678
8679 if (rc != NO_ERROR) {
8680 return NULL;
8681 } else {
8682 return cap_ptr;
8683 }
8684}
8685
Thierry Strudel3d639192016-09-09 11:52:26 -07008686/*===========================================================================
8687 * FUNCTION : initCapabilities
8688 *
8689 * DESCRIPTION: initialize camera capabilities in static data struct
8690 *
8691 * PARAMETERS :
8692 * @cameraId : camera Id
8693 *
8694 * RETURN : int32_t type of status
8695 * NO_ERROR -- success
8696 * none-zero failure code
8697 *==========================================================================*/
8698int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8699{
8700 int rc = 0;
8701 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008702 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008703
8704 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8705 if (rc) {
8706 LOGE("camera_open failed. rc = %d", rc);
8707 goto open_failed;
8708 }
8709 if (!cameraHandle) {
8710 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8711 goto open_failed;
8712 }
8713
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008714 handle = get_main_camera_handle(cameraHandle->camera_handle);
8715 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8716 if (gCamCapability[cameraId] == NULL) {
8717 rc = FAILED_TRANSACTION;
8718 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008719 }
8720
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008721 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008722 if (is_dual_camera_by_idx(cameraId)) {
8723 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8724 gCamCapability[cameraId]->aux_cam_cap =
8725 getCapabilities(cameraHandle->ops, handle);
8726 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8727 rc = FAILED_TRANSACTION;
8728 free(gCamCapability[cameraId]);
8729 goto failed_op;
8730 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008731
8732 // Copy the main camera capability to main_cam_cap struct
8733 gCamCapability[cameraId]->main_cam_cap =
8734 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8735 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8736 LOGE("out of memory");
8737 rc = NO_MEMORY;
8738 goto failed_op;
8739 }
8740 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8741 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008742 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008743failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008744 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8745 cameraHandle = NULL;
8746open_failed:
8747 return rc;
8748}
8749
8750/*==========================================================================
8751 * FUNCTION   : get3AVersion
8752 *
8753 * DESCRIPTION: get the Q3A S/W version
8754 *
8755 * PARAMETERS :
8756 * @sw_version: Reference of Q3A structure which will hold version info upon
8757 * return
8758 *
8759 * RETURN : None
8760 *
8761 *==========================================================================*/
8762void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8763{
8764 if(gCamCapability[mCameraId])
8765 sw_version = gCamCapability[mCameraId]->q3a_version;
8766 else
8767 LOGE("Capability structure NULL!");
8768}
8769
8770
8771/*===========================================================================
8772 * FUNCTION : initParameters
8773 *
8774 * DESCRIPTION: initialize camera parameters
8775 *
8776 * PARAMETERS :
8777 *
8778 * RETURN : int32_t type of status
8779 * NO_ERROR -- success
8780 * none-zero failure code
8781 *==========================================================================*/
8782int QCamera3HardwareInterface::initParameters()
8783{
8784 int rc = 0;
8785
8786 //Allocate Set Param Buffer
8787 mParamHeap = new QCamera3HeapMemory(1);
8788 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8789 if(rc != OK) {
8790 rc = NO_MEMORY;
8791 LOGE("Failed to allocate SETPARM Heap memory");
8792 delete mParamHeap;
8793 mParamHeap = NULL;
8794 return rc;
8795 }
8796
8797 //Map memory for parameters buffer
8798 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8799 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8800 mParamHeap->getFd(0),
8801 sizeof(metadata_buffer_t),
8802 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8803 if(rc < 0) {
8804 LOGE("failed to map SETPARM buffer");
8805 rc = FAILED_TRANSACTION;
8806 mParamHeap->deallocate();
8807 delete mParamHeap;
8808 mParamHeap = NULL;
8809 return rc;
8810 }
8811
8812 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8813
8814 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8815 return rc;
8816}
8817
8818/*===========================================================================
8819 * FUNCTION : deinitParameters
8820 *
8821 * DESCRIPTION: de-initialize camera parameters
8822 *
8823 * PARAMETERS :
8824 *
8825 * RETURN : NONE
8826 *==========================================================================*/
8827void QCamera3HardwareInterface::deinitParameters()
8828{
8829 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8830 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8831
8832 mParamHeap->deallocate();
8833 delete mParamHeap;
8834 mParamHeap = NULL;
8835
8836 mParameters = NULL;
8837
8838 free(mPrevParameters);
8839 mPrevParameters = NULL;
8840}
8841
8842/*===========================================================================
8843 * FUNCTION : calcMaxJpegSize
8844 *
8845 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8846 *
8847 * PARAMETERS :
8848 *
8849 * RETURN : max_jpeg_size
8850 *==========================================================================*/
8851size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8852{
8853 size_t max_jpeg_size = 0;
8854 size_t temp_width, temp_height;
8855 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8856 MAX_SIZES_CNT);
8857 for (size_t i = 0; i < count; i++) {
8858 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8859 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8860 if (temp_width * temp_height > max_jpeg_size ) {
8861 max_jpeg_size = temp_width * temp_height;
8862 }
8863 }
8864 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8865 return max_jpeg_size;
8866}
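// Illustrative example (values hypothetical): for a largest picture size of 4032x3024,
// max_jpeg_size = 4032 * 3024 * 3 / 2 + sizeof(camera3_jpeg_blob_t), roughly 18.3 MB plus
// the blob header; the 3/2 factor matches an uncompressed YUV420 frame, presumably used
// here as a worst-case bound for the JPEG payload.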
8867
8868/*===========================================================================
8869 * FUNCTION : getMaxRawSize
8870 *
8871 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8872 *
8873 * PARAMETERS :
8874 *
8875 * RETURN : Largest supported Raw Dimension
8876 *==========================================================================*/
8877cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8878{
8879 int max_width = 0;
8880 cam_dimension_t maxRawSize;
8881
8882 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8883 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8884 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8885 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8886 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8887 }
8888 }
8889 return maxRawSize;
8890}
8891
8892
8893/*===========================================================================
8894 * FUNCTION : calcMaxJpegDim
8895 *
8896 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8897 *
8898 * PARAMETERS :
8899 *
8900 * RETURN : max_jpeg_dim
8901 *==========================================================================*/
8902cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8903{
8904 cam_dimension_t max_jpeg_dim;
8905 cam_dimension_t curr_jpeg_dim;
8906 max_jpeg_dim.width = 0;
8907 max_jpeg_dim.height = 0;
8908 curr_jpeg_dim.width = 0;
8909 curr_jpeg_dim.height = 0;
8910 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8911 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8912 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8913 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8914 max_jpeg_dim.width * max_jpeg_dim.height ) {
8915 max_jpeg_dim.width = curr_jpeg_dim.width;
8916 max_jpeg_dim.height = curr_jpeg_dim.height;
8917 }
8918 }
8919 return max_jpeg_dim;
8920}
8921
8922/*===========================================================================
8923 * FUNCTION : addStreamConfig
8924 *
8925 * DESCRIPTION: adds the stream configuration to the array
8926 *
8927 * PARAMETERS :
8928 * @available_stream_configs : pointer to stream configuration array
8929 * @scalar_format : scalar format
8930 * @dim : configuration dimension
8931 * @config_type : input or output configuration type
8932 *
8933 * RETURN : NONE
8934 *==========================================================================*/
8935void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8936 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8937{
8938 available_stream_configs.add(scalar_format);
8939 available_stream_configs.add(dim.width);
8940 available_stream_configs.add(dim.height);
8941 available_stream_configs.add(config_type);
8942}
8943
8944/*===========================================================================
8945 * FUNCTION : suppportBurstCapture
8946 * FUNCTION   : supportBurstCapture
8947 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8948 *
8949 * PARAMETERS :
8950 * @cameraId : camera Id
8951 *
8952 * RETURN : true if camera supports BURST_CAPTURE
8953 * false otherwise
8954 *==========================================================================*/
8955bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8956{
8957 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8958 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8959 const int32_t highResWidth = 3264;
8960 const int32_t highResHeight = 2448;
8961
8962 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8963 // Maximum resolution images cannot be captured at >= 10fps
8964 // -> not supporting BURST_CAPTURE
8965 return false;
8966 }
8967
8968 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8969 // Maximum resolution images can be captured at >= 20fps
8970 // --> supporting BURST_CAPTURE
8971 return true;
8972 }
8973
8974 // Find the smallest highRes resolution, or largest resolution if there is none
8975 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8976 MAX_SIZES_CNT);
8977 size_t highRes = 0;
8978 while ((highRes + 1 < totalCnt) &&
8979 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8980 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8981 highResWidth * highResHeight)) {
8982 highRes++;
8983 }
8984 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8985 return true;
8986 } else {
8987 return false;
8988 }
8989}
8990
8991/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00008992 * FUNCTION : getPDStatIndex
8993 *
8994 * DESCRIPTION: Return the meta raw phase detection statistics index if present
8995 *
8996 * PARAMETERS :
8997 * @caps : camera capabilities
8998 *
8999 * RETURN : int32_t type
9000 * non-negative - on success
9001 * -1 - on failure
9002 *==========================================================================*/
9003int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9004 if (nullptr == caps) {
9005 return -1;
9006 }
9007
9008 uint32_t metaRawCount = caps->meta_raw_channel_count;
9009 int32_t ret = -1;
9010 for (size_t i = 0; i < metaRawCount; i++) {
9011 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9012 ret = i;
9013 break;
9014 }
9015 }
9016
9017 return ret;
9018}
9019
9020/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009021 * FUNCTION : initStaticMetadata
9022 *
9023 * DESCRIPTION: initialize the static metadata
9024 *
9025 * PARAMETERS :
9026 * @cameraId : camera Id
9027 *
9028 * RETURN : int32_t type of status
9029 * 0 -- success
9030 * non-zero failure code
9031 *==========================================================================*/
9032int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9033{
9034 int rc = 0;
9035 CameraMetadata staticInfo;
9036 size_t count = 0;
9037 bool limitedDevice = false;
9038 char prop[PROPERTY_VALUE_MAX];
9039 bool supportBurst = false;
9040
9041 supportBurst = supportBurstCapture(cameraId);
9042
9043 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9044     * guaranteed or if min fps of max resolution is less than 20 fps, it is
9045 * advertised as limited device*/
9046 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9047 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9048 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9049 !supportBurst;
9050
9051 uint8_t supportedHwLvl = limitedDevice ?
9052 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009053#ifndef USE_HAL_3_3
9054 // LEVEL_3 - This device will support level 3.
9055 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9056#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009057 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009058#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009059
9060 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9061 &supportedHwLvl, 1);
9062
9063 bool facingBack = false;
9064 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9065 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9066 facingBack = true;
9067 }
9068 /*HAL 3 only*/
9069 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9070 &gCamCapability[cameraId]->min_focus_distance, 1);
9071
9072 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9073 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9074
9075 /*should be using focal lengths but sensor doesn't provide that info now*/
9076 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9077 &gCamCapability[cameraId]->focal_length,
9078 1);
9079
9080 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9081 gCamCapability[cameraId]->apertures,
9082 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9083
9084 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9085 gCamCapability[cameraId]->filter_densities,
9086 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9087
9088
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009089 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9090 size_t mode_count =
9091 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9092 for (size_t i = 0; i < mode_count; i++) {
9093 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9094 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009095 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009096 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009097
9098 int32_t lens_shading_map_size[] = {
9099 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9100 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9101 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9102 lens_shading_map_size,
9103 sizeof(lens_shading_map_size)/sizeof(int32_t));
9104
9105 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9106 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9107
9108 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9109 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9110
9111 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9112 &gCamCapability[cameraId]->max_frame_duration, 1);
9113
9114 camera_metadata_rational baseGainFactor = {
9115 gCamCapability[cameraId]->base_gain_factor.numerator,
9116 gCamCapability[cameraId]->base_gain_factor.denominator};
9117 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9118 &baseGainFactor, 1);
9119
9120 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9121 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9122
9123 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9124 gCamCapability[cameraId]->pixel_array_size.height};
9125 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9126 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9127
9128 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9129 gCamCapability[cameraId]->active_array_size.top,
9130 gCamCapability[cameraId]->active_array_size.width,
9131 gCamCapability[cameraId]->active_array_size.height};
9132 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9133 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9134
9135 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9136 &gCamCapability[cameraId]->white_level, 1);
9137
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009138 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9139 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9140 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009141 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009142 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009143
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009144#ifndef USE_HAL_3_3
9145 bool hasBlackRegions = false;
9146 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9147 LOGW("black_region_count: %d is bounded to %d",
9148 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9149 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9150 }
9151 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9152 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9153 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9154 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9155 }
9156 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9157 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9158 hasBlackRegions = true;
9159 }
9160#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009161 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9162 &gCamCapability[cameraId]->flash_charge_duration, 1);
9163
9164 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9165 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9166
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009167 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9168 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9169 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009170 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9171 &timestampSource, 1);
9172
Thierry Strudel54dc9782017-02-15 12:12:10 -08009173 //update histogram vendor data
9174 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009175 &gCamCapability[cameraId]->histogram_size, 1);
9176
Thierry Strudel54dc9782017-02-15 12:12:10 -08009177 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009178 &gCamCapability[cameraId]->max_histogram_count, 1);
9179
Shuzhen Wang14415f52016-11-16 18:26:18 -08009180 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9181 //so that app can request fewer number of bins than the maximum supported.
9182 std::vector<int32_t> histBins;
9183 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9184 histBins.push_back(maxHistBins);
9185 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9186 (maxHistBins & 0x1) == 0) {
9187 histBins.push_back(maxHistBins >> 1);
9188 maxHistBins >>= 1;
9189 }
9190 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9191 histBins.data(), histBins.size());
9192
Thierry Strudel3d639192016-09-09 11:52:26 -07009193 int32_t sharpness_map_size[] = {
9194 gCamCapability[cameraId]->sharpness_map_size.width,
9195 gCamCapability[cameraId]->sharpness_map_size.height};
9196
9197 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9198 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9199
9200 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9201 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9202
Emilian Peev0f3c3162017-03-15 12:57:46 +00009203 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9204 if (0 <= indexPD) {
9205 // Advertise PD stats data as part of the Depth capabilities
9206 int32_t depthWidth =
9207 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9208 int32_t depthHeight =
9209 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
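        // The sample count below is derived from the PD stats buffer size
        // (2 bytes per RAW16 pixel); the divide-by-16 is presumably the per-sample
        // footprint of the phase-detection data.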
9210 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9211 assert(0 < depthSamplesCount);
9212 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9213 &depthSamplesCount, 1);
9214
9215 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9216 depthHeight,
9217 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9218 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9219 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9220 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9221 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9222
9223 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9224 depthHeight, 33333333,
9225 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9226 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9227 depthMinDuration,
9228 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9229
9230 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9231 depthHeight, 0,
9232 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9233 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9234 depthStallDuration,
9235 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9236
9237 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9238 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9239 }
9240
Thierry Strudel3d639192016-09-09 11:52:26 -07009241 int32_t scalar_formats[] = {
9242 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9243 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9244 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9245 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9246 HAL_PIXEL_FORMAT_RAW10,
9247 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009248 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9249 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9250 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009251
9252 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9253 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9254 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9255 count, MAX_SIZES_CNT, available_processed_sizes);
9256 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9257 available_processed_sizes, count * 2);
9258
9259 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9260 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9261 makeTable(gCamCapability[cameraId]->raw_dim,
9262 count, MAX_SIZES_CNT, available_raw_sizes);
9263 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9264 available_raw_sizes, count * 2);
9265
9266 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9267 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9268 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9269 count, MAX_SIZES_CNT, available_fps_ranges);
9270 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9271 available_fps_ranges, count * 2);
9272
9273 camera_metadata_rational exposureCompensationStep = {
9274 gCamCapability[cameraId]->exp_compensation_step.numerator,
9275 gCamCapability[cameraId]->exp_compensation_step.denominator};
9276 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9277 &exposureCompensationStep, 1);
9278
9279 Vector<uint8_t> availableVstabModes;
9280 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9281 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009282 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009283 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009284 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009285 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009286 count = IS_TYPE_MAX;
9287 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9288 for (size_t i = 0; i < count; i++) {
9289 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9290 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9291 eisSupported = true;
9292 break;
9293 }
9294 }
9295 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009296 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9297 }
9298 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9299 availableVstabModes.array(), availableVstabModes.size());
9300
9301 /*HAL 1 and HAL 3 common*/
9302 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9303 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9304 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009305 // Cap the max zoom to the max preferred value
9306 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
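    // Note: maxZoomStep / minZoomStep is unsigned integer division, so the ratio is
    // truncated to a whole number before the MAX_PREFERRED_ZOOM_RATIO cap is applied.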
Thierry Strudel3d639192016-09-09 11:52:26 -07009307 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9308 &maxZoom, 1);
9309
9310 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9311 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9312
9313 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9314 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9315 max3aRegions[2] = 0; /* AF not supported */
9316 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9317 max3aRegions, 3);
9318
9319 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9320 memset(prop, 0, sizeof(prop));
9321 property_get("persist.camera.facedetect", prop, "1");
9322 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9323 LOGD("Support face detection mode: %d",
9324 supportedFaceDetectMode);
9325
9326 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009327    /* supported face detect mode should be OFF if the max number of faces is 0 */
9328 if (maxFaces <= 0) {
9329 supportedFaceDetectMode = 0;
9330 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009331 Vector<uint8_t> availableFaceDetectModes;
9332 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9333 if (supportedFaceDetectMode == 1) {
9334 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9335 } else if (supportedFaceDetectMode == 2) {
9336 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9337 } else if (supportedFaceDetectMode == 3) {
9338 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9339 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9340 } else {
9341 maxFaces = 0;
9342 }
9343 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9344 availableFaceDetectModes.array(),
9345 availableFaceDetectModes.size());
9346 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9347 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009348 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9349 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9350 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009351
9352 int32_t exposureCompensationRange[] = {
9353 gCamCapability[cameraId]->exposure_compensation_min,
9354 gCamCapability[cameraId]->exposure_compensation_max};
9355 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9356 exposureCompensationRange,
9357 sizeof(exposureCompensationRange)/sizeof(int32_t));
9358
9359 uint8_t lensFacing = (facingBack) ?
9360 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9361 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9362
9363 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9364 available_thumbnail_sizes,
9365 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9366
9367    /*all sizes will be combined into this tag*/
9368 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9369 /*android.scaler.availableStreamConfigurations*/
9370 Vector<int32_t> available_stream_configs;
9371 cam_dimension_t active_array_dim;
9372 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9373 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009374
9375    /*Advertise the list of supported input dimensions based on the property below.
9376      By default all sizes up to 5MP will be advertised.
9377      Note that the setprop resolution format should be WxH,
9378      e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9379      To list all supported sizes, set the property to "0x0". */
9380 cam_dimension_t minInputSize = {2592,1944}; //5MP
9381 memset(prop, 0, sizeof(prop));
9382 property_get("persist.camera.input.minsize", prop, "2592x1944");
9383 if (strlen(prop) > 0) {
9384 char *saveptr = NULL;
9385 char *token = strtok_r(prop, "x", &saveptr);
9386 if (token != NULL) {
9387 minInputSize.width = atoi(token);
9388 }
9389 token = strtok_r(NULL, "x", &saveptr);
9390 if (token != NULL) {
9391 minInputSize.height = atoi(token);
9392 }
9393 }
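    // Note: if the property value has no "x" separator, only the width is overridden
    // and the height keeps the 5MP default above.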
9394
Thierry Strudel3d639192016-09-09 11:52:26 -07009395 /* Add input/output stream configurations for each scalar formats*/
9396 for (size_t j = 0; j < scalar_formats_count; j++) {
9397 switch (scalar_formats[j]) {
9398 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9399 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9400 case HAL_PIXEL_FORMAT_RAW10:
9401 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9402 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9403 addStreamConfig(available_stream_configs, scalar_formats[j],
9404 gCamCapability[cameraId]->raw_dim[i],
9405 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9406 }
9407 break;
9408 case HAL_PIXEL_FORMAT_BLOB:
9409 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9410 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9411 addStreamConfig(available_stream_configs, scalar_formats[j],
9412 gCamCapability[cameraId]->picture_sizes_tbl[i],
9413 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9414 }
9415 break;
9416 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9417 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9418 default:
9419 cam_dimension_t largest_picture_size;
9420 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9421 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9422 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9423 addStreamConfig(available_stream_configs, scalar_formats[j],
9424 gCamCapability[cameraId]->picture_sizes_tbl[i],
9425 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009426            /*For the two formats below we also support input streams for reprocessing; advertise those as well*/
9427 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9428 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9429 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9430 >= minInputSize.width) || (gCamCapability[cameraId]->
9431 picture_sizes_tbl[i].height >= minInputSize.height)) {
9432 addStreamConfig(available_stream_configs, scalar_formats[j],
9433 gCamCapability[cameraId]->picture_sizes_tbl[i],
9434 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9435 }
9436 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009437 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009438
Thierry Strudel3d639192016-09-09 11:52:26 -07009439 break;
9440 }
9441 }
9442
9443 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9444 available_stream_configs.array(), available_stream_configs.size());
9445 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9446 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9447
9448 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9449 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9450
9451 /* android.scaler.availableMinFrameDurations */
9452 Vector<int64_t> available_min_durations;
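    // Each entry added below is a (format, width, height, minFrameDuration) tuple,
    // mirroring the stream configuration list above; durations are in nanoseconds
    // per the Android metadata spec.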
9453 for (size_t j = 0; j < scalar_formats_count; j++) {
9454 switch (scalar_formats[j]) {
9455 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9456 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9457 case HAL_PIXEL_FORMAT_RAW10:
9458 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9459 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9460 available_min_durations.add(scalar_formats[j]);
9461 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9462 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9463 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9464 }
9465 break;
9466 default:
9467 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9468 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9469 available_min_durations.add(scalar_formats[j]);
9470 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9471 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9472 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9473 }
9474 break;
9475 }
9476 }
9477 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9478 available_min_durations.array(), available_min_durations.size());
9479
9480 Vector<int32_t> available_hfr_configs;
9481 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9482 int32_t fps = 0;
9483 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9484 case CAM_HFR_MODE_60FPS:
9485 fps = 60;
9486 break;
9487 case CAM_HFR_MODE_90FPS:
9488 fps = 90;
9489 break;
9490 case CAM_HFR_MODE_120FPS:
9491 fps = 120;
9492 break;
9493 case CAM_HFR_MODE_150FPS:
9494 fps = 150;
9495 break;
9496 case CAM_HFR_MODE_180FPS:
9497 fps = 180;
9498 break;
9499 case CAM_HFR_MODE_210FPS:
9500 fps = 210;
9501 break;
9502 case CAM_HFR_MODE_240FPS:
9503 fps = 240;
9504 break;
9505 case CAM_HFR_MODE_480FPS:
9506 fps = 480;
9507 break;
9508 case CAM_HFR_MODE_OFF:
9509 case CAM_HFR_MODE_MAX:
9510 default:
9511 break;
9512 }
9513
9514 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9515 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9516 /* For each HFR frame rate, need to advertise one variable fps range
9517 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9518 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9519 * set by the app. When video recording is started, [120, 120] is
9520 * set. This way sensor configuration does not change when recording
9521 * is started */
9522
9523 /* (width, height, fps_min, fps_max, batch_size_max) */
9524 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9525 j < MAX_SIZES_CNT; j++) {
9526 available_hfr_configs.add(
9527 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9528 available_hfr_configs.add(
9529 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9530 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9531 available_hfr_configs.add(fps);
9532 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9533
9534 /* (width, height, fps_min, fps_max, batch_size_max) */
9535 available_hfr_configs.add(
9536 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9537 available_hfr_configs.add(
9538 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9539 available_hfr_configs.add(fps);
9540 available_hfr_configs.add(fps);
9541 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9542 }
9543 }
9544 }
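    // batch_size_max advertised above is fps / PREVIEW_FPS_FOR_HFR, i.e. the number of
    // frames delivered per batch at the preview rate (e.g. 4 for 120fps, assuming the
    // 30fps preview rate mentioned in the comment above).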
9545 //Advertise HFR capability only if the property is set
9546 memset(prop, 0, sizeof(prop));
9547 property_get("persist.camera.hal3hfr.enable", prop, "1");
9548 uint8_t hfrEnable = (uint8_t)atoi(prop);
9549
9550 if(hfrEnable && available_hfr_configs.array()) {
9551 staticInfo.update(
9552 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9553 available_hfr_configs.array(), available_hfr_configs.size());
9554 }
9555
9556 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9557 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9558 &max_jpeg_size, 1);
9559
9560 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9561 size_t size = 0;
9562 count = CAM_EFFECT_MODE_MAX;
9563 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9564 for (size_t i = 0; i < count; i++) {
9565 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9566 gCamCapability[cameraId]->supported_effects[i]);
9567 if (NAME_NOT_FOUND != val) {
9568 avail_effects[size] = (uint8_t)val;
9569 size++;
9570 }
9571 }
9572 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9573 avail_effects,
9574 size);
9575
9576 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9577 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9578 size_t supported_scene_modes_cnt = 0;
9579 count = CAM_SCENE_MODE_MAX;
9580 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9581 for (size_t i = 0; i < count; i++) {
9582 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9583 CAM_SCENE_MODE_OFF) {
9584 int val = lookupFwkName(SCENE_MODES_MAP,
9585 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9586 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009587
Thierry Strudel3d639192016-09-09 11:52:26 -07009588 if (NAME_NOT_FOUND != val) {
9589 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9590 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9591 supported_scene_modes_cnt++;
9592 }
9593 }
9594 }
9595 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9596 avail_scene_modes,
9597 supported_scene_modes_cnt);
9598
9599 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9600 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9601 supported_scene_modes_cnt,
9602 CAM_SCENE_MODE_MAX,
9603 scene_mode_overrides,
9604 supported_indexes,
9605 cameraId);
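    // scene_mode_overrides packs one (AE, AWB, AF) override triplet per supported scene
    // mode, which is why the tag below is updated with supported_scene_modes_cnt * 3 entries.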
9606
9607 if (supported_scene_modes_cnt == 0) {
9608 supported_scene_modes_cnt = 1;
9609 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9610 }
9611
9612 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9613 scene_mode_overrides, supported_scene_modes_cnt * 3);
9614
9615 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9616 ANDROID_CONTROL_MODE_AUTO,
9617 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9618 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9619 available_control_modes,
9620 3);
9621
9622 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9623 size = 0;
9624 count = CAM_ANTIBANDING_MODE_MAX;
9625 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9626 for (size_t i = 0; i < count; i++) {
9627 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9628 gCamCapability[cameraId]->supported_antibandings[i]);
9629 if (NAME_NOT_FOUND != val) {
9630 avail_antibanding_modes[size] = (uint8_t)val;
9631 size++;
9632 }
9633
9634 }
9635 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9636 avail_antibanding_modes,
9637 size);
9638
9639 uint8_t avail_abberation_modes[] = {
9640 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9641 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9642 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9643 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9644 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9645 if (0 == count) {
9646        // If no aberration correction modes are available for a device, advertise only the OFF mode
9647 size = 1;
9648 } else {
9649        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9650        // So, advertise all 3 modes if at least one mode is supported, as per the
9651        // new M requirement.
9652 size = 3;
9653 }
9654 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9655 avail_abberation_modes,
9656 size);
9657
9658 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9659 size = 0;
9660 count = CAM_FOCUS_MODE_MAX;
9661 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9662 for (size_t i = 0; i < count; i++) {
9663 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9664 gCamCapability[cameraId]->supported_focus_modes[i]);
9665 if (NAME_NOT_FOUND != val) {
9666 avail_af_modes[size] = (uint8_t)val;
9667 size++;
9668 }
9669 }
9670 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9671 avail_af_modes,
9672 size);
9673
9674 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9675 size = 0;
9676 count = CAM_WB_MODE_MAX;
9677 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9678 for (size_t i = 0; i < count; i++) {
9679 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9680 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9681 gCamCapability[cameraId]->supported_white_balances[i]);
9682 if (NAME_NOT_FOUND != val) {
9683 avail_awb_modes[size] = (uint8_t)val;
9684 size++;
9685 }
9686 }
9687 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9688 avail_awb_modes,
9689 size);
9690
9691 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9692 count = CAM_FLASH_FIRING_LEVEL_MAX;
9693 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9694 count);
9695 for (size_t i = 0; i < count; i++) {
9696 available_flash_levels[i] =
9697 gCamCapability[cameraId]->supported_firing_levels[i];
9698 }
9699 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9700 available_flash_levels, count);
9701
9702 uint8_t flashAvailable;
9703 if (gCamCapability[cameraId]->flash_available)
9704 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9705 else
9706 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9707 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9708 &flashAvailable, 1);
9709
9710 Vector<uint8_t> avail_ae_modes;
9711 count = CAM_AE_MODE_MAX;
9712 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9713 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009714 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9715 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9716 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9717 }
9718 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009719 }
9720 if (flashAvailable) {
9721 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9722 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9723 }
9724 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9725 avail_ae_modes.array(),
9726 avail_ae_modes.size());
9727
9728 int32_t sensitivity_range[2];
9729 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9730 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9731 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9732 sensitivity_range,
9733 sizeof(sensitivity_range) / sizeof(int32_t));
9734
9735 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9736 &gCamCapability[cameraId]->max_analog_sensitivity,
9737 1);
9738
9739 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9740 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9741 &sensor_orientation,
9742 1);
9743
9744 int32_t max_output_streams[] = {
9745 MAX_STALLING_STREAMS,
9746 MAX_PROCESSED_STREAMS,
9747 MAX_RAW_STREAMS};
9748 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9749 max_output_streams,
9750 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9751
9752 uint8_t avail_leds = 0;
9753 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9754 &avail_leds, 0);
9755
9756 uint8_t focus_dist_calibrated;
9757 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9758 gCamCapability[cameraId]->focus_dist_calibrated);
9759 if (NAME_NOT_FOUND != val) {
9760 focus_dist_calibrated = (uint8_t)val;
9761 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9762 &focus_dist_calibrated, 1);
9763 }
9764
9765 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9766 size = 0;
9767 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9768 MAX_TEST_PATTERN_CNT);
9769 for (size_t i = 0; i < count; i++) {
9770 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9771 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9772 if (NAME_NOT_FOUND != testpatternMode) {
9773 avail_testpattern_modes[size] = testpatternMode;
9774 size++;
9775 }
9776 }
9777 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9778 avail_testpattern_modes,
9779 size);
9780
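    // Max pipeline depth reported to the framework: the number of in-flight requests
    // plus the empty-pipeline and frame-skip delays defined for this HAL.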
9781 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9782 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9783 &max_pipeline_depth,
9784 1);
9785
9786 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9787 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9788 &partial_result_count,
9789 1);
9790
9791 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9792 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9793
9794 Vector<uint8_t> available_capabilities;
9795 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9796 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9797 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9798 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9799 if (supportBurst) {
9800 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9801 }
9802 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9803 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9804 if (hfrEnable && available_hfr_configs.array()) {
9805 available_capabilities.add(
9806 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9807 }
9808
9809 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9810 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9811 }
9812 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9813 available_capabilities.array(),
9814 available_capabilities.size());
9815
9816    //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9817 //Assumption is that all bayer cameras support MANUAL_SENSOR.
9818 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9819 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9820
9821 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9822 &aeLockAvailable, 1);
9823
9824    //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9825 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
9826 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9827 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9828
9829 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9830 &awbLockAvailable, 1);
9831
9832 int32_t max_input_streams = 1;
9833 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9834 &max_input_streams,
9835 1);
9836
9837 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9838 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9839 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9840 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9841 HAL_PIXEL_FORMAT_YCbCr_420_888};
9842 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9843 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9844
9845 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9846 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9847 &max_latency,
9848 1);
9849
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009850#ifndef USE_HAL_3_3
9851 int32_t isp_sensitivity_range[2];
9852 isp_sensitivity_range[0] =
9853 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9854 isp_sensitivity_range[1] =
9855 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9856 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9857 isp_sensitivity_range,
9858 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9859#endif
9860
Thierry Strudel3d639192016-09-09 11:52:26 -07009861 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9862 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9863 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9864 available_hot_pixel_modes,
9865 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9866
9867 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9868 ANDROID_SHADING_MODE_FAST,
9869 ANDROID_SHADING_MODE_HIGH_QUALITY};
9870 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9871 available_shading_modes,
9872 3);
9873
9874 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9875 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9876 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9877 available_lens_shading_map_modes,
9878 2);
9879
9880 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9881 ANDROID_EDGE_MODE_FAST,
9882 ANDROID_EDGE_MODE_HIGH_QUALITY,
9883 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9884 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9885 available_edge_modes,
9886 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9887
9888 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9889 ANDROID_NOISE_REDUCTION_MODE_FAST,
9890 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9891 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9892 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9893 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9894 available_noise_red_modes,
9895 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9896
9897 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9898 ANDROID_TONEMAP_MODE_FAST,
9899 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9900 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9901 available_tonemap_modes,
9902 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9903
9904 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9905 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9906 available_hot_pixel_map_modes,
9907 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9908
9909 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9910 gCamCapability[cameraId]->reference_illuminant1);
9911 if (NAME_NOT_FOUND != val) {
9912 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9913 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9914 }
9915
9916 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9917 gCamCapability[cameraId]->reference_illuminant2);
9918 if (NAME_NOT_FOUND != val) {
9919 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9920 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9921 }
9922
9923 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9924 (void *)gCamCapability[cameraId]->forward_matrix1,
9925 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9926
9927 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9928 (void *)gCamCapability[cameraId]->forward_matrix2,
9929 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9930
9931 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9932 (void *)gCamCapability[cameraId]->color_transform1,
9933 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9934
9935 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9936 (void *)gCamCapability[cameraId]->color_transform2,
9937 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9938
9939 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9940 (void *)gCamCapability[cameraId]->calibration_transform1,
9941 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9942
9943 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9944 (void *)gCamCapability[cameraId]->calibration_transform2,
9945 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9946
9947 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9948 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9949 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9950 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9951 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9952 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9953 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9954 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9955 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9956 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9957 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9958 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9959 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9960 ANDROID_JPEG_GPS_COORDINATES,
9961 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9962 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9963 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9964 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9965 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9966 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9967 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9968 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9969 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9970 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009971#ifndef USE_HAL_3_3
9972 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9973#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009974 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009975 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009976 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9977 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009978 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009979 /* DevCamDebug metadata request_keys_basic */
9980 DEVCAMDEBUG_META_ENABLE,
9981 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009982 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07009983 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07009984 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -07009985 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Samuel Ha68ba5172016-12-15 18:41:12 -08009986 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009987
9988 size_t request_keys_cnt =
9989 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9990 Vector<int32_t> available_request_keys;
9991 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9992 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9993 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9994 }
9995
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07009996 if (gExposeEnableZslKey) {
Chien-Yu Chened0a4c92017-05-01 18:25:03 +00009997 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07009998 }
9999
Thierry Strudel3d639192016-09-09 11:52:26 -070010000 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10001 available_request_keys.array(), available_request_keys.size());
10002
10003 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10004 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10005 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10006 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10007 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10008 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10009 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10010 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10011 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10012 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10013 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10014 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10015 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10016 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10017 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10018 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10019 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010020 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010021 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10022 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10023 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010024 ANDROID_STATISTICS_FACE_SCORES,
10025#ifndef USE_HAL_3_3
10026 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10027#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010028 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010029 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010030 // DevCamDebug metadata result_keys_basic
10031 DEVCAMDEBUG_META_ENABLE,
10032 // DevCamDebug metadata result_keys AF
10033 DEVCAMDEBUG_AF_LENS_POSITION,
10034 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10035 DEVCAMDEBUG_AF_TOF_DISTANCE,
10036 DEVCAMDEBUG_AF_LUMA,
10037 DEVCAMDEBUG_AF_HAF_STATE,
10038 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10039 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10040 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10041 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10042 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10043 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10044 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10045 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10046 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10047 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10048 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10049 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10050 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10051 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10052 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10053 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10054 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10055 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10056 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10057 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10058 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10059 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10060 // DevCamDebug metadata result_keys AEC
10061 DEVCAMDEBUG_AEC_TARGET_LUMA,
10062 DEVCAMDEBUG_AEC_COMP_LUMA,
10063 DEVCAMDEBUG_AEC_AVG_LUMA,
10064 DEVCAMDEBUG_AEC_CUR_LUMA,
10065 DEVCAMDEBUG_AEC_LINECOUNT,
10066 DEVCAMDEBUG_AEC_REAL_GAIN,
10067 DEVCAMDEBUG_AEC_EXP_INDEX,
10068 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010069 // DevCamDebug metadata result_keys zzHDR
10070 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10071 DEVCAMDEBUG_AEC_L_LINECOUNT,
10072 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10073 DEVCAMDEBUG_AEC_S_LINECOUNT,
10074 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10075 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10076 // DevCamDebug metadata result_keys ADRC
10077 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10078 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10079 DEVCAMDEBUG_AEC_GTM_RATIO,
10080 DEVCAMDEBUG_AEC_LTM_RATIO,
10081 DEVCAMDEBUG_AEC_LA_RATIO,
10082 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010083 // DevCamDebug metadata result_keys AWB
10084 DEVCAMDEBUG_AWB_R_GAIN,
10085 DEVCAMDEBUG_AWB_G_GAIN,
10086 DEVCAMDEBUG_AWB_B_GAIN,
10087 DEVCAMDEBUG_AWB_CCT,
10088 DEVCAMDEBUG_AWB_DECISION,
10089 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010090 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10091 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10092 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010093 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010094 };
10095
Thierry Strudel3d639192016-09-09 11:52:26 -070010096 size_t result_keys_cnt =
10097 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10098
10099 Vector<int32_t> available_result_keys;
10100 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10101 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10102 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10103 }
10104 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10105 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10106 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10107 }
10108 if (supportedFaceDetectMode == 1) {
10109 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10110 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10111 } else if ((supportedFaceDetectMode == 2) ||
10112 (supportedFaceDetectMode == 3)) {
10113 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10114 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10115 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010116#ifndef USE_HAL_3_3
10117 if (hasBlackRegions) {
10118 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10119 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10120 }
10121#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010122
10123 if (gExposeEnableZslKey) {
10124 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10125 }
10126
Thierry Strudel3d639192016-09-09 11:52:26 -070010127 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10128 available_result_keys.array(), available_result_keys.size());
10129
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010130 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010131 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10132 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10133 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10134 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10135 ANDROID_SCALER_CROPPING_TYPE,
10136 ANDROID_SYNC_MAX_LATENCY,
10137 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10138 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10139 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10140 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10141 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10142 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10143 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10144 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10145 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10146 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10147 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10148 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10149 ANDROID_LENS_FACING,
10150 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10151 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10152 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10153 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10154 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10155 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10156 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10157 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10158 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10159 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10160 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10161 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10162 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10163 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10164 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10165 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10166 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10167 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10168 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10169 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010170 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010171 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10172 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10173 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10174 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10175 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10176 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10177 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10178 ANDROID_CONTROL_AVAILABLE_MODES,
10179 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10180 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10181 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10182 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010183 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10184#ifndef USE_HAL_3_3
10185 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10186 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10187#endif
10188 };
10189
10190 Vector<int32_t> available_characteristics_keys;
10191 available_characteristics_keys.appendArray(characteristics_keys_basic,
10192 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10193#ifndef USE_HAL_3_3
10194 if (hasBlackRegions) {
10195 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10196 }
10197#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010198
10199 if (0 <= indexPD) {
10200 int32_t depthKeys[] = {
10201 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10202 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10203 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10204 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10205 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10206 };
10207 available_characteristics_keys.appendArray(depthKeys,
10208 sizeof(depthKeys) / sizeof(depthKeys[0]));
10209 }
10210
Thierry Strudel3d639192016-09-09 11:52:26 -070010211 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010212 available_characteristics_keys.array(),
10213 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010214
10215 /*available stall durations depend on the hw + sw and will be different for different devices */
10216 /*have to add for raw after implementation*/
10217 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10218 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10219
10220 Vector<int64_t> available_stall_durations;
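    // Entries added below are (format, width, height, stall duration) tuples: BLOB uses
    // the JPEG stall table and RAW16 uses the RAW16 stall table.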
10221 for (uint32_t j = 0; j < stall_formats_count; j++) {
10222 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10223 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10224 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10225 available_stall_durations.add(stall_formats[j]);
10226 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10227 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10228 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10229 }
10230 } else {
10231 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10232 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10233 available_stall_durations.add(stall_formats[j]);
10234 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10235 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10236 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10237 }
10238 }
10239 }
10240 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10241 available_stall_durations.array(),
10242 available_stall_durations.size());
10243
10244 //QCAMERA3_OPAQUE_RAW
10245 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10246 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10247 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10248 case LEGACY_RAW:
10249 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10250 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10251 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10252 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10253 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10254 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10255 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10256 break;
10257 case MIPI_RAW:
10258 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10259 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10260 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10261 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10262 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10263 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10264 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10265 break;
10266 default:
10267 LOGE("unknown opaque_raw_format %d",
10268 gCamCapability[cameraId]->opaque_raw_fmt);
10269 break;
10270 }
10271 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10272
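    // Each entry advertised below is a (width, height, stride) triple for the opaque
    // RAW format selected above.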
10273 Vector<int32_t> strides;
10274 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10275 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10276 cam_stream_buf_plane_info_t buf_planes;
10277 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10278 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10279 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10280 &gCamCapability[cameraId]->padding_info, &buf_planes);
10281 strides.add(buf_planes.plane_info.mp[0].stride);
10282 }
10283 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10284 strides.size());
10285
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010286 //TBD: remove the following line once backend advertises zzHDR in feature mask
10287 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010288 //Video HDR default
10289 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10290 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010291 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010292 int32_t vhdr_mode[] = {
10293 QCAMERA3_VIDEO_HDR_MODE_OFF,
10294 QCAMERA3_VIDEO_HDR_MODE_ON};
10295
10296 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10297 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10298 vhdr_mode, vhdr_mode_count);
10299 }
10300
Thierry Strudel3d639192016-09-09 11:52:26 -070010301 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10302 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10303 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10304
10305 uint8_t isMonoOnly =
10306 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10307 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10308 &isMonoOnly, 1);
10309
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010310#ifndef USE_HAL_3_3
10311 Vector<int32_t> opaque_size;
10312 for (size_t j = 0; j < scalar_formats_count; j++) {
10313 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10314 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10315 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10316 cam_stream_buf_plane_info_t buf_planes;
10317
10318 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10319 &gCamCapability[cameraId]->padding_info, &buf_planes);
10320
10321 if (rc == 0) {
10322 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10323 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10324 opaque_size.add(buf_planes.plane_info.frame_len);
10325                } else {
10326 LOGE("raw frame calculation failed!");
10327 }
10328 }
10329 }
10330 }
10331
10332 if ((opaque_size.size() > 0) &&
10333 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10334 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10335 else
10336        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10337#endif
10338
Thierry Strudel04e026f2016-10-10 11:27:36 -070010339 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10340 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10341 size = 0;
10342 count = CAM_IR_MODE_MAX;
10343 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10344 for (size_t i = 0; i < count; i++) {
10345 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10346 gCamCapability[cameraId]->supported_ir_modes[i]);
10347 if (NAME_NOT_FOUND != val) {
10348 avail_ir_modes[size] = (int32_t)val;
10349 size++;
10350 }
10351 }
10352 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10353 avail_ir_modes, size);
10354 }
10355
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010356 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10357 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10358 size = 0;
10359 count = CAM_AEC_CONVERGENCE_MAX;
10360 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10361 for (size_t i = 0; i < count; i++) {
10362 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10363 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10364 if (NAME_NOT_FOUND != val) {
10365 available_instant_aec_modes[size] = (int32_t)val;
10366 size++;
10367 }
10368 }
10369 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10370 available_instant_aec_modes, size);
10371 }
10372
Thierry Strudel54dc9782017-02-15 12:12:10 -080010373 int32_t sharpness_range[] = {
10374 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10375 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10376 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10377
10378 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10379 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10380 size = 0;
10381 count = CAM_BINNING_CORRECTION_MODE_MAX;
10382 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10383 for (size_t i = 0; i < count; i++) {
10384 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10385 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10386 gCamCapability[cameraId]->supported_binning_modes[i]);
10387 if (NAME_NOT_FOUND != val) {
10388 avail_binning_modes[size] = (int32_t)val;
10389 size++;
10390 }
10391 }
10392 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10393 avail_binning_modes, size);
10394 }
10395
10396 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10397 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10398 size = 0;
10399 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10400 for (size_t i = 0; i < count; i++) {
10401 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10402 gCamCapability[cameraId]->supported_aec_modes[i]);
10403 if (NAME_NOT_FOUND != val)
10404 available_aec_modes[size++] = val;
10405 }
10406 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10407 available_aec_modes, size);
10408 }
10409
10410 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10411 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10412 size = 0;
10413 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10414 for (size_t i = 0; i < count; i++) {
10415 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10416 gCamCapability[cameraId]->supported_iso_modes[i]);
10417 if (NAME_NOT_FOUND != val)
10418 available_iso_modes[size++] = val;
10419 }
10420 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10421 available_iso_modes, size);
10422 }
10423
10424 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010425 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010426 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10427 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10428 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10429
10430 int32_t available_saturation_range[4];
10431 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10432 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10433 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10434 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10435 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10436 available_saturation_range, 4);
10437
10438 uint8_t is_hdr_values[2];
10439 is_hdr_values[0] = 0;
10440 is_hdr_values[1] = 1;
10441 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10442 is_hdr_values, 2);
10443
10444 float is_hdr_confidence_range[2];
10445 is_hdr_confidence_range[0] = 0.0;
10446 is_hdr_confidence_range[1] = 1.0;
10447 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10448 is_hdr_confidence_range, 2);
10449
Emilian Peev0a972ef2017-03-16 10:25:53 +000010450 size_t eepromLength = strnlen(
10451 reinterpret_cast<const char *>(
10452 gCamCapability[cameraId]->eeprom_version_info),
10453 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10454 if (0 < eepromLength) {
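// Append an Easel presence marker (",E:Y" or ",E:N") to the EEPROM version string when
// there is room for it; easelInfo below is only used via sizeof() to reserve space for that suffix.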
Zhijun Hea557c4c2017-03-16 18:37:53 -070010455 char easelInfo[] = ",E:N";
10456 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10457 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10458 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010459 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10460 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010461 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010462 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10463 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10464 }
10465
Thierry Strudel3d639192016-09-09 11:52:26 -070010466 gStaticMetadata[cameraId] = staticInfo.release();
10467 return rc;
10468}
10469
10470/*===========================================================================
10471 * FUNCTION : makeTable
10472 *
10473 * DESCRIPTION: make a table of sizes
10474 *
10475 * PARAMETERS :
10476 *   @dimTable, @size      : source dimension array and its valid entry count
10477 *   @max_size, @sizeTable : output capacity and flattened (width, height) destination array
10478 *==========================================================================*/
10479void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10480 size_t max_size, int32_t *sizeTable)
10481{
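// Flatten the dimension table into interleaved pairs: sizeTable = {w0, h0, w1, h1, ...}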
10482 size_t j = 0;
10483 if (size > max_size) {
10484 size = max_size;
10485 }
10486 for (size_t i = 0; i < size; i++) {
10487 sizeTable[j] = dimTable[i].width;
10488 sizeTable[j+1] = dimTable[i].height;
10489 j+=2;
10490 }
10491}
10492
10493/*===========================================================================
10494 * FUNCTION : makeFPSTable
10495 *
10496 * DESCRIPTION: make a table of fps ranges
10497 *
10498 * PARAMETERS :
10499 *   @fpsTable, @size, @max_size, @fpsRangesTable : source fps ranges, entry count, capacity, and output array
10500 *==========================================================================*/
10501void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10502 size_t max_size, int32_t *fpsRangesTable)
10503{
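// Flatten the fps range table into interleaved pairs: fpsRangesTable = {min0, max0, min1, max1, ...}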
10504 size_t j = 0;
10505 if (size > max_size) {
10506 size = max_size;
10507 }
10508 for (size_t i = 0; i < size; i++) {
10509 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10510 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10511 j+=2;
10512 }
10513}
10514
10515/*===========================================================================
10516 * FUNCTION : makeOverridesList
10517 *
10518 * DESCRIPTION: make a list of scene mode overrides
10519 *
10520 * PARAMETERS :
10521 *   @overridesTable, @size, @max_size : scene mode overrides reported by the daemon
10522 *   @overridesList, @supported_indexes, @camera_id : output list, fwk-supported scene mode indexes, camera id
10523 *==========================================================================*/
10524void QCamera3HardwareInterface::makeOverridesList(
10525 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10526 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10527{
10528 /* The daemon provides a list of overrides for all scene modes.
10529 However, we should send the framework only the overrides for the
10530 scene modes it actually supports. */
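// Each output entry is an {AE mode, AWB mode, AF mode} triplet, matching the
// ANDROID_CONTROL_SCENE_MODE_OVERRIDES layout.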
10531 size_t j = 0;
10532 if (size > max_size) {
10533 size = max_size;
10534 }
10535 size_t focus_count = CAM_FOCUS_MODE_MAX;
10536 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10537 focus_count);
10538 for (size_t i = 0; i < size; i++) {
10539 bool supt = false;
10540 size_t index = supported_indexes[i];
10541 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10542 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10543 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10544 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10545 overridesTable[index].awb_mode);
10546 if (NAME_NOT_FOUND != val) {
10547 overridesList[j+1] = (uint8_t)val;
10548 }
10549 uint8_t focus_override = overridesTable[index].af_mode;
10550 for (size_t k = 0; k < focus_count; k++) {
10551 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10552 supt = true;
10553 break;
10554 }
10555 }
10556 if (supt) {
10557 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10558 focus_override);
10559 if (NAME_NOT_FOUND != val) {
10560 overridesList[j+2] = (uint8_t)val;
10561 }
10562 } else {
10563 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10564 }
10565 j+=3;
10566 }
10567}
10568
10569/*===========================================================================
10570 * FUNCTION : filterJpegSizes
10571 *
10572 * DESCRIPTION: Returns the supported JPEG sizes, restricted to those sizes
10573 * that the active array can be downscaled to within the given factor
10574 *
10575 * PARAMETERS :
10576 *
10577 * RETURN : length of jpegSizes array
10578 *==========================================================================*/
10579
10580size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10581 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10582 uint8_t downscale_factor)
10583{
10584 if (0 == downscale_factor) {
10585 downscale_factor = 1;
10586 }
10587
10588 int32_t min_width = active_array_size.width / downscale_factor;
10589 int32_t min_height = active_array_size.height / downscale_factor;
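// A processed size qualifies as a JPEG size only if it is at least
// active_array / downscale_factor in both dimensions.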
10590 size_t jpegSizesCnt = 0;
10591 if (processedSizesCnt > maxCount) {
10592 processedSizesCnt = maxCount;
10593 }
10594 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10595 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10596 jpegSizes[jpegSizesCnt] = processedSizes[i];
10597 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10598 jpegSizesCnt += 2;
10599 }
10600 }
10601 return jpegSizesCnt;
10602}
10603
10604/*===========================================================================
10605 * FUNCTION : computeNoiseModelEntryS
10606 *
10607 * DESCRIPTION: function to map a given sensitivity to the S noise
10608 * model parameters in the DNG noise model.
10609 *
10610 * PARAMETERS : sens : the sensor sensitivity
10611 *
10612 * RETURN : S (sensor amplification) noise
10613 *
10614 *==========================================================================*/
10615double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
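// Per the android.sensor.noiseProfile model, noise(x) = sqrt(S * x + O) for a normalized pixel
// value x; S (the signal-dependent term) is modeled as a linear function of sensitivity, clamped at zero.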
10616 double s = gCamCapability[mCameraId]->gradient_S * sens +
10617 gCamCapability[mCameraId]->offset_S;
10618 return ((s < 0.0) ? 0.0 : s);
10619}
10620
10621/*===========================================================================
10622 * FUNCTION : computeNoiseModelEntryO
10623 *
10624 * DESCRIPTION: function to map a given sensitivity to the O noise
10625 * model parameters in the DNG noise model.
10626 *
10627 * PARAMETERS : sens : the sensor sensitivity
10628 *
10629 * RETURN : O (sensor readout) noise
10630 *
10631 *==========================================================================*/
10632double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
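// O (the signal-independent term) grows quadratically with sensitivity and is further scaled by
// the square of any digital gain applied beyond the maximum analog sensitivity.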
10633 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10634 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10635 1.0 : (1.0 * sens / max_analog_sens);
10636 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10637 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10638 return ((o < 0.0) ? 0.0 : o);
10639}
10640
10641/*===========================================================================
10642 * FUNCTION : getSensorSensitivity
10643 *
10644 * DESCRIPTION: convert iso_mode to an integer value
10645 *
10646 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10647 *
10648 * RETURN : sensitivity supported by sensor
10649 *
10650 *==========================================================================*/
10651int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10652{
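// ISO modes without a fixed numeric sensitivity fall through to -1.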
10653 int32_t sensitivity;
10654
10655 switch (iso_mode) {
10656 case CAM_ISO_MODE_100:
10657 sensitivity = 100;
10658 break;
10659 case CAM_ISO_MODE_200:
10660 sensitivity = 200;
10661 break;
10662 case CAM_ISO_MODE_400:
10663 sensitivity = 400;
10664 break;
10665 case CAM_ISO_MODE_800:
10666 sensitivity = 800;
10667 break;
10668 case CAM_ISO_MODE_1600:
10669 sensitivity = 1600;
10670 break;
10671 default:
10672 sensitivity = -1;
10673 break;
10674 }
10675 return sensitivity;
10676}
10677
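/*===========================================================================
 * FUNCTION : initHdrPlusClientLocked
 *
 * DESCRIPTION: Open the Easel manager client and suspend Easel if it is
 * present; expected to be called with gHdrPlusClientLock held.
 *
 * RETURN : OK on success, error code from EaselManagerClient otherwise
 *==========================================================================*/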
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010678int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010679 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010680 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10681 // to connect to Easel.
10682 bool doNotpowerOnEasel =
10683 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10684
10685 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010686 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10687 return OK;
10688 }
10689
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010690 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010691 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010692 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010693 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010694 return res;
10695 }
10696
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010697 EaselManagerClientOpened = true;
10698
10699 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010700 if (res != OK) {
10701 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10702 }
10703
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010704 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010705 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010706
10707 // Expose enableZsl key only when HDR+ mode is enabled.
10708 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010709 }
10710
10711 return OK;
10712}
10713
Thierry Strudel3d639192016-09-09 11:52:26 -070010714/*===========================================================================
10715 * FUNCTION : getCamInfo
10716 *
10717 * DESCRIPTION: query camera capabilities
10718 *
10719 * PARAMETERS :
10720 * @cameraId : camera Id
10721 * @info : camera info struct to be filled in with camera capabilities
10722 *
10723 * RETURN : int type of status
10724 * NO_ERROR -- success
10725 * non-zero failure code
10726 *==========================================================================*/
10727int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10728 struct camera_info *info)
10729{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010730 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010731 int rc = 0;
10732
10733 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010734
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010735 {
10736 Mutex::Autolock l(gHdrPlusClientLock);
10737 rc = initHdrPlusClientLocked();
10738 if (rc != OK) {
10739 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10740 pthread_mutex_unlock(&gCamLock);
10741 return rc;
10742 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010743 }
10744
Thierry Strudel3d639192016-09-09 11:52:26 -070010745 if (NULL == gCamCapability[cameraId]) {
10746 rc = initCapabilities(cameraId);
10747 if (rc < 0) {
10748 pthread_mutex_unlock(&gCamLock);
10749 return rc;
10750 }
10751 }
10752
10753 if (NULL == gStaticMetadata[cameraId]) {
10754 rc = initStaticMetadata(cameraId);
10755 if (rc < 0) {
10756 pthread_mutex_unlock(&gCamLock);
10757 return rc;
10758 }
10759 }
10760
10761 switch(gCamCapability[cameraId]->position) {
10762 case CAM_POSITION_BACK:
10763 case CAM_POSITION_BACK_AUX:
10764 info->facing = CAMERA_FACING_BACK;
10765 break;
10766
10767 case CAM_POSITION_FRONT:
10768 case CAM_POSITION_FRONT_AUX:
10769 info->facing = CAMERA_FACING_FRONT;
10770 break;
10771
10772 default:
10773 LOGE("Unknown position type %d for camera id:%d",
10774 gCamCapability[cameraId]->position, cameraId);
10775 rc = -1;
10776 break;
10777 }
10778
10779
10780 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010781#ifndef USE_HAL_3_3
10782 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10783#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010784 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010785#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010786 info->static_camera_characteristics = gStaticMetadata[cameraId];
10787
10788 //For now assume both cameras can operate independently.
10789 info->conflicting_devices = NULL;
10790 info->conflicting_devices_length = 0;
10791
10792 //resource cost is 100 * MIN(1.0, m/M),
10793 //where m is throughput requirement with maximum stream configuration
10794 //and M is CPP maximum throughput.
10795 float max_fps = 0.0;
10796 for (uint32_t i = 0;
10797 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10798 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10799 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10800 }
10801 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10802 gCamCapability[cameraId]->active_array_size.width *
10803 gCamCapability[cameraId]->active_array_size.height * max_fps /
10804 gCamCapability[cameraId]->max_pixel_bandwidth;
10805 info->resource_cost = 100 * MIN(1.0, ratio);
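// Illustrative example (hypothetical numbers): a 12 MP active array at 30 fps across 3 processed
// streams against a 1.2 GP/s CPP budget gives ratio = 0.9, i.e. a resource cost of 90.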
10806 LOGI("camera %d resource cost is %d", cameraId,
10807 info->resource_cost);
10808
10809 pthread_mutex_unlock(&gCamLock);
10810 return rc;
10811}
10812
10813/*===========================================================================
10814 * FUNCTION : translateCapabilityToMetadata
10815 *
10816 * DESCRIPTION: translate the capability into camera_metadata_t
10817 *
10818 * PARAMETERS : type of the request
10819 *
10820 *
10821 * RETURN : success: camera_metadata_t*
10822 * failure: NULL
10823 *
10824 *==========================================================================*/
10825camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10826{
10827 if (mDefaultMetadata[type] != NULL) {
10828 return mDefaultMetadata[type];
10829 }
10830 //first time we are handling this request
10831 //fill up the metadata structure using the wrapper class
10832 CameraMetadata settings;
10833 //translate from cam_capability_t to camera_metadata_tag_t
10834 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10835 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10836 int32_t defaultRequestID = 0;
10837 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10838
10839 /* OIS disable */
10840 char ois_prop[PROPERTY_VALUE_MAX];
10841 memset(ois_prop, 0, sizeof(ois_prop));
10842 property_get("persist.camera.ois.disable", ois_prop, "0");
10843 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10844
10845 /* Force video to use OIS */
10846 char videoOisProp[PROPERTY_VALUE_MAX];
10847 memset(videoOisProp, 0, sizeof(videoOisProp));
10848 property_get("persist.camera.ois.video", videoOisProp, "1");
10849 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010850
10851 // Hybrid AE enable/disable
10852 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10853 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10854 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10855 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10856
Thierry Strudel3d639192016-09-09 11:52:26 -070010857 uint8_t controlIntent = 0;
10858 uint8_t focusMode;
10859 uint8_t vsMode;
10860 uint8_t optStabMode;
10861 uint8_t cacMode;
10862 uint8_t edge_mode;
10863 uint8_t noise_red_mode;
10864 uint8_t tonemap_mode;
10865 bool highQualityModeEntryAvailable = FALSE;
10866 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010867 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010868 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10869 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010870 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010871 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010872 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010873
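// Per-template defaults: capture intent, AF mode, OIS, CAC, and edge/noise-reduction/tonemap
// quality are chosen per request template below.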
Thierry Strudel3d639192016-09-09 11:52:26 -070010874 switch (type) {
10875 case CAMERA3_TEMPLATE_PREVIEW:
10876 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10877 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10878 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10879 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10880 edge_mode = ANDROID_EDGE_MODE_FAST;
10881 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10882 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10883 break;
10884 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10885 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10886 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10887 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10888 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10889 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10890 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10891 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10892 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10893 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10894 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10895 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10896 highQualityModeEntryAvailable = TRUE;
10897 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10898 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10899 fastModeEntryAvailable = TRUE;
10900 }
10901 }
10902 if (highQualityModeEntryAvailable) {
10903 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10904 } else if (fastModeEntryAvailable) {
10905 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10906 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010907 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10908 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10909 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010910 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010911 break;
10912 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10913 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10914 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10915 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010916 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10917 edge_mode = ANDROID_EDGE_MODE_FAST;
10918 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10919 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10920 if (forceVideoOis)
10921 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10922 break;
10923 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10924 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10925 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10926 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010927 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10928 edge_mode = ANDROID_EDGE_MODE_FAST;
10929 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10930 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10931 if (forceVideoOis)
10932 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10933 break;
10934 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10935 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10936 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10937 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10938 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10939 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10940 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10941 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10942 break;
10943 case CAMERA3_TEMPLATE_MANUAL:
10944 edge_mode = ANDROID_EDGE_MODE_FAST;
10945 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10946 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10947 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10948 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10949 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10950 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10951 break;
10952 default:
10953 edge_mode = ANDROID_EDGE_MODE_FAST;
10954 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10955 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10956 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10957 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10958 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10959 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10960 break;
10961 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010962 // Set CAC to OFF if the underlying device doesn't support it
10963 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10964 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10965 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010966 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10967 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10968 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10969 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10970 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10971 }
10972 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010973 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010974 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010975
10976 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10977 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10978 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10979 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10980 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10981 || ois_disable)
10982 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10983 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010984 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010985
10986 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10987 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10988
10989 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10990 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10991
10992 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10993 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10994
10995 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10996 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10997
10998 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10999 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11000
11001 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11002 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11003
11004 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11005 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11006
11007 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11008 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11009
11010 /*flash*/
11011 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11012 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11013
11014 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11015 settings.update(ANDROID_FLASH_FIRING_POWER,
11016 &flashFiringLevel, 1);
11017
11018 /* lens */
11019 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11020 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11021
11022 if (gCamCapability[mCameraId]->filter_densities_count) {
11023 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11024 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11025 gCamCapability[mCameraId]->filter_densities_count);
11026 }
11027
11028 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11029 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11030
Thierry Strudel3d639192016-09-09 11:52:26 -070011031 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11032 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11033
11034 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11035 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11036
11037 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11038 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11039
11040 /* face detection (default to OFF) */
11041 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11042 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11043
Thierry Strudel54dc9782017-02-15 12:12:10 -080011044 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11045 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011046
11047 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11048 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11049
11050 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11051 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11052
Thierry Strudel3d639192016-09-09 11:52:26 -070011053
11054 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11055 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11056
11057 /* Exposure time (default to the minimum supported exposure time) */
11058 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11059 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11060
11061 /* frame duration */
11062 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11063 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11064
11065 /* sensitivity */
11066 static const int32_t default_sensitivity = 100;
11067 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011068#ifndef USE_HAL_3_3
11069 static const int32_t default_isp_sensitivity =
11070 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11071 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11072#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011073
11074 /*edge mode*/
11075 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11076
11077 /*noise reduction mode*/
11078 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11079
11080 /*color correction mode*/
11081 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11082 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11083
11084 /*transform matrix mode*/
11085 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11086
11087 int32_t scaler_crop_region[4];
11088 scaler_crop_region[0] = 0;
11089 scaler_crop_region[1] = 0;
11090 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11091 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11092 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11093
11094 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11095 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11096
11097 /*focus distance*/
11098 float focus_distance = 0.0;
11099 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11100
11101 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011102 /* Restrict template max_fps to TEMPLATE_MAX_PREVIEW_FPS (30) */
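// Preview/still/ZSL templates pick the widest available fps range; video templates
// pick the highest fixed (min == max) range.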
Thierry Strudel3d639192016-09-09 11:52:26 -070011103 float max_range = 0.0;
11104 float max_fixed_fps = 0.0;
11105 int32_t fps_range[2] = {0, 0};
11106 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11107 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011108 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11109 TEMPLATE_MAX_PREVIEW_FPS) {
11110 continue;
11111 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011112 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11113 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11114 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11115 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11116 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11117 if (range > max_range) {
11118 fps_range[0] =
11119 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11120 fps_range[1] =
11121 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11122 max_range = range;
11123 }
11124 } else {
11125 if (range < 0.01 && max_fixed_fps <
11126 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11127 fps_range[0] =
11128 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11129 fps_range[1] =
11130 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11131 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11132 }
11133 }
11134 }
11135 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11136
11137 /*precapture trigger*/
11138 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11139 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11140
11141 /*af trigger*/
11142 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11143 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11144
11145 /* ae & af regions */
11146 int32_t active_region[] = {
11147 gCamCapability[mCameraId]->active_array_size.left,
11148 gCamCapability[mCameraId]->active_array_size.top,
11149 gCamCapability[mCameraId]->active_array_size.left +
11150 gCamCapability[mCameraId]->active_array_size.width,
11151 gCamCapability[mCameraId]->active_array_size.top +
11152 gCamCapability[mCameraId]->active_array_size.height,
11153 0};
11154 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11155 sizeof(active_region) / sizeof(active_region[0]));
11156 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11157 sizeof(active_region) / sizeof(active_region[0]));
11158
11159 /* black level lock */
11160 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11161 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11162
Thierry Strudel3d639192016-09-09 11:52:26 -070011163 //special defaults for manual template
11164 if (type == CAMERA3_TEMPLATE_MANUAL) {
11165 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11166 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11167
11168 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11169 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11170
11171 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11172 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11173
11174 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11175 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11176
11177 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11178 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11179
11180 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11181 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11182 }
11183
11184
11185 /* TNR
11186 * This is where we decide which templates have TNR enabled by default.
11187 * TNR is enabled if either the preview or the video stream requires it.
11188 * This is not to be confused with per-stream linking; that decision is
11189 * still made per session and is handled as part of stream configuration.
11190 */
11191 uint8_t tnr_enable = 0;
11192
11193 if (m_bTnrPreview || m_bTnrVideo) {
11194
11195 switch (type) {
11196 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11197 tnr_enable = 1;
11198 break;
11199
11200 default:
11201 tnr_enable = 0;
11202 break;
11203 }
11204
11205 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11206 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11207 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11208
11209 LOGD("TNR:%d with process plate %d for template:%d",
11210 tnr_enable, tnr_process_type, type);
11211 }
11212
11213 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011214 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011215 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11216
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011217 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011218 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11219
Shuzhen Wang920ea402017-05-03 08:49:39 -070011220 uint8_t related_camera_id = mCameraId;
11221 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011222
11223 /* CDS default */
11224 char prop[PROPERTY_VALUE_MAX];
11225 memset(prop, 0, sizeof(prop));
11226 property_get("persist.camera.CDS", prop, "Auto");
11227 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11228 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11229 if (CAM_CDS_MODE_MAX == cds_mode) {
11230 cds_mode = CAM_CDS_MODE_AUTO;
11231 }
11232
11233 /* Disable CDS in templates that have TNR enabled */
11234 if (tnr_enable)
11235 cds_mode = CAM_CDS_MODE_OFF;
11236
11237 int32_t mode = cds_mode;
11238 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011239
Thierry Strudel269c81a2016-10-12 12:13:59 -070011240 /* Manual Convergence AEC Speed is disabled by default*/
11241 float default_aec_speed = 0;
11242 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11243
11244 /* Manual Convergence AWB Speed is disabled by default*/
11245 float default_awb_speed = 0;
11246 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11247
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011248 // Set instant AEC to normal convergence by default
11249 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11250 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11251
Shuzhen Wang19463d72016-03-08 11:09:52 -080011252 /* hybrid ae */
11253 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11254
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011255 if (gExposeEnableZslKey) {
11256 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11257 }
11258
Thierry Strudel3d639192016-09-09 11:52:26 -070011259 mDefaultMetadata[type] = settings.release();
11260
11261 return mDefaultMetadata[type];
11262}
11263
11264/*===========================================================================
11265 * FUNCTION : setFrameParameters
11266 *
11267 * DESCRIPTION: set parameters per frame as requested in the metadata from
11268 * framework
11269 *
11270 * PARAMETERS :
11271 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011272 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011273 * @blob_request: Whether this request is a blob request or not
11274 *
11275 * RETURN : success: NO_ERROR
11276 * failure:
11277 *==========================================================================*/
11278int QCamera3HardwareInterface::setFrameParameters(
11279 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011280 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011281 int blob_request,
11282 uint32_t snapshotStreamId)
11283{
11284 /*translate from camera_metadata_t type to parm_type_t*/
11285 int rc = 0;
11286 int32_t hal_version = CAM_HAL_V3;
11287
11288 clear_metadata_buffer(mParameters);
11289 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11290 LOGE("Failed to set hal version in the parameters");
11291 return BAD_VALUE;
11292 }
11293
11294 /*we need to update the frame number in the parameters*/
11295 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11296 request->frame_number)) {
11297 LOGE("Failed to set the frame number in the parameters");
11298 return BAD_VALUE;
11299 }
11300
11301 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011302 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011303 LOGE("Failed to set stream type mask in the parameters");
11304 return BAD_VALUE;
11305 }
11306
11307 if (mUpdateDebugLevel) {
11308 uint32_t dummyDebugLevel = 0;
11309 /* The value of dummyDebugLevel is irrelevant. On
11310 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11311 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11312 dummyDebugLevel)) {
11313 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11314 return BAD_VALUE;
11315 }
11316 mUpdateDebugLevel = false;
11317 }
11318
11319 if(request->settings != NULL){
11320 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11321 if (blob_request)
11322 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11323 }
11324
11325 return rc;
11326}
11327
11328/*===========================================================================
11329 * FUNCTION : setReprocParameters
11330 *
11331 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11332 * return it.
11333 *
11334 * PARAMETERS :
11335 * @request : request that needs to be serviced
11336 *
11337 * RETURN : success: NO_ERROR
11338 * failure:
11339 *==========================================================================*/
11340int32_t QCamera3HardwareInterface::setReprocParameters(
11341 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11342 uint32_t snapshotStreamId)
11343{
11344 /*translate from camera_metadata_t type to parm_type_t*/
11345 int rc = 0;
11346
11347 if (NULL == request->settings){
11348 LOGE("Reprocess settings cannot be NULL");
11349 return BAD_VALUE;
11350 }
11351
11352 if (NULL == reprocParam) {
11353 LOGE("Invalid reprocessing metadata buffer");
11354 return BAD_VALUE;
11355 }
11356 clear_metadata_buffer(reprocParam);
11357
11358 /*we need to update the frame number in the parameters*/
11359 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11360 request->frame_number)) {
11361 LOGE("Failed to set the frame number in the parameters");
11362 return BAD_VALUE;
11363 }
11364
11365 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11366 if (rc < 0) {
11367 LOGE("Failed to translate reproc request");
11368 return rc;
11369 }
11370
11371 CameraMetadata frame_settings;
11372 frame_settings = request->settings;
11373 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11374 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11375 int32_t *crop_count =
11376 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11377 int32_t *crop_data =
11378 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11379 int32_t *roi_map =
11380 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11381 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
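// Only the first crop entry is consumed here; num_of_streams is forced to 1 for the reprocess stream.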
11382 cam_crop_data_t crop_meta;
11383 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11384 crop_meta.num_of_streams = 1;
11385 crop_meta.crop_info[0].crop.left = crop_data[0];
11386 crop_meta.crop_info[0].crop.top = crop_data[1];
11387 crop_meta.crop_info[0].crop.width = crop_data[2];
11388 crop_meta.crop_info[0].crop.height = crop_data[3];
11389
11390 crop_meta.crop_info[0].roi_map.left =
11391 roi_map[0];
11392 crop_meta.crop_info[0].roi_map.top =
11393 roi_map[1];
11394 crop_meta.crop_info[0].roi_map.width =
11395 roi_map[2];
11396 crop_meta.crop_info[0].roi_map.height =
11397 roi_map[3];
11398
11399 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11400 rc = BAD_VALUE;
11401 }
11402 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11403 request->input_buffer->stream,
11404 crop_meta.crop_info[0].crop.left,
11405 crop_meta.crop_info[0].crop.top,
11406 crop_meta.crop_info[0].crop.width,
11407 crop_meta.crop_info[0].crop.height);
11408 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11409 request->input_buffer->stream,
11410 crop_meta.crop_info[0].roi_map.left,
11411 crop_meta.crop_info[0].roi_map.top,
11412 crop_meta.crop_info[0].roi_map.width,
11413 crop_meta.crop_info[0].roi_map.height);
11414 } else {
11415 LOGE("Invalid reprocess crop count %d!", *crop_count);
11416 }
11417 } else {
11418 LOGE("No crop data from matching output stream");
11419 }
11420
11421 /* These settings are not needed for regular requests so handle them specially for
11422 reprocess requests; information needed for EXIF tags */
11423 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11424 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11425 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11426 if (NAME_NOT_FOUND != val) {
11427 uint32_t flashMode = (uint32_t)val;
11428 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11429 rc = BAD_VALUE;
11430 }
11431 } else {
11432 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11433 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11434 }
11435 } else {
11436 LOGH("No flash mode in reprocess settings");
11437 }
11438
11439 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11440 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11441 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11442 rc = BAD_VALUE;
11443 }
11444 } else {
11445 LOGH("No flash state in reprocess settings");
11446 }
11447
11448 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11449 uint8_t *reprocessFlags =
11450 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11451 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11452 *reprocessFlags)) {
11453 rc = BAD_VALUE;
11454 }
11455 }
11456
Thierry Strudel54dc9782017-02-15 12:12:10 -080011457 // Add exif debug data to internal metadata
11458 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11459 mm_jpeg_debug_exif_params_t *debug_params =
11460 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11461 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11462 // AE
11463 if (debug_params->ae_debug_params_valid == TRUE) {
11464 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11465 debug_params->ae_debug_params);
11466 }
11467 // AWB
11468 if (debug_params->awb_debug_params_valid == TRUE) {
11469 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11470 debug_params->awb_debug_params);
11471 }
11472 // AF
11473 if (debug_params->af_debug_params_valid == TRUE) {
11474 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11475 debug_params->af_debug_params);
11476 }
11477 // ASD
11478 if (debug_params->asd_debug_params_valid == TRUE) {
11479 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11480 debug_params->asd_debug_params);
11481 }
11482 // Stats
11483 if (debug_params->stats_debug_params_valid == TRUE) {
11484 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11485 debug_params->stats_debug_params);
11486 }
11487 // BE Stats
11488 if (debug_params->bestats_debug_params_valid == TRUE) {
11489 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11490 debug_params->bestats_debug_params);
11491 }
11492 // BHIST
11493 if (debug_params->bhist_debug_params_valid == TRUE) {
11494 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11495 debug_params->bhist_debug_params);
11496 }
11497 // 3A Tuning
11498 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11499 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11500 debug_params->q3a_tuning_debug_params);
11501 }
11502 }
11503
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011504 // Add metadata which reprocess needs
11505 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11506 cam_reprocess_info_t *repro_info =
11507 (cam_reprocess_info_t *)frame_settings.find
11508 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011509 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011510 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011511 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011512 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011513 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011514 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011515 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011516 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011517 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011518 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011519 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011520 repro_info->pipeline_flip);
11521 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11522 repro_info->af_roi);
11523 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11524 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011525 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11526 CAM_INTF_PARM_ROTATION metadata has already been added in
11527 translateToHalMetadata, and the HAL needs to keep that new rotation
11528 metadata. Otherwise, the old rotation info saved in the vendor tag
11529 is used */
11530 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11531 CAM_INTF_PARM_ROTATION, reprocParam) {
11532 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11533 } else {
11534 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011535 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011536 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011537 }
11538
11539 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11540 to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11541 roi.width and roi.height become the final JPEG size.
11542 For now, the HAL only checks this for reprocess requests */
11543 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11544 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11545 uint8_t *enable =
11546 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11547 if (*enable == TRUE) {
11548 int32_t *crop_data =
11549 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11550 cam_stream_crop_info_t crop_meta;
11551 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11552 crop_meta.stream_id = 0;
11553 crop_meta.crop.left = crop_data[0];
11554 crop_meta.crop.top = crop_data[1];
11555 crop_meta.crop.width = crop_data[2];
11556 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011557 // The JPEG crop roi should match cpp output size
11558 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11559 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11560 crop_meta.roi_map.left = 0;
11561 crop_meta.roi_map.top = 0;
11562 crop_meta.roi_map.width = cpp_crop->crop.width;
11563 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011564 }
11565 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11566 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011567 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011568 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011569 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11570 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011571 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011572 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11573
11574 // Add JPEG scale information
11575 cam_dimension_t scale_dim;
11576 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11577 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11578 int32_t *roi =
11579 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11580 scale_dim.width = roi[2];
11581 scale_dim.height = roi[3];
11582 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11583 scale_dim);
11584 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11585 scale_dim.width, scale_dim.height, mCameraId);
11586 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011587 }
11588 }
11589
11590 return rc;
11591}
11592
11593/*===========================================================================
11594 * FUNCTION : saveRequestSettings
11595 *
11596 * DESCRIPTION: Add any settings that might have changed to the request settings
11597 * and save the settings to be applied on the frame
11598 *
11599 * PARAMETERS :
11600 * @jpegMetadata : the extracted and/or modified jpeg metadata
11601 * @request : request with initial settings
11602 *
11603 * RETURN :
11604 * camera_metadata_t* : pointer to the saved request settings
11605 *==========================================================================*/
11606camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11607 const CameraMetadata &jpegMetadata,
11608 camera3_capture_request_t *request)
11609{
11610 camera_metadata_t *resultMetadata;
11611 CameraMetadata camMetadata;
11612 camMetadata = request->settings;
11613
11614 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11615 int32_t thumbnail_size[2];
11616 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11617 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11618 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11619 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11620 }
11621
11622 if (request->input_buffer != NULL) {
11623 uint8_t reprocessFlags = 1;
11624 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11625 (uint8_t*)&reprocessFlags,
11626 sizeof(reprocessFlags));
11627 }
11628
11629 resultMetadata = camMetadata.release();
11630 return resultMetadata;
11631}
11632
11633/*===========================================================================
11634 * FUNCTION : setHalFpsRange
11635 *
11636 * DESCRIPTION: set FPS range parameter
11637 *
11638 *
11639 * PARAMETERS :
11640 * @settings : Metadata from framework
11641 * @hal_metadata: Metadata buffer
11642 *
11643 *
11644 * RETURN : success: NO_ERROR
11645 * failure:
11646 *==========================================================================*/
11647int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11648 metadata_buffer_t *hal_metadata)
11649{
11650 int32_t rc = NO_ERROR;
11651 cam_fps_range_t fps_range;
11652 fps_range.min_fps = (float)
11653 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11654 fps_range.max_fps = (float)
11655 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11656 fps_range.video_min_fps = fps_range.min_fps;
11657 fps_range.video_max_fps = fps_range.max_fps;
11658
11659 LOGD("aeTargetFpsRange fps: [%f %f]",
11660 fps_range.min_fps, fps_range.max_fps);
11661 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11662 * follows:
11663 * ---------------------------------------------------------------|
11664 * Video stream is absent in configure_streams |
11665 * (Camcorder preview before the first video record |
11666 * ---------------------------------------------------------------|
11667 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11668 * | | | vid_min/max_fps|
11669 * ---------------------------------------------------------------|
11670 * NO | [ 30, 240] | 240 | [240, 240] |
11671 * |-------------|-------------|----------------|
11672 * | [240, 240] | 240 | [240, 240] |
11673 * ---------------------------------------------------------------|
11674 * Video stream is present in configure_streams |
11675 * ---------------------------------------------------------------|
11676 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11677 * | | | vid_min/max_fps|
11678 * ---------------------------------------------------------------|
11679 * NO | [ 30, 240] | 240 | [240, 240] |
11680 * (camcorder prev |-------------|-------------|----------------|
11681 * after video rec | [240, 240] | 240 | [240, 240] |
11682 * is stopped) | | | |
11683 * ---------------------------------------------------------------|
11684 * YES | [ 30, 240] | 240 | [240, 240] |
11685 * |-------------|-------------|----------------|
11686 * | [240, 240] | 240 | [240, 240] |
11687 * ---------------------------------------------------------------|
11688 * When Video stream is absent in configure_streams,
11689 * preview fps = sensor_fps / batchsize
11690 * Eg: for 240fps at batchSize 4, preview = 60fps
11691 * for 120fps at batchSize 4, preview = 30fps
11692 *
11693 * When video stream is present in configure_streams, preview fps is as per
11694 * the ratio of preview buffers to video buffers requested in process
11695 * capture request
11696 */
11697 mBatchSize = 0;
11698 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11699 fps_range.min_fps = fps_range.video_max_fps;
11700 fps_range.video_min_fps = fps_range.video_max_fps;
11701 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11702 fps_range.max_fps);
11703 if (NAME_NOT_FOUND != val) {
11704 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11705 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11706 return BAD_VALUE;
11707 }
11708
11709 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11710 /* If batchmode is currently in progress and the fps changes,
11711 * set the flag to restart the sensor */
11712 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11713 (mHFRVideoFps != fps_range.max_fps)) {
11714 mNeedSensorRestart = true;
11715 }
11716 mHFRVideoFps = fps_range.max_fps;
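// Batch size is the sensor fps divided by the HFR preview fps (assuming PREVIEW_FPS_FOR_HFR
// is 30, 240 fps gives a batch of 8), capped at MAX_HFR_BATCH_SIZE below.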
11717 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11718 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11719 mBatchSize = MAX_HFR_BATCH_SIZE;
11720 }
11721 }
11722 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11723
11724 }
11725 } else {
11726         /* HFR mode is a session parameter in the backend/ISP. It must be
11727          * reset when not in HFR mode */
11728 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11729 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11730 return BAD_VALUE;
11731 }
11732 }
11733 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11734 return BAD_VALUE;
11735 }
11736 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11737 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11738 return rc;
11739}
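/* Usage sketch (illustrative, not part of the HAL contract): in
 * CONSTRAINED_HIGH_SPEED_MODE a request carrying
 * ANDROID_CONTROL_AE_TARGET_FPS_RANGE = [30, 240] is sent to the backend as
 * fps_range = {240, 240, 240, 240} together with CAM_INTF_PARM_HFR, whereas
 * the same range in normal mode passes through unchanged as
 * {30, 240, 30, 240} with CAM_HFR_MODE_OFF. */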
11740
11741/*===========================================================================
11742 * FUNCTION : translateToHalMetadata
11743 *
11744 * DESCRIPTION: read settings from the framework camera_metadata_t and
11745 *              translate them into HAL metadata (parm_type_t) entries
11746 *
11747 * PARAMETERS :
11748 *   @request : capture request sent from the framework
11749 *   @hal_metadata : destination HAL metadata buffer
11750 *   @snapshotStreamId : stream id of the snapshot stream
11751 *
11752 * RETURN : success: NO_ERROR / failure: BAD_VALUE
11753 *==========================================================================*/
11754int QCamera3HardwareInterface::translateToHalMetadata
11755 (const camera3_capture_request_t *request,
11756 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011757 uint32_t snapshotStreamId) {
11758 if (request == nullptr || hal_metadata == nullptr) {
11759 return BAD_VALUE;
11760 }
11761
11762 int64_t minFrameDuration = getMinFrameDuration(request);
11763
11764 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11765 minFrameDuration);
11766}
11767
11768int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11769 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11770 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11771
Thierry Strudel3d639192016-09-09 11:52:26 -070011772 int rc = 0;
11773 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011774 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011775
11776 /* Do not change the order of the following list unless you know what you are
11777 * doing.
11778 * The order is laid out in such a way that parameters in the front of the table
11779 * may be used to override the parameters later in the table. Examples are:
11780 * 1. META_MODE should precede AEC/AWB/AF MODE
11781     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11782     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11783     * 4. Any mode should precede its corresponding settings
11784 */
11785 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11786 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11787 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11788 rc = BAD_VALUE;
11789 }
11790 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11791 if (rc != NO_ERROR) {
11792 LOGE("extractSceneMode failed");
11793 }
11794 }
11795
11796 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11797 uint8_t fwk_aeMode =
11798 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11799 uint8_t aeMode;
11800 int32_t redeye;
11801
11802 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11803 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011804 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11805 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011806 } else {
11807 aeMode = CAM_AE_MODE_ON;
11808 }
11809 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11810 redeye = 1;
11811 } else {
11812 redeye = 0;
11813 }
11814
11815 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11816 fwk_aeMode);
11817 if (NAME_NOT_FOUND != val) {
11818 int32_t flashMode = (int32_t)val;
11819 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11820 }
11821
11822 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11823 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11824 rc = BAD_VALUE;
11825 }
11826 }
11827
11828 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11829 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11830 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11831 fwk_whiteLevel);
11832 if (NAME_NOT_FOUND != val) {
11833 uint8_t whiteLevel = (uint8_t)val;
11834 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11835 rc = BAD_VALUE;
11836 }
11837 }
11838 }
11839
11840 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11841 uint8_t fwk_cacMode =
11842 frame_settings.find(
11843 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11844 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11845 fwk_cacMode);
11846 if (NAME_NOT_FOUND != val) {
11847 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11848 bool entryAvailable = FALSE;
11849            // Check whether the framework-set CAC mode is supported by the device
11850 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11851 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11852 entryAvailable = TRUE;
11853 break;
11854 }
11855 }
11856 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11857            // If the entry is not found, set a device-supported mode instead of the frameworks mode, i.e.,
11858            // Only HW ISP CAC + no SW CAC : advertise all 3, with HIGH doing the same as FAST in the ISP
11859            // No HW ISP CAC + only SW CAC : advertise all 3, with FAST doing the same as OFF
11860 if (entryAvailable == FALSE) {
11861 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11862 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11863 } else {
11864 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11865                    // HIGH is not supported, so set FAST: the spec says the underlying
11866                    // device implementation can be the same for both modes.
11867 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11868 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11869                    // FAST is not supported, so we cannot set HIGH or FAST; choose OFF
11870                    // to avoid the fps drop that a higher-quality mode would cause
11871 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11872 } else {
11873 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11874 }
11875 }
11876 }
11877 LOGD("Final cacMode is %d", cacMode);
11878 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11879 rc = BAD_VALUE;
11880 }
11881 } else {
11882 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11883 }
11884 }
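    /* Illustrative fallback (assuming the device advertises only FAST CAC):
     * a framework request for HIGH_QUALITY is downgraded to FAST above, and a
     * request for FAST on a device with no matching mode drops to OFF,
     * trading aberration correction for sustained fps. */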
11885
Thierry Strudel2896d122017-02-23 19:18:03 -080011886 char af_value[PROPERTY_VALUE_MAX];
11887 property_get("persist.camera.af.infinity", af_value, "0");
11888
Jason Lee84ae9972017-02-24 13:24:24 -080011889 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011890 if (atoi(af_value) == 0) {
11891 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011892 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011893 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11894 fwk_focusMode);
11895 if (NAME_NOT_FOUND != val) {
11896 uint8_t focusMode = (uint8_t)val;
11897 LOGD("set focus mode %d", focusMode);
11898 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11899 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11900 rc = BAD_VALUE;
11901 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011902 }
11903 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011904 } else {
11905 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11906 LOGE("Focus forced to infinity %d", focusMode);
11907 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11908 rc = BAD_VALUE;
11909 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011910 }
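    /* Debug aid (illustrative): the infinity override above is driven by a
     * persistent property, e.g.
     *     adb shell setprop persist.camera.af.infinity 1
     * forces CAM_FOCUS_MODE_INFINITY for every request until the property is
     * reset to 0, at which point the framework AF mode is honored again. */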
11911
Jason Lee84ae9972017-02-24 13:24:24 -080011912 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11913 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011914 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11915 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11916 focalDistance)) {
11917 rc = BAD_VALUE;
11918 }
11919 }
11920
11921 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11922 uint8_t fwk_antibandingMode =
11923 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11924 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11925 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11926 if (NAME_NOT_FOUND != val) {
11927 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011928 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11929 if (m60HzZone) {
11930 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11931 } else {
11932 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11933 }
11934 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011935 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11936 hal_antibandingMode)) {
11937 rc = BAD_VALUE;
11938 }
11939 }
11940 }
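    /* Note (illustrative): ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO is
     * narrowed here to a mains-frequency-specific mode, so a device
     * provisioned for a 60Hz region (m60HzZone == true) ends up with
     * CAM_ANTIBANDING_MODE_AUTO_60HZ even though the app only asked for AUTO. */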
11941
11942 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11943 int32_t expCompensation = frame_settings.find(
11944 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11945 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11946 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11947 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11948 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011949 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011950 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11951 expCompensation)) {
11952 rc = BAD_VALUE;
11953 }
11954 }
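    /* Worked example (illustrative numbers): if the capability table reports
     * an exposure compensation range of [-12, 12], a framework value of +20 is
     * clamped to +12 before being written as
     * CAM_INTF_PARM_EXPOSURE_COMPENSATION. */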
11955
11956 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11957 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11958 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11959 rc = BAD_VALUE;
11960 }
11961 }
11962 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11963 rc = setHalFpsRange(frame_settings, hal_metadata);
11964 if (rc != NO_ERROR) {
11965 LOGE("setHalFpsRange failed");
11966 }
11967 }
11968
11969 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11970 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11971 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11972 rc = BAD_VALUE;
11973 }
11974 }
11975
11976 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11977 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11978 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11979 fwk_effectMode);
11980 if (NAME_NOT_FOUND != val) {
11981 uint8_t effectMode = (uint8_t)val;
11982 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11983 rc = BAD_VALUE;
11984 }
11985 }
11986 }
11987
11988 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11989 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11990 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11991 colorCorrectMode)) {
11992 rc = BAD_VALUE;
11993 }
11994 }
11995
11996 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11997 cam_color_correct_gains_t colorCorrectGains;
11998 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11999 colorCorrectGains.gains[i] =
12000 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12001 }
12002 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12003 colorCorrectGains)) {
12004 rc = BAD_VALUE;
12005 }
12006 }
12007
12008 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12009 cam_color_correct_matrix_t colorCorrectTransform;
12010 cam_rational_type_t transform_elem;
12011 size_t num = 0;
12012 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12013 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12014 transform_elem.numerator =
12015 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12016 transform_elem.denominator =
12017 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12018 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12019 num++;
12020 }
12021 }
12022 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12023 colorCorrectTransform)) {
12024 rc = BAD_VALUE;
12025 }
12026 }
12027
12028 cam_trigger_t aecTrigger;
12029 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12030 aecTrigger.trigger_id = -1;
12031 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12032 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12033 aecTrigger.trigger =
12034 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12035 aecTrigger.trigger_id =
12036 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12037 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12038 aecTrigger)) {
12039 rc = BAD_VALUE;
12040 }
12041 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12042 aecTrigger.trigger, aecTrigger.trigger_id);
12043 }
12044
12045 /*af_trigger must come with a trigger id*/
12046 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12047 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12048 cam_trigger_t af_trigger;
12049 af_trigger.trigger =
12050 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12051 af_trigger.trigger_id =
12052 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12053 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12054 rc = BAD_VALUE;
12055 }
12056 LOGD("AfTrigger: %d AfTriggerID: %d",
12057 af_trigger.trigger, af_trigger.trigger_id);
12058 }
12059
12060 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12061 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12062 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12063 rc = BAD_VALUE;
12064 }
12065 }
12066 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12067 cam_edge_application_t edge_application;
12068 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012069
Thierry Strudel3d639192016-09-09 11:52:26 -070012070 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12071 edge_application.sharpness = 0;
12072 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012073 edge_application.sharpness =
12074 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12075 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12076 int32_t sharpness =
12077 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12078 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12079 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12080 LOGD("Setting edge mode sharpness %d", sharpness);
12081 edge_application.sharpness = sharpness;
12082 }
12083 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012084 }
12085 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12086 rc = BAD_VALUE;
12087 }
12088 }
12089
12090 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12091 int32_t respectFlashMode = 1;
12092 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12093 uint8_t fwk_aeMode =
12094 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012095 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12096 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12097 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012098 respectFlashMode = 0;
12099 LOGH("AE Mode controls flash, ignore android.flash.mode");
12100 }
12101 }
12102 if (respectFlashMode) {
12103 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12104 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12105 LOGH("flash mode after mapping %d", val);
12106 // To check: CAM_INTF_META_FLASH_MODE usage
12107 if (NAME_NOT_FOUND != val) {
12108 uint8_t flashMode = (uint8_t)val;
12109 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12110 rc = BAD_VALUE;
12111 }
12112 }
12113 }
12114 }
12115
12116 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12117 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12118 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12119 rc = BAD_VALUE;
12120 }
12121 }
12122
12123 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12124 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12125 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12126 flashFiringTime)) {
12127 rc = BAD_VALUE;
12128 }
12129 }
12130
12131 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12132 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12133 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12134 hotPixelMode)) {
12135 rc = BAD_VALUE;
12136 }
12137 }
12138
12139 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12140 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12141 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12142 lensAperture)) {
12143 rc = BAD_VALUE;
12144 }
12145 }
12146
12147 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12148 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12149 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12150 filterDensity)) {
12151 rc = BAD_VALUE;
12152 }
12153 }
12154
12155 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12156 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12157 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12158 focalLength)) {
12159 rc = BAD_VALUE;
12160 }
12161 }
12162
12163 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12164 uint8_t optStabMode =
12165 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12166 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12167 optStabMode)) {
12168 rc = BAD_VALUE;
12169 }
12170 }
12171
12172 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12173 uint8_t videoStabMode =
12174 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12175 LOGD("videoStabMode from APP = %d", videoStabMode);
12176 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12177 videoStabMode)) {
12178 rc = BAD_VALUE;
12179 }
12180 }
12181
12182
12183 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12184 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12185 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12186 noiseRedMode)) {
12187 rc = BAD_VALUE;
12188 }
12189 }
12190
12191 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12192 float reprocessEffectiveExposureFactor =
12193 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12194 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12195 reprocessEffectiveExposureFactor)) {
12196 rc = BAD_VALUE;
12197 }
12198 }
12199
12200 cam_crop_region_t scalerCropRegion;
12201 bool scalerCropSet = false;
12202 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12203 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12204 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12205 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12206 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12207
12208 // Map coordinate system from active array to sensor output.
12209 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12210 scalerCropRegion.width, scalerCropRegion.height);
12211
12212 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12213 scalerCropRegion)) {
12214 rc = BAD_VALUE;
12215 }
12216 scalerCropSet = true;
12217 }
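    /* Coordinate-space note (illustrative numbers): the crop region arrives in
     * active-array coordinates and mCropRegionMapper.toSensor() rescales it to
     * the current sensor output, e.g. a (0, 0, 4032, 3024) active-array crop
     * maps to (0, 0, 2016, 1512) when the sensor runs in a 2x binned mode. */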
12218
12219 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12220 int64_t sensorExpTime =
12221 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12222 LOGD("setting sensorExpTime %lld", sensorExpTime);
12223 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12224 sensorExpTime)) {
12225 rc = BAD_VALUE;
12226 }
12227 }
12228
12229 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12230 int64_t sensorFrameDuration =
12231 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012232 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12233 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12234 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12235 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12236 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12237 sensorFrameDuration)) {
12238 rc = BAD_VALUE;
12239 }
12240 }
12241
12242 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12243 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12244 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12245 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12246 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12247 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12248 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12249 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12250 sensorSensitivity)) {
12251 rc = BAD_VALUE;
12252 }
12253 }
12254
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012255#ifndef USE_HAL_3_3
12256 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12257 int32_t ispSensitivity =
12258 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12259 if (ispSensitivity <
12260 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12261 ispSensitivity =
12262 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12263 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12264 }
12265 if (ispSensitivity >
12266 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12267 ispSensitivity =
12268 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12269 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12270 }
12271 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12272 ispSensitivity)) {
12273 rc = BAD_VALUE;
12274 }
12275 }
12276#endif
12277
Thierry Strudel3d639192016-09-09 11:52:26 -070012278 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12279 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12280 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12281 rc = BAD_VALUE;
12282 }
12283 }
12284
12285 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12286 uint8_t fwk_facedetectMode =
12287 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12288
12289 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12290 fwk_facedetectMode);
12291
12292 if (NAME_NOT_FOUND != val) {
12293 uint8_t facedetectMode = (uint8_t)val;
12294 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12295 facedetectMode)) {
12296 rc = BAD_VALUE;
12297 }
12298 }
12299 }
12300
Thierry Strudel54dc9782017-02-15 12:12:10 -080012301 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012302 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012303 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012304 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12305 histogramMode)) {
12306 rc = BAD_VALUE;
12307 }
12308 }
12309
12310 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12311 uint8_t sharpnessMapMode =
12312 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12313 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12314 sharpnessMapMode)) {
12315 rc = BAD_VALUE;
12316 }
12317 }
12318
12319 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12320 uint8_t tonemapMode =
12321 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12322 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12323 rc = BAD_VALUE;
12324 }
12325 }
12326 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12327 /*All tonemap channels will have the same number of points*/
12328 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12329 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12330 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12331 cam_rgb_tonemap_curves tonemapCurves;
12332 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12333 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12334 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12335 tonemapCurves.tonemap_points_cnt,
12336 CAM_MAX_TONEMAP_CURVE_SIZE);
12337 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12338 }
12339
12340 /* ch0 = G*/
12341 size_t point = 0;
12342 cam_tonemap_curve_t tonemapCurveGreen;
12343 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12344 for (size_t j = 0; j < 2; j++) {
12345 tonemapCurveGreen.tonemap_points[i][j] =
12346 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12347 point++;
12348 }
12349 }
12350 tonemapCurves.curves[0] = tonemapCurveGreen;
12351
12352 /* ch 1 = B */
12353 point = 0;
12354 cam_tonemap_curve_t tonemapCurveBlue;
12355 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12356 for (size_t j = 0; j < 2; j++) {
12357 tonemapCurveBlue.tonemap_points[i][j] =
12358 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12359 point++;
12360 }
12361 }
12362 tonemapCurves.curves[1] = tonemapCurveBlue;
12363
12364 /* ch 2 = R */
12365 point = 0;
12366 cam_tonemap_curve_t tonemapCurveRed;
12367 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12368 for (size_t j = 0; j < 2; j++) {
12369 tonemapCurveRed.tonemap_points[i][j] =
12370 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12371 point++;
12372 }
12373 }
12374 tonemapCurves.curves[2] = tonemapCurveRed;
12375
12376 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12377 tonemapCurves)) {
12378 rc = BAD_VALUE;
12379 }
12380 }
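    /* Layout note: each ANDROID_TONEMAP_CURVE_* array is a flat list of
     * (Pin, Pout) pairs, which is why tonemap_points_cnt above is count/2 and
     * each curve copies two floats per point; e.g. a 64-entry array describes
     * a 32-point curve per channel. */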
12381
12382 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12383 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12384 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12385 captureIntent)) {
12386 rc = BAD_VALUE;
12387 }
12388 }
12389
12390 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12391 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12392 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12393 blackLevelLock)) {
12394 rc = BAD_VALUE;
12395 }
12396 }
12397
12398 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12399 uint8_t lensShadingMapMode =
12400 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12401 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12402 lensShadingMapMode)) {
12403 rc = BAD_VALUE;
12404 }
12405 }
12406
12407 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12408 cam_area_t roi;
12409 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012410 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012411
12412 // Map coordinate system from active array to sensor output.
12413 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12414 roi.rect.height);
12415
12416 if (scalerCropSet) {
12417 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12418 }
12419 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12420 rc = BAD_VALUE;
12421 }
12422 }
12423
12424 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12425 cam_area_t roi;
12426 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012427 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012428
12429 // Map coordinate system from active array to sensor output.
12430 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12431 roi.rect.height);
12432
12433 if (scalerCropSet) {
12434 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12435 }
12436 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12437 rc = BAD_VALUE;
12438 }
12439 }
12440
12441 // CDS for non-HFR non-video mode
12442 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12443 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12444 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12445 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12446 LOGE("Invalid CDS mode %d!", *fwk_cds);
12447 } else {
12448 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12449 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12450 rc = BAD_VALUE;
12451 }
12452 }
12453 }
12454
Thierry Strudel04e026f2016-10-10 11:27:36 -070012455 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012456 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012457 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012458 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12459 }
12460 if (m_bVideoHdrEnabled)
12461 vhdr = CAM_VIDEO_HDR_MODE_ON;
12462
Thierry Strudel54dc9782017-02-15 12:12:10 -080012463 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12464
12465 if(vhdr != curr_hdr_state)
12466 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12467
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012468 rc = setVideoHdrMode(mParameters, vhdr);
12469 if (rc != NO_ERROR) {
12470 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012471 }
12472
12473 //IR
12474 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12475 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12476 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012477 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12478 uint8_t isIRon = 0;
12479
12480        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012481 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12482 LOGE("Invalid IR mode %d!", fwk_ir);
12483 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012484 if(isIRon != curr_ir_state )
12485 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12486
Thierry Strudel04e026f2016-10-10 11:27:36 -070012487 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12488 CAM_INTF_META_IR_MODE, fwk_ir)) {
12489 rc = BAD_VALUE;
12490 }
12491 }
12492 }
12493
Thierry Strudel54dc9782017-02-15 12:12:10 -080012494 //Binning Correction Mode
12495 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12496 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12497 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12498 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12499 || (0 > fwk_binning_correction)) {
12500 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12501 } else {
12502 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12503 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12504 rc = BAD_VALUE;
12505 }
12506 }
12507 }
12508
Thierry Strudel269c81a2016-10-12 12:13:59 -070012509 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12510 float aec_speed;
12511 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12512 LOGD("AEC Speed :%f", aec_speed);
12513 if ( aec_speed < 0 ) {
12514 LOGE("Invalid AEC mode %f!", aec_speed);
12515 } else {
12516 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12517 aec_speed)) {
12518 rc = BAD_VALUE;
12519 }
12520 }
12521 }
12522
12523 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12524 float awb_speed;
12525 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12526 LOGD("AWB Speed :%f", awb_speed);
12527 if ( awb_speed < 0 ) {
12528 LOGE("Invalid AWB mode %f!", awb_speed);
12529 } else {
12530 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12531 awb_speed)) {
12532 rc = BAD_VALUE;
12533 }
12534 }
12535 }
12536
Thierry Strudel3d639192016-09-09 11:52:26 -070012537 // TNR
12538 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12539 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12540 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012541 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012542 cam_denoise_param_t tnr;
12543 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12544 tnr.process_plates =
12545 (cam_denoise_process_type_t)frame_settings.find(
12546 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12547 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012548
12549 if(b_TnrRequested != curr_tnr_state)
12550 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12551
Thierry Strudel3d639192016-09-09 11:52:26 -070012552 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12553 rc = BAD_VALUE;
12554 }
12555 }
12556
Thierry Strudel54dc9782017-02-15 12:12:10 -080012557 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012558 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012559 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012560 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12561 *exposure_metering_mode)) {
12562 rc = BAD_VALUE;
12563 }
12564 }
12565
Thierry Strudel3d639192016-09-09 11:52:26 -070012566 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12567 int32_t fwk_testPatternMode =
12568 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12569 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12570 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12571
12572 if (NAME_NOT_FOUND != testPatternMode) {
12573 cam_test_pattern_data_t testPatternData;
12574 memset(&testPatternData, 0, sizeof(testPatternData));
12575 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12576 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12577 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12578 int32_t *fwk_testPatternData =
12579 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12580 testPatternData.r = fwk_testPatternData[0];
12581 testPatternData.b = fwk_testPatternData[3];
12582 switch (gCamCapability[mCameraId]->color_arrangement) {
12583 case CAM_FILTER_ARRANGEMENT_RGGB:
12584 case CAM_FILTER_ARRANGEMENT_GRBG:
12585 testPatternData.gr = fwk_testPatternData[1];
12586 testPatternData.gb = fwk_testPatternData[2];
12587 break;
12588 case CAM_FILTER_ARRANGEMENT_GBRG:
12589 case CAM_FILTER_ARRANGEMENT_BGGR:
12590 testPatternData.gr = fwk_testPatternData[2];
12591 testPatternData.gb = fwk_testPatternData[1];
12592 break;
12593 default:
12594 LOGE("color arrangement %d is not supported",
12595 gCamCapability[mCameraId]->color_arrangement);
12596 break;
12597 }
12598 }
12599 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12600 testPatternData)) {
12601 rc = BAD_VALUE;
12602 }
12603 } else {
12604 LOGE("Invalid framework sensor test pattern mode %d",
12605 fwk_testPatternMode);
12606 }
12607 }
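    /* Illustrative example: for CAM_TEST_PATTERN_SOLID_COLOR the four
     * ANDROID_SENSOR_TEST_PATTERN_DATA values are split as data[0] -> r and
     * data[3] -> b, with data[1]/data[2] assigned to gr/gb (swapped for
     * GBRG/BGGR sensors), as the switch above shows. */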
12608
12609 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12610 size_t count = 0;
12611 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12612 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12613 gps_coords.data.d, gps_coords.count, count);
12614 if (gps_coords.count != count) {
12615 rc = BAD_VALUE;
12616 }
12617 }
12618
12619 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12620 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12621 size_t count = 0;
12622 const char *gps_methods_src = (const char *)
12623 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12624 memset(gps_methods, '\0', sizeof(gps_methods));
12625 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12626 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12627 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12628 if (GPS_PROCESSING_METHOD_SIZE != count) {
12629 rc = BAD_VALUE;
12630 }
12631 }
12632
12633 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12634 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12635 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12636 gps_timestamp)) {
12637 rc = BAD_VALUE;
12638 }
12639 }
12640
12641 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12642 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12643 cam_rotation_info_t rotation_info;
12644 if (orientation == 0) {
12645 rotation_info.rotation = ROTATE_0;
12646 } else if (orientation == 90) {
12647 rotation_info.rotation = ROTATE_90;
12648 } else if (orientation == 180) {
12649 rotation_info.rotation = ROTATE_180;
12650 } else if (orientation == 270) {
12651 rotation_info.rotation = ROTATE_270;
12652 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012653 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012654 rotation_info.streamId = snapshotStreamId;
12655 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12656 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12657 rc = BAD_VALUE;
12658 }
12659 }
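    /* Illustrative example: ANDROID_JPEG_ORIENTATION = 90 yields
     * rotation_info = { ROTATE_90, ROTATE_0, snapshotStreamId }, i.e. the JPEG
     * path rotates the snapshot stream while the reported device rotation
     * stays at 0; only 0/90/180/270 are handled above. */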
12660
12661 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12662 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12663 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12664 rc = BAD_VALUE;
12665 }
12666 }
12667
12668 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12669 uint32_t thumb_quality = (uint32_t)
12670 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12671 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12672 thumb_quality)) {
12673 rc = BAD_VALUE;
12674 }
12675 }
12676
12677 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12678 cam_dimension_t dim;
12679 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12680 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12681 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12682 rc = BAD_VALUE;
12683 }
12684 }
12685
12686 // Internal metadata
12687 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12688 size_t count = 0;
12689 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12690 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12691 privatedata.data.i32, privatedata.count, count);
12692 if (privatedata.count != count) {
12693 rc = BAD_VALUE;
12694 }
12695 }
12696
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012697 // ISO/Exposure Priority
12698 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12699 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12700 cam_priority_mode_t mode =
12701 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12702 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12703 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12704 use_iso_exp_pty.previewOnly = FALSE;
12705 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12706 use_iso_exp_pty.value = *ptr;
12707
12708 if(CAM_ISO_PRIORITY == mode) {
12709 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12710 use_iso_exp_pty)) {
12711 rc = BAD_VALUE;
12712 }
12713 }
12714 else {
12715 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12716 use_iso_exp_pty)) {
12717 rc = BAD_VALUE;
12718 }
12719 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012720
12721 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12722 rc = BAD_VALUE;
12723 }
12724 }
12725 } else {
12726 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12727 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012728 }
12729 }
12730
12731 // Saturation
12732 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12733 int32_t* use_saturation =
12734 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12735 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12736 rc = BAD_VALUE;
12737 }
12738 }
12739
Thierry Strudel3d639192016-09-09 11:52:26 -070012740 // EV step
12741 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12742 gCamCapability[mCameraId]->exp_compensation_step)) {
12743 rc = BAD_VALUE;
12744 }
12745
12746 // CDS info
12747 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12748 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12749 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12750
12751 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12752 CAM_INTF_META_CDS_DATA, *cdsData)) {
12753 rc = BAD_VALUE;
12754 }
12755 }
12756
Shuzhen Wang19463d72016-03-08 11:09:52 -080012757 // Hybrid AE
12758 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12759 uint8_t *hybrid_ae = (uint8_t *)
12760 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12761
12762 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12763 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12764 rc = BAD_VALUE;
12765 }
12766 }
12767
Shuzhen Wang14415f52016-11-16 18:26:18 -080012768 // Histogram
12769 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12770 uint8_t histogramMode =
12771 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12772 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12773 histogramMode)) {
12774 rc = BAD_VALUE;
12775 }
12776 }
12777
12778 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12779 int32_t histogramBins =
12780 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12781 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12782 histogramBins)) {
12783 rc = BAD_VALUE;
12784 }
12785 }
12786
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012787 // Tracking AF
12788 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12789 uint8_t trackingAfTrigger =
12790 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12791 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12792 trackingAfTrigger)) {
12793 rc = BAD_VALUE;
12794 }
12795 }
12796
Thierry Strudel3d639192016-09-09 11:52:26 -070012797 return rc;
12798}
12799
12800/*===========================================================================
12801 * FUNCTION : captureResultCb
12802 *
12803 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12804 *
12805 * PARAMETERS :
12806 * @frame : frame information from mm-camera-interface
12807 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12808 * @userdata: userdata
12809 *
12810 * RETURN : NONE
12811 *==========================================================================*/
12812void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12813 camera3_stream_buffer_t *buffer,
12814 uint32_t frame_number, bool isInputBuffer, void *userdata)
12815{
12816 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12817 if (hw == NULL) {
12818 LOGE("Invalid hw %p", hw);
12819 return;
12820 }
12821
12822 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12823 return;
12824}
12825
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012826/*===========================================================================
12827 * FUNCTION : setBufferErrorStatus
12828 *
12829 * DESCRIPTION: Callback handler for channels to report any buffer errors
12830 *
12831 * PARAMETERS :
12832 * @ch : Channel on which buffer error is reported from
12833 * @frame_number : frame number on which buffer error is reported on
12834 * @buffer_status : buffer error status
12835 * @userdata: userdata
12836 *
12837 * RETURN : NONE
12838 *==========================================================================*/
12839void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12840 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12841{
12842 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12843 if (hw == NULL) {
12844 LOGE("Invalid hw %p", hw);
12845 return;
12846 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012847
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012848 hw->setBufferErrorStatus(ch, frame_number, err);
12849 return;
12850}
12851
12852void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12853 uint32_t frameNumber, camera3_buffer_status_t err)
12854{
12855 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12856 pthread_mutex_lock(&mMutex);
12857
12858 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12859 if (req.frame_number != frameNumber)
12860 continue;
12861 for (auto& k : req.mPendingBufferList) {
12862 if(k.stream->priv == ch) {
12863 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12864 }
12865 }
12866 }
12867
12868 pthread_mutex_unlock(&mMutex);
12869 return;
12870}
Thierry Strudel3d639192016-09-09 11:52:26 -070012871/*===========================================================================
12872 * FUNCTION : initialize
12873 *
12874 * DESCRIPTION: Pass framework callback pointers to HAL
12875 *
12876 * PARAMETERS :
12877 *   @device : camera3 device handle
12878 *   @callback_ops : framework callback function table
12879 * RETURN : Success : 0
12880 * Failure: -ENODEV
12881 *==========================================================================*/
12882
12883int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12884 const camera3_callback_ops_t *callback_ops)
12885{
12886 LOGD("E");
12887 QCamera3HardwareInterface *hw =
12888 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12889 if (!hw) {
12890 LOGE("NULL camera device");
12891 return -ENODEV;
12892 }
12893
12894 int rc = hw->initialize(callback_ops);
12895 LOGD("X");
12896 return rc;
12897}
12898
12899/*===========================================================================
12900 * FUNCTION : configure_streams
12901 *
12902 * DESCRIPTION: configure the streams requested by the camera framework
12903 *
12904 * PARAMETERS :
12905 *   @device : camera3 device handle
12906 *   @stream_list : set of streams to be configured
12907 * RETURN : Success: 0
12908 * Failure: -EINVAL (if stream configuration is invalid)
12909 * -ENODEV (fatal error)
12910 *==========================================================================*/
12911
12912int QCamera3HardwareInterface::configure_streams(
12913 const struct camera3_device *device,
12914 camera3_stream_configuration_t *stream_list)
12915{
12916 LOGD("E");
12917 QCamera3HardwareInterface *hw =
12918 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12919 if (!hw) {
12920 LOGE("NULL camera device");
12921 return -ENODEV;
12922 }
12923 int rc = hw->configureStreams(stream_list);
12924 LOGD("X");
12925 return rc;
12926}
12927
12928/*===========================================================================
12929 * FUNCTION : construct_default_request_settings
12930 *
12931 * DESCRIPTION: Configure a settings buffer to meet the required use case
12932 *
12933 * PARAMETERS :
12934 *   @device : camera3 device handle
12935 *   @type : request template type (e.g. CAMERA3_TEMPLATE_PREVIEW)
12936 * RETURN : Success: Return valid metadata
12937 * Failure: Return NULL
12938 *==========================================================================*/
12939const camera_metadata_t* QCamera3HardwareInterface::
12940 construct_default_request_settings(const struct camera3_device *device,
12941 int type)
12942{
12943
12944 LOGD("E");
12945 camera_metadata_t* fwk_metadata = NULL;
12946 QCamera3HardwareInterface *hw =
12947 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12948 if (!hw) {
12949 LOGE("NULL camera device");
12950 return NULL;
12951 }
12952
12953 fwk_metadata = hw->translateCapabilityToMetadata(type);
12954
12955 LOGD("X");
12956 return fwk_metadata;
12957}
12958
12959/*===========================================================================
12960 * FUNCTION : process_capture_request
12961 *
12962 * DESCRIPTION: hand a framework capture request over to the HAL for processing
12963 *
12964 * PARAMETERS :
12965 *   @device : camera3 device handle
12966 *   @request : capture request to process
12967 * RETURN : success: 0 / failure: -EINVAL or other negative error code
12968 *==========================================================================*/
12969int QCamera3HardwareInterface::process_capture_request(
12970 const struct camera3_device *device,
12971 camera3_capture_request_t *request)
12972{
12973 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012974 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012975 QCamera3HardwareInterface *hw =
12976 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12977 if (!hw) {
12978 LOGE("NULL camera device");
12979 return -EINVAL;
12980 }
12981
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012982 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012983 LOGD("X");
12984 return rc;
12985}
12986
12987/*===========================================================================
12988 * FUNCTION : dump
12989 *
12990 * DESCRIPTION: dump HAL state for debugging to the given file descriptor
12991 *
12992 * PARAMETERS :
12993 *   @device : camera3 device handle
12994 *   @fd : file descriptor to write the dump to
12995 * RETURN : NONE
12996 *==========================================================================*/
12997
12998void QCamera3HardwareInterface::dump(
12999 const struct camera3_device *device, int fd)
13000{
13001 /* Log level property is read when "adb shell dumpsys media.camera" is
13002 called so that the log level can be controlled without restarting
13003 the media server */
13004 getLogLevel();
13005
13006 LOGD("E");
13007 QCamera3HardwareInterface *hw =
13008 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13009 if (!hw) {
13010 LOGE("NULL camera device");
13011 return;
13012 }
13013
13014 hw->dump(fd);
13015 LOGD("X");
13016 return;
13017}
13018
13019/*===========================================================================
13020 * FUNCTION : flush
13021 *
13022 * DESCRIPTION: flush in-flight captures and return all buffers to the framework
13023 *
13024 * PARAMETERS :
13025 *   @device : camera3 device handle
13026 *
13027 * RETURN : success: 0 / failure: -EINVAL or -ENODEV
13028 *==========================================================================*/
13029
13030int QCamera3HardwareInterface::flush(
13031 const struct camera3_device *device)
13032{
13033 int rc;
13034 LOGD("E");
13035 QCamera3HardwareInterface *hw =
13036 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13037 if (!hw) {
13038 LOGE("NULL camera device");
13039 return -EINVAL;
13040 }
13041
13042 pthread_mutex_lock(&hw->mMutex);
13043 // Validate current state
13044 switch (hw->mState) {
13045 case STARTED:
13046 /* valid state */
13047 break;
13048
13049 case ERROR:
13050 pthread_mutex_unlock(&hw->mMutex);
13051 hw->handleCameraDeviceError();
13052 return -ENODEV;
13053
13054 default:
13055 LOGI("Flush returned during state %d", hw->mState);
13056 pthread_mutex_unlock(&hw->mMutex);
13057 return 0;
13058 }
13059 pthread_mutex_unlock(&hw->mMutex);
13060
13061 rc = hw->flush(true /* restart channels */ );
13062 LOGD("X");
13063 return rc;
13064}
13065
13066/*===========================================================================
13067 * FUNCTION : close_camera_device
13068 *
13069 * DESCRIPTION: close the camera device and release its resources
13070 *
13071 * PARAMETERS :
13072 *   @device : hw_device_t handle of the camera to close
13073 *
13074 * RETURN : success: NO_ERROR / failure: BAD_VALUE
13075 *==========================================================================*/
13076int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13077{
13078 int ret = NO_ERROR;
13079 QCamera3HardwareInterface *hw =
13080 reinterpret_cast<QCamera3HardwareInterface *>(
13081 reinterpret_cast<camera3_device_t *>(device)->priv);
13082 if (!hw) {
13083 LOGE("NULL camera device");
13084 return BAD_VALUE;
13085 }
13086
13087 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13088 delete hw;
13089 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013090 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013091 return ret;
13092}
13093
13094/*===========================================================================
13095 * FUNCTION : getWaveletDenoiseProcessPlate
13096 *
13097 * DESCRIPTION: query wavelet denoise process plate
13098 *
13099 * PARAMETERS : None
13100 *
13101 * RETURN : WNR process plate value
13102 *==========================================================================*/
13103cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13104{
13105 char prop[PROPERTY_VALUE_MAX];
13106 memset(prop, 0, sizeof(prop));
13107 property_get("persist.denoise.process.plates", prop, "0");
13108 int processPlate = atoi(prop);
13109 switch(processPlate) {
13110 case 0:
13111 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13112 case 1:
13113 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13114 case 2:
13115 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13116 case 3:
13117 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13118 default:
13119 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13120 }
13121}
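/* Tuning aid (illustrative): the WNR plate can be switched at runtime, e.g.
 *     adb shell setprop persist.denoise.process.plates 2
 * where 0 = YCbCr plane, 1 = CbCr only, 2 = streamlined YCbCr and
 * 3 = streamlined CbCr; any other value falls back to streamlined YCbCr, as
 * handled above. */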
13122
13123
13124/*===========================================================================
13125 * FUNCTION : getTemporalDenoiseProcessPlate
13126 *
13127 * DESCRIPTION: query temporal denoise process plate
13128 *
13129 * PARAMETERS : None
13130 *
13131 * RETURN : TNR process plate value
13132 *==========================================================================*/
13133cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13134{
13135 char prop[PROPERTY_VALUE_MAX];
13136 memset(prop, 0, sizeof(prop));
13137 property_get("persist.tnr.process.plates", prop, "0");
13138 int processPlate = atoi(prop);
13139 switch(processPlate) {
13140 case 0:
13141 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13142 case 1:
13143 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13144 case 2:
13145 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13146 case 3:
13147 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13148 default:
13149 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13150 }
13151}
13152
13153
13154/*===========================================================================
13155 * FUNCTION : extractSceneMode
13156 *
13157 * DESCRIPTION: Extract scene mode from framework-set metadata
13158 *
13159 * PARAMETERS :
13160 * @frame_settings: CameraMetadata reference
13161 * @metaMode: ANDROID_CONTROL_MODE value set by the framework
13162 * @hal_metadata: hal metadata structure
13163 *
13164 * RETURN : int32_t status, NO_ERROR on success
13165 *==========================================================================*/
13166int32_t QCamera3HardwareInterface::extractSceneMode(
13167 const CameraMetadata &frame_settings, uint8_t metaMode,
13168 metadata_buffer_t *hal_metadata)
13169{
13170 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013171 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13172
13173 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13174 LOGD("Ignoring control mode OFF_KEEP_STATE");
13175 return NO_ERROR;
13176 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013177
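    // Map the framework scene mode to the corresponding HAL bestshot mode.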
13178 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13179 camera_metadata_ro_entry entry =
13180 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13181 if (0 == entry.count)
13182 return rc;
13183
13184 uint8_t fwk_sceneMode = entry.data.u8[0];
13185
13186 int val = lookupHalName(SCENE_MODES_MAP,
13187 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13188 fwk_sceneMode);
13189 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013190 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013191 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013192 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013193 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013194
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013195 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13196 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13197 }
13198
13199 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13200 if (sceneMode == CAM_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013201 cam_hdr_param_t hdr_params;
13202 hdr_params.hdr_enable = 1;
13203 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13204 hdr_params.hdr_need_1x = false;
13205 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13206 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13207 rc = BAD_VALUE;
13208 }
13209 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013210
Thierry Strudel3d639192016-09-09 11:52:26 -070013211 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13212 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13213 rc = BAD_VALUE;
13214 }
13215 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013216
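    // When an HDR snapshot is forced, always program multi-frame HDR bracketing.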
13217 if (mForceHdrSnapshot) {
13218 cam_hdr_param_t hdr_params;
13219 hdr_params.hdr_enable = 1;
13220 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13221 hdr_params.hdr_need_1x = false;
13222 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13223 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13224 rc = BAD_VALUE;
13225 }
13226 }
13227
Thierry Strudel3d639192016-09-09 11:52:26 -070013228 return rc;
13229}
13230
13231/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013232 * FUNCTION : setVideoHdrMode
13233 *
13234 * DESCRIPTION: Set Video HDR mode from framework-set metadata
13235 *
13236 * PARAMETERS :
13237 * @hal_metadata: hal metadata structure
13238 * @vhdr: requested video HDR mode (cam_video_hdr_mode_t)
13239 *
13240 * RETURN : int32_t status, NO_ERROR on success
13241 *==========================================================================*/
13242int32_t QCamera3HardwareInterface::setVideoHdrMode(
13243 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13244{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013245 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13246 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13247 }
13248
13249 LOGE("Invalid Video HDR mode %d!", vhdr);
13250 return BAD_VALUE;
13251}
13252
13253/*===========================================================================
13254 * FUNCTION : setSensorHDR
13255 *
13256 * DESCRIPTION: Enable/disable sensor HDR.
13257 *
13258 * PARAMETERS :
13259 * @hal_metadata: hal metadata structure
13260 * @enable: whether to enable or disable sensor HDR
13261 * @isVideoHdrEnable: true when invoked for video HDR mode
13262 * RETURN : int32_t status, NO_ERROR on success
13263 *==========================================================================*/
13264int32_t QCamera3HardwareInterface::setSensorHDR(
13265 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13266{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013267 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013268 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13269
13270 if (enable) {
13271 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13272 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13273 #ifdef _LE_CAMERA_
13274 //Default to staggered HDR for IOT
13275 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13276 #else
13277 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13278 #endif
13279 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13280 }
13281
13282 bool isSupported = false;
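    // Verify the requested sensor HDR type is advertised in the camera capability feature mask.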
13283 switch (sensor_hdr) {
13284 case CAM_SENSOR_HDR_IN_SENSOR:
13285 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13286 CAM_QCOM_FEATURE_SENSOR_HDR) {
13287 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013288 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013289 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013290 break;
13291 case CAM_SENSOR_HDR_ZIGZAG:
13292 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13293 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13294 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013295 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013296 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013297 break;
13298 case CAM_SENSOR_HDR_STAGGERED:
13299 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13300 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13301 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013302 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013303 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013304 break;
13305 case CAM_SENSOR_HDR_OFF:
13306 isSupported = true;
13307 LOGD("Turning off sensor HDR");
13308 break;
13309 default:
13310 LOGE("HDR mode %d not supported", sensor_hdr);
13311 rc = BAD_VALUE;
13312 break;
13313 }
13314
13315 if(isSupported) {
13316 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13317 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13318 rc = BAD_VALUE;
13319 } else {
13320 if(!isVideoHdrEnable)
13321 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013322 }
13323 }
13324 return rc;
13325}
13326
13327/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013328 * FUNCTION : needRotationReprocess
13329 *
13330 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13331 *
13332 * PARAMETERS : none
13333 *
13334 * RETURN : true: needed
13335 * false: no need
13336 *==========================================================================*/
13337bool QCamera3HardwareInterface::needRotationReprocess()
13338{
13339 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13340 // pp has the capability to process rotation, so rotation is handled via reprocess
13341 LOGH("need do reprocess for rotation");
13342 return true;
13343 }
13344
13345 return false;
13346}
13347
13348/*===========================================================================
13349 * FUNCTION : needReprocess
13350 *
13351 * DESCRIPTION: if reprocess is needed
13352 *
13353 * PARAMETERS : none
13354 *
13355 * RETURN : true: needed
13356 * false: no need
13357 *==========================================================================*/
13358bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13359{
13360 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13361 // TODO: add for ZSL HDR later
13362 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13363 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13364 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13365 return true;
13366 } else {
13367 LOGH("already post processed frame");
13368 return false;
13369 }
13370 }
13371 return needRotationReprocess();
13372}
13373
13374/*===========================================================================
13375 * FUNCTION : needJpegExifRotation
13376 *
13377 * DESCRIPTION: if JPEG EXIF rotation is needed
13378 *
13379 * PARAMETERS : none
13380 *
13381 * RETURN : true: needed
13382 * false: no need
13383 *==========================================================================*/
13384bool QCamera3HardwareInterface::needJpegExifRotation()
13385{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013386 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013387 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13388 LOGD("Need use Jpeg EXIF Rotation");
13389 return true;
13390 }
13391 return false;
13392}
13393
13394/*===========================================================================
13395 * FUNCTION : addOfflineReprocChannel
13396 *
13397 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13398 * coming from input channel
13399 *
13400 * PARAMETERS :
13401 * @config : reprocess configuration
13402 * @inputChHandle : pointer to the input (source) channel
13403 *
13404 *
13405 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13406 *==========================================================================*/
13407QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13408 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13409{
13410 int32_t rc = NO_ERROR;
13411 QCamera3ReprocessChannel *pChannel = NULL;
13412
13413 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013414 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13415 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013416 if (NULL == pChannel) {
13417 LOGE("no mem for reprocess channel");
13418 return NULL;
13419 }
13420
13421 rc = pChannel->initialize(IS_TYPE_NONE);
13422 if (rc != NO_ERROR) {
13423 LOGE("init reprocess channel failed, ret = %d", rc);
13424 delete pChannel;
13425 return NULL;
13426 }
13427
13428 // pp feature config
13429 cam_pp_feature_config_t pp_config;
13430 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13431
13432 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13433 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13434 & CAM_QCOM_FEATURE_DSDN) {
13435 // Use CPP CDS when h/w supports it.
13436 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13437 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13438 }
13439 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13440 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13441 }
13442
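    // Carry over HDR bracketing parameters from the reprocess configuration.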
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013443 if (config.hdr_param.hdr_enable) {
13444 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13445 pp_config.hdr_param = config.hdr_param;
13446 }
13447
13448 if (mForceHdrSnapshot) {
13449 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13450 pp_config.hdr_param.hdr_enable = 1;
13451 pp_config.hdr_param.hdr_need_1x = 0;
13452 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13453 }
13454
Thierry Strudel3d639192016-09-09 11:52:26 -070013455 rc = pChannel->addReprocStreamsFromSource(pp_config,
13456 config,
13457 IS_TYPE_NONE,
13458 mMetadataChannel);
13459
13460 if (rc != NO_ERROR) {
13461 delete pChannel;
13462 return NULL;
13463 }
13464 return pChannel;
13465}
13466
13467/*===========================================================================
13468 * FUNCTION : getMobicatMask
13469 *
13470 * DESCRIPTION: returns mobicat mask
13471 *
13472 * PARAMETERS : none
13473 *
13474 * RETURN : mobicat mask
13475 *
13476 *==========================================================================*/
13477uint8_t QCamera3HardwareInterface::getMobicatMask()
13478{
13479 return m_MobicatMask;
13480}
13481
13482/*===========================================================================
13483 * FUNCTION : setMobicat
13484 *
13485 * DESCRIPTION: set Mobicat on/off.
13486 *
13487 * PARAMETERS :
13488 * @params : none
13489 *
13490 * RETURN : int32_t type of status
13491 * NO_ERROR -- success
13492 * non-zero failure code
13493 *==========================================================================*/
13494int32_t QCamera3HardwareInterface::setMobicat()
13495{
13496 char value [PROPERTY_VALUE_MAX];
13497 property_get("persist.camera.mobicat", value, "0");
13498 int32_t ret = NO_ERROR;
13499 uint8_t enableMobi = (uint8_t)atoi(value);
13500
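    // When mobicat is enabled, request a chromatix reload on both the VFE and PP modules.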
13501 if (enableMobi) {
13502 tune_cmd_t tune_cmd;
13503 tune_cmd.type = SET_RELOAD_CHROMATIX;
13504 tune_cmd.module = MODULE_ALL;
13505 tune_cmd.value = TRUE;
13506 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13507 CAM_INTF_PARM_SET_VFE_COMMAND,
13508 tune_cmd);
13509
13510 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13511 CAM_INTF_PARM_SET_PP_COMMAND,
13512 tune_cmd);
13513 }
13514 m_MobicatMask = enableMobi;
13515
13516 return ret;
13517}
13518
13519/*===========================================================================
13520* FUNCTION : getLogLevel
13521*
13522* DESCRIPTION: Reads the log level property into a variable
13523*
13524* PARAMETERS :
13525* None
13526*
13527* RETURN :
13528* None
13529*==========================================================================*/
13530void QCamera3HardwareInterface::getLogLevel()
13531{
13532 char prop[PROPERTY_VALUE_MAX];
13533 uint32_t globalLogLevel = 0;
13534
13535 property_get("persist.camera.hal.debug", prop, "0");
13536 int val = atoi(prop);
13537 if (0 <= val) {
13538 gCamHal3LogLevel = (uint32_t)val;
13539 }
13540
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013541 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013542 gKpiDebugLevel = atoi(prop);
13543
13544 property_get("persist.camera.global.debug", prop, "0");
13545 val = atoi(prop);
13546 if (0 <= val) {
13547 globalLogLevel = (uint32_t)val;
13548 }
13549
13550 /* The higher of the hal.debug and global.debug log levels is selected */
13551 if (gCamHal3LogLevel < globalLogLevel)
13552 gCamHal3LogLevel = globalLogLevel;
13553
13554 return;
13555}
13556
13557/*===========================================================================
13558 * FUNCTION : validateStreamRotations
13559 *
13560 * DESCRIPTION: Check if the rotations requested are supported
13561 *
13562 * PARAMETERS :
13563 * @stream_list : streams to be configured
13564 *
13565 * RETURN : NO_ERROR on success
13566 * -EINVAL on failure
13567 *
13568 *==========================================================================*/
13569int QCamera3HardwareInterface::validateStreamRotations(
13570 camera3_stream_configuration_t *streamList)
13571{
13572 int rc = NO_ERROR;
13573
13574 /*
13575 * Loop through all streams requested in configuration
13576 * Check if unsupported rotations have been requested on any of them
13577 */
13578 for (size_t j = 0; j < streamList->num_streams; j++){
13579 camera3_stream_t *newStream = streamList->streams[j];
13580
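        // Rotation is only allowed on implementation-defined streams that are not ZSL (bidirectional).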
13581 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13582 bool isImplDef = (newStream->format ==
13583 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13584 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13585 isImplDef);
13586
13587 if (isRotated && (!isImplDef || isZsl)) {
13588 LOGE("Error: Unsupported rotation of %d requested for stream"
13589 "type:%d and stream format:%d",
13590 newStream->rotation, newStream->stream_type,
13591 newStream->format);
13592 rc = -EINVAL;
13593 break;
13594 }
13595 }
13596
13597 return rc;
13598}
13599
13600/*===========================================================================
13601* FUNCTION : getFlashInfo
13602*
13603* DESCRIPTION: Retrieve information about whether the device has a flash.
13604*
13605* PARAMETERS :
13606* @cameraId : Camera id to query
13607* @hasFlash : Boolean indicating whether there is a flash device
13608* associated with given camera
13609* @flashNode : If a flash device exists, this will be its device node.
13610*
13611* RETURN :
13612* None
13613*==========================================================================*/
13614void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13615 bool& hasFlash,
13616 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13617{
13618 cam_capability_t* camCapability = gCamCapability[cameraId];
13619 if (NULL == camCapability) {
13620 hasFlash = false;
13621 flashNode[0] = '\0';
13622 } else {
13623 hasFlash = camCapability->flash_available;
13624 strlcpy(flashNode,
13625 (char*)camCapability->flash_dev_name,
13626 QCAMERA_MAX_FILEPATH_LENGTH);
13627 }
13628}
13629
13630/*===========================================================================
13631* FUNCTION : getEepromVersionInfo
13632*
13633* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13634*
13635* PARAMETERS : None
13636*
13637* RETURN : string describing EEPROM version
13638* "\0" if no such info available
13639*==========================================================================*/
13640const char *QCamera3HardwareInterface::getEepromVersionInfo()
13641{
13642 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13643}
13644
13645/*===========================================================================
13646* FUNCTION : getLdafCalib
13647*
13648* DESCRIPTION: Retrieve Laser AF calibration data
13649*
13650* PARAMETERS : None
13651*
13652 * RETURN : Pointer to two uint32_t values describing laser AF calibration data
13653* NULL if none is available.
13654*==========================================================================*/
13655const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13656{
13657 if (mLdafCalibExist) {
13658 return &mLdafCalib[0];
13659 } else {
13660 return NULL;
13661 }
13662}
13663
13664/*===========================================================================
13665 * FUNCTION : dynamicUpdateMetaStreamInfo
13666 *
13667 * DESCRIPTION: This function:
13668 * (1) stops all the channels
13669 * (2) returns error on pending requests and buffers
13670 * (3) sends metastream_info in setparams
13671 * (4) starts all channels
13672 * This is useful when sensor has to be restarted to apply any
13673 * settings such as frame rate from a different sensor mode
13674 *
13675 * PARAMETERS : None
13676 *
13677 * RETURN : NO_ERROR on success
13678 * Error codes on failure
13679 *
13680 *==========================================================================*/
13681int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13682{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013683 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013684 int rc = NO_ERROR;
13685
13686 LOGD("E");
13687
13688 rc = stopAllChannels();
13689 if (rc < 0) {
13690 LOGE("stopAllChannels failed");
13691 return rc;
13692 }
13693
13694 rc = notifyErrorForPendingRequests();
13695 if (rc < 0) {
13696 LOGE("notifyErrorForPendingRequests failed");
13697 return rc;
13698 }
13699
13700 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13701 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13702 "Format:%d",
13703 mStreamConfigInfo.type[i],
13704 mStreamConfigInfo.stream_sizes[i].width,
13705 mStreamConfigInfo.stream_sizes[i].height,
13706 mStreamConfigInfo.postprocess_mask[i],
13707 mStreamConfigInfo.format[i]);
13708 }
13709
13710 /* Send meta stream info once again so that ISP can start */
13711 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13712 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13713 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13714 mParameters);
13715 if (rc < 0) {
13716 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13717 }
13718
13719 rc = startAllChannels();
13720 if (rc < 0) {
13721 LOGE("startAllChannels failed");
13722 return rc;
13723 }
13724
13725 LOGD("X");
13726 return rc;
13727}
13728
13729/*===========================================================================
13730 * FUNCTION : stopAllChannels
13731 *
13732 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13733 *
13734 * PARAMETERS : None
13735 *
13736 * RETURN : NO_ERROR on success
13737 * Error codes on failure
13738 *
13739 *==========================================================================*/
13740int32_t QCamera3HardwareInterface::stopAllChannels()
13741{
13742 int32_t rc = NO_ERROR;
13743
13744 LOGD("Stopping all channels");
13745 // Stop the Streams/Channels
13746 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13747 it != mStreamInfo.end(); it++) {
13748 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13749 if (channel) {
13750 channel->stop();
13751 }
13752 (*it)->status = INVALID;
13753 }
13754
13755 if (mSupportChannel) {
13756 mSupportChannel->stop();
13757 }
13758 if (mAnalysisChannel) {
13759 mAnalysisChannel->stop();
13760 }
13761 if (mRawDumpChannel) {
13762 mRawDumpChannel->stop();
13763 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013764 if (mHdrPlusRawSrcChannel) {
13765 mHdrPlusRawSrcChannel->stop();
13766 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013767 if (mMetadataChannel) {
13768 /* If mStreamInfo is not empty, there is a metadata stream */
13769 mMetadataChannel->stop();
13770 }
13771
13772 LOGD("All channels stopped");
13773 return rc;
13774}
13775
13776/*===========================================================================
13777 * FUNCTION : startAllChannels
13778 *
13779 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13780 *
13781 * PARAMETERS : None
13782 *
13783 * RETURN : NO_ERROR on success
13784 * Error codes on failure
13785 *
13786 *==========================================================================*/
13787int32_t QCamera3HardwareInterface::startAllChannels()
13788{
13789 int32_t rc = NO_ERROR;
13790
13791 LOGD("Start all channels ");
13792 // Start the Streams/Channels
13793 if (mMetadataChannel) {
13794 /* If mStreamInfo is not empty, there is a metadata stream */
13795 rc = mMetadataChannel->start();
13796 if (rc < 0) {
13797 LOGE("META channel start failed");
13798 return rc;
13799 }
13800 }
13801 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13802 it != mStreamInfo.end(); it++) {
13803 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13804 if (channel) {
13805 rc = channel->start();
13806 if (rc < 0) {
13807 LOGE("channel start failed");
13808 return rc;
13809 }
13810 }
13811 }
13812 if (mAnalysisChannel) {
13813 mAnalysisChannel->start();
13814 }
13815 if (mSupportChannel) {
13816 rc = mSupportChannel->start();
13817 if (rc < 0) {
13818 LOGE("Support channel start failed");
13819 return rc;
13820 }
13821 }
13822 if (mRawDumpChannel) {
13823 rc = mRawDumpChannel->start();
13824 if (rc < 0) {
13825 LOGE("RAW dump channel start failed");
13826 return rc;
13827 }
13828 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013829 if (mHdrPlusRawSrcChannel) {
13830 rc = mHdrPlusRawSrcChannel->start();
13831 if (rc < 0) {
13832 LOGE("HDR+ RAW channel start failed");
13833 return rc;
13834 }
13835 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013836
13837 LOGD("All channels started");
13838 return rc;
13839}
13840
13841/*===========================================================================
13842 * FUNCTION : notifyErrorForPendingRequests
13843 *
13844 * DESCRIPTION: This function sends error for all the pending requests/buffers
13845 *
13846 * PARAMETERS : None
13847 *
13848 * RETURN : Error codes
13849 * NO_ERROR on success
13850 *
13851 *==========================================================================*/
13852int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13853{
13854 int32_t rc = NO_ERROR;
13855 unsigned int frameNum = 0;
13856 camera3_capture_result_t result;
13857 camera3_stream_buffer_t *pStream_Buf = NULL;
13858
13859 memset(&result, 0, sizeof(camera3_capture_result_t));
13860
13861 if (mPendingRequestsList.size() > 0) {
13862 pendingRequestIterator i = mPendingRequestsList.begin();
13863 frameNum = i->frame_number;
13864 } else {
13865 /* There might still be pending buffers even though there are
13866 no pending requests. Setting the frameNum to MAX so that
13867 all the buffers with smaller frame numbers are returned */
13868 frameNum = UINT_MAX;
13869 }
13870
13871 LOGH("Oldest frame num on mPendingRequestsList = %u",
13872 frameNum);
13873
Emilian Peev7650c122017-01-19 08:24:33 -080013874 notifyErrorFoPendingDepthData(mDepthChannel);
13875
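    // For each pending request: if its result metadata was already sent (frame number below the
    // oldest pending request), return its buffers with ERROR_BUFFER notifications; otherwise send
    // an ERROR_REQUEST notification and return all of its buffers in error state.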
Thierry Strudel3d639192016-09-09 11:52:26 -070013876 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13877 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13878
13879 if (req->frame_number < frameNum) {
13880 // Send Error notify to frameworks for each buffer for which
13881 // metadata buffer is already sent
13882 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13883 req->frame_number, req->mPendingBufferList.size());
13884
13885 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13886 if (NULL == pStream_Buf) {
13887 LOGE("No memory for pending buffers array");
13888 return NO_MEMORY;
13889 }
13890 memset(pStream_Buf, 0,
13891 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13892 result.result = NULL;
13893 result.frame_number = req->frame_number;
13894 result.num_output_buffers = req->mPendingBufferList.size();
13895 result.output_buffers = pStream_Buf;
13896
13897 size_t index = 0;
13898 for (auto info = req->mPendingBufferList.begin();
13899 info != req->mPendingBufferList.end(); ) {
13900
13901 camera3_notify_msg_t notify_msg;
13902 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13903 notify_msg.type = CAMERA3_MSG_ERROR;
13904 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13905 notify_msg.message.error.error_stream = info->stream;
13906 notify_msg.message.error.frame_number = req->frame_number;
13907 pStream_Buf[index].acquire_fence = -1;
13908 pStream_Buf[index].release_fence = -1;
13909 pStream_Buf[index].buffer = info->buffer;
13910 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13911 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013912 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013913 index++;
13914 // Remove buffer from list
13915 info = req->mPendingBufferList.erase(info);
13916 }
13917
13918 // Remove this request from Map
13919 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13920 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13921 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13922
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013923 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013924
13925 delete [] pStream_Buf;
13926 } else {
13927
13928 // Go through the pending requests info and send error request to framework
13929 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13930
13931 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13932
13933 // Send error notify to frameworks
13934 camera3_notify_msg_t notify_msg;
13935 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13936 notify_msg.type = CAMERA3_MSG_ERROR;
13937 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13938 notify_msg.message.error.error_stream = NULL;
13939 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013940 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013941
13942 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13943 if (NULL == pStream_Buf) {
13944 LOGE("No memory for pending buffers array");
13945 return NO_MEMORY;
13946 }
13947 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13948
13949 result.result = NULL;
13950 result.frame_number = req->frame_number;
13951 result.input_buffer = i->input_buffer;
13952 result.num_output_buffers = req->mPendingBufferList.size();
13953 result.output_buffers = pStream_Buf;
13954
13955 size_t index = 0;
13956 for (auto info = req->mPendingBufferList.begin();
13957 info != req->mPendingBufferList.end(); ) {
13958 pStream_Buf[index].acquire_fence = -1;
13959 pStream_Buf[index].release_fence = -1;
13960 pStream_Buf[index].buffer = info->buffer;
13961 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13962 pStream_Buf[index].stream = info->stream;
13963 index++;
13964 // Remove buffer from list
13965 info = req->mPendingBufferList.erase(info);
13966 }
13967
13968 // Remove this request from Map
13969 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13970 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13971 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13972
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013973 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013974 delete [] pStream_Buf;
13975 i = erasePendingRequest(i);
13976 }
13977 }
13978
13979 /* Reset pending frame Drop list and requests list */
13980 mPendingFrameDropList.clear();
13981
13982 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
13983 req.mPendingBufferList.clear();
13984 }
13985 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013986 LOGH("Cleared all the pending buffers ");
13987
13988 return rc;
13989}
13990
13991bool QCamera3HardwareInterface::isOnEncoder(
13992 const cam_dimension_t max_viewfinder_size,
13993 uint32_t width, uint32_t height)
13994{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013995 return ((width > (uint32_t)max_viewfinder_size.width) ||
13996 (height > (uint32_t)max_viewfinder_size.height) ||
13997 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13998 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013999}
14000
14001/*===========================================================================
14002 * FUNCTION : setBundleInfo
14003 *
14004 * DESCRIPTION: Set bundle info for all streams that are bundle.
14005 *
14006 * PARAMETERS : None
14007 *
14008 * RETURN : NO_ERROR on success
14009 * Error codes on failure
14010 *==========================================================================*/
14011int32_t QCamera3HardwareInterface::setBundleInfo()
14012{
14013 int32_t rc = NO_ERROR;
14014
14015 if (mChannelHandle) {
14016 cam_bundle_config_t bundleInfo;
14017 memset(&bundleInfo, 0, sizeof(bundleInfo));
14018 rc = mCameraHandle->ops->get_bundle_info(
14019 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14020 if (rc != NO_ERROR) {
14021 LOGE("get_bundle_info failed");
14022 return rc;
14023 }
14024 if (mAnalysisChannel) {
14025 mAnalysisChannel->setBundleInfo(bundleInfo);
14026 }
14027 if (mSupportChannel) {
14028 mSupportChannel->setBundleInfo(bundleInfo);
14029 }
14030 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14031 it != mStreamInfo.end(); it++) {
14032 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14033 channel->setBundleInfo(bundleInfo);
14034 }
14035 if (mRawDumpChannel) {
14036 mRawDumpChannel->setBundleInfo(bundleInfo);
14037 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014038 if (mHdrPlusRawSrcChannel) {
14039 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14040 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014041 }
14042
14043 return rc;
14044}
14045
14046/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014047 * FUNCTION : setInstantAEC
14048 *
14049 * DESCRIPTION: Set Instant AEC related params.
14050 *
14051 * PARAMETERS :
14052 * @meta: CameraMetadata reference
14053 *
14054 * RETURN : NO_ERROR on success
14055 * Error codes on failure
14056 *==========================================================================*/
14057int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14058{
14059 int32_t rc = NO_ERROR;
14060 uint8_t val = 0;
14061 char prop[PROPERTY_VALUE_MAX];
14062
14063 // First try to configure instant AEC from framework metadata
14064 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14065 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14066 }
14067
14068 // If framework did not set this value, try to read from set prop.
14069 if (val == 0) {
14070 memset(prop, 0, sizeof(prop));
14071 property_get("persist.camera.instant.aec", prop, "0");
14072 val = (uint8_t)atoi(prop);
14073 }
14074
14075 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14076 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14077 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14078 mInstantAEC = val;
14079 mInstantAECSettledFrameNumber = 0;
14080 mInstantAecFrameIdxCount = 0;
14081 LOGH("instantAEC value set %d",val);
14082 if (mInstantAEC) {
14083 memset(prop, 0, sizeof(prop));
14084 property_get("persist.camera.ae.instant.bound", prop, "10");
14085 int32_t aec_frame_skip_cnt = atoi(prop);
14086 if (aec_frame_skip_cnt >= 0) {
14087 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14088 } else {
14089 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14090 rc = BAD_VALUE;
14091 }
14092 }
14093 } else {
14094 LOGE("Bad instant aec value set %d", val);
14095 rc = BAD_VALUE;
14096 }
14097 return rc;
14098}
14099
14100/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014101 * FUNCTION : get_num_overall_buffers
14102 *
14103 * DESCRIPTION: Return the total number of pending buffers across all requests.
14104 *
14105 * PARAMETERS : None
14106 *
14107 * RETURN : Number of overall pending buffers
14108 *
14109 *==========================================================================*/
14110uint32_t PendingBuffersMap::get_num_overall_buffers()
14111{
14112 uint32_t sum_buffers = 0;
14113 for (auto &req : mPendingBuffersInRequest) {
14114 sum_buffers += req.mPendingBufferList.size();
14115 }
14116 return sum_buffers;
14117}
14118
14119/*===========================================================================
14120 * FUNCTION : removeBuf
14121 *
14122 * DESCRIPTION: Remove a matching buffer from tracker.
14123 *
14124 * PARAMETERS : @buffer: image buffer for the callback
14125 *
14126 * RETURN : None
14127 *
14128 *==========================================================================*/
14129void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14130{
14131 bool buffer_found = false;
14132 for (auto req = mPendingBuffersInRequest.begin();
14133 req != mPendingBuffersInRequest.end(); req++) {
14134 for (auto k = req->mPendingBufferList.begin();
14135 k != req->mPendingBufferList.end(); k++ ) {
14136 if (k->buffer == buffer) {
14137 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14138 req->frame_number, buffer);
14139 k = req->mPendingBufferList.erase(k);
14140 if (req->mPendingBufferList.empty()) {
14141 // Remove this request from Map
14142 req = mPendingBuffersInRequest.erase(req);
14143 }
14144 buffer_found = true;
14145 break;
14146 }
14147 }
14148 if (buffer_found) {
14149 break;
14150 }
14151 }
14152 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14153 get_num_overall_buffers());
14154}
14155
14156/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014157 * FUNCTION : getBufErrStatus
14158 *
14159 * DESCRIPTION: get buffer error status
14160 *
14161 * PARAMETERS : @buffer: buffer handle
14162 *
14163 * RETURN : Error status
14164 *
14165 *==========================================================================*/
14166int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14167{
14168 for (auto& req : mPendingBuffersInRequest) {
14169 for (auto& k : req.mPendingBufferList) {
14170 if (k.buffer == buffer)
14171 return k.bufStatus;
14172 }
14173 }
14174 return CAMERA3_BUFFER_STATUS_OK;
14175}
14176
14177/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014178 * FUNCTION : setPAAFSupport
14179 *
14180 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14181 * feature mask according to stream type and filter
14182 * arrangement
14183 *
14184 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14185 * @stream_type: stream type
14186 * @filter_arrangement: filter arrangement
14187 *
14188 * RETURN : None
14189 *==========================================================================*/
14190void QCamera3HardwareInterface::setPAAFSupport(
14191 cam_feature_mask_t& feature_mask,
14192 cam_stream_type_t stream_type,
14193 cam_color_filter_arrangement_t filter_arrangement)
14194{
Thierry Strudel3d639192016-09-09 11:52:26 -070014195 switch (filter_arrangement) {
14196 case CAM_FILTER_ARRANGEMENT_RGGB:
14197 case CAM_FILTER_ARRANGEMENT_GRBG:
14198 case CAM_FILTER_ARRANGEMENT_GBRG:
14199 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014200 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14201 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014202 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014203 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14204 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014205 }
14206 break;
14207 case CAM_FILTER_ARRANGEMENT_Y:
14208 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14209 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14210 }
14211 break;
14212 default:
14213 break;
14214 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014215 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14216 feature_mask, stream_type, filter_arrangement);
14217
14218
Thierry Strudel3d639192016-09-09 11:52:26 -070014219}
14220
14221/*===========================================================================
14222* FUNCTION : getSensorMountAngle
14223*
14224* DESCRIPTION: Retrieve sensor mount angle
14225*
14226* PARAMETERS : None
14227*
14228* RETURN : sensor mount angle in uint32_t
14229*==========================================================================*/
14230uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14231{
14232 return gCamCapability[mCameraId]->sensor_mount_angle;
14233}
14234
14235/*===========================================================================
14236* FUNCTION : getRelatedCalibrationData
14237*
14238* DESCRIPTION: Retrieve related system calibration data
14239*
14240* PARAMETERS : None
14241*
14242* RETURN : Pointer of related system calibration data
14243*==========================================================================*/
14244const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14245{
14246 return (const cam_related_system_calibration_data_t *)
14247 &(gCamCapability[mCameraId]->related_cam_calibration);
14248}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014249
14250/*===========================================================================
14251 * FUNCTION : is60HzZone
14252 *
14253 * DESCRIPTION: Whether the device is in a region with 60Hz mains electricity frequency
14254 *
14255 * PARAMETERS : None
14256 *
14257 * RETURN : True if in 60Hz zone, False otherwise
14258 *==========================================================================*/
14259bool QCamera3HardwareInterface::is60HzZone()
14260{
14261 time_t t = time(NULL);
14262 struct tm lt;
14263
14264 struct tm* r = localtime_r(&t, &lt);
14265
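    // Rough heuristic: UTC offsets at or below -2h or at or above +8h are treated as 60Hz regions;
    // default to 60Hz when the local time cannot be determined.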
14266 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14267 return true;
14268 else
14269 return false;
14270}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014271
14272/*===========================================================================
14273 * FUNCTION : adjustBlackLevelForCFA
14274 *
14275 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14276 * of bayer CFA (Color Filter Array).
14277 *
14278 * PARAMETERS : @input: black level pattern in the order of RGGB
14279 * @output: black level pattern in the order of CFA
14280 * @color_arrangement: CFA color arrangement
14281 *
14282 * RETURN : None
14283 *==========================================================================*/
14284template<typename T>
14285void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14286 T input[BLACK_LEVEL_PATTERN_CNT],
14287 T output[BLACK_LEVEL_PATTERN_CNT],
14288 cam_color_filter_arrangement_t color_arrangement)
14289{
14290 switch (color_arrangement) {
14291 case CAM_FILTER_ARRANGEMENT_GRBG:
14292 output[0] = input[1];
14293 output[1] = input[0];
14294 output[2] = input[3];
14295 output[3] = input[2];
14296 break;
14297 case CAM_FILTER_ARRANGEMENT_GBRG:
14298 output[0] = input[2];
14299 output[1] = input[3];
14300 output[2] = input[0];
14301 output[3] = input[1];
14302 break;
14303 case CAM_FILTER_ARRANGEMENT_BGGR:
14304 output[0] = input[3];
14305 output[1] = input[2];
14306 output[2] = input[1];
14307 output[3] = input[0];
14308 break;
14309 case CAM_FILTER_ARRANGEMENT_RGGB:
14310 output[0] = input[0];
14311 output[1] = input[1];
14312 output[2] = input[2];
14313 output[3] = input[3];
14314 break;
14315 default:
14316 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14317 break;
14318 }
14319}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014320
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014321void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14322 CameraMetadata &resultMetadata,
14323 std::shared_ptr<metadata_buffer_t> settings)
14324{
14325 if (settings == nullptr) {
14326 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14327 return;
14328 }
14329
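    // Copy the JPEG-related settings (GPS, orientation, quality, thumbnail) and the capture intent
    // from the original HDR+ request into the result metadata.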
14330 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14331 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14332 }
14333
14334 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14335 String8 str((const char *)gps_methods);
14336 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14337 }
14338
14339 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14340 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14341 }
14342
14343 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14344 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14345 }
14346
14347 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14348 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14349 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14350 }
14351
14352 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14353 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14354 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14355 }
14356
14357 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14358 int32_t fwk_thumb_size[2];
14359 fwk_thumb_size[0] = thumb_size->width;
14360 fwk_thumb_size[1] = thumb_size->height;
14361 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14362 }
14363
14364 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14365 uint8_t fwk_intent = intent[0];
14366 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14367 }
14368}
14369
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014370bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14371 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14372 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014373{
14374 if (hdrPlusRequest == nullptr) return false;
14375
14376 // Check noise reduction mode is high quality.
14377 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14378 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14379 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014380 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14381 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014382 return false;
14383 }
14384
14385 // Check edge mode is high quality.
14386 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14387 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14388 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14389 return false;
14390 }
14391
14392 if (request.num_output_buffers != 1 ||
14393 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14394 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014395 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14396 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14397 request.output_buffers[0].stream->width,
14398 request.output_buffers[0].stream->height,
14399 request.output_buffers[0].stream->format);
14400 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014401 return false;
14402 }
14403
14404 // Get a YUV buffer from pic channel.
14405 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14406 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14407 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14408 if (res != OK) {
14409 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14410 __FUNCTION__, strerror(-res), res);
14411 return false;
14412 }
14413
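    // Package the YUV buffer as the HDR+ YUV output stream buffer for this capture request.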
14414 pbcamera::StreamBuffer buffer;
14415 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014416 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014417 buffer.data = yuvBuffer->buffer;
14418 buffer.dataSize = yuvBuffer->frame_len;
14419
14420 pbcamera::CaptureRequest pbRequest;
14421 pbRequest.id = request.frame_number;
14422 pbRequest.outputBuffers.push_back(buffer);
14423
14424 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014425 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014426 if (res != OK) {
14427 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14428 strerror(-res), res);
14429 return false;
14430 }
14431
14432 hdrPlusRequest->yuvBuffer = yuvBuffer;
14433 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14434
14435 return true;
14436}
14437
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014438status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked() {
14439 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14440 return OK;
14441 }
14442
14443 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14444 if (res != OK) {
14445 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14446 strerror(-res), res);
14447 return res;
14448 }
14449 gHdrPlusClientOpening = true;
14450
14451 return OK;
14452}
14453
Chien-Yu Chenee335912017-02-09 17:53:20 -080014454status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14455{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014456 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014457
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014458 // Check if gHdrPlusClient is opened or being opened.
14459 if (gHdrPlusClient == nullptr) {
14460 if (gHdrPlusClientOpening) {
14461 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14462 return OK;
14463 }
14464
14465 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014466 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014467 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14468 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014469 return res;
14470 }
14471
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014472 // When opening HDR+ client completes, HDR+ mode will be enabled.
14473 return OK;
14474
Chien-Yu Chenee335912017-02-09 17:53:20 -080014475 }
14476
14477 // Configure stream for HDR+.
14478 res = configureHdrPlusStreamsLocked();
14479 if (res != OK) {
14480 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014481 return res;
14482 }
14483
14484 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14485 res = gHdrPlusClient->setZslHdrPlusMode(true);
14486 if (res != OK) {
14487 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014488 return res;
14489 }
14490
14491 mHdrPlusModeEnabled = true;
14492 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14493
14494 return OK;
14495}
14496
14497void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14498{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014499 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014500 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014501 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14502 if (res != OK) {
14503 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14504 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014505
14506 // Close HDR+ client so Easel can enter low power mode.
14507 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14508 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014509 }
14510
14511 mHdrPlusModeEnabled = false;
14512 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14513}
14514
14515status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014516{
14517 pbcamera::InputConfiguration inputConfig;
14518 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14519 status_t res = OK;
14520
14521 // Configure HDR+ client streams.
14522 // Get input config.
14523 if (mHdrPlusRawSrcChannel) {
14524 // HDR+ input buffers will be provided by HAL.
14525 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14526 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14527 if (res != OK) {
14528 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14529 __FUNCTION__, strerror(-res), res);
14530 return res;
14531 }
14532
14533 inputConfig.isSensorInput = false;
14534 } else {
14535 // Sensor MIPI will send data to Easel.
14536 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014537 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014538 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14539 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14540 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14541 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14542 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14543 if (mSensorModeInfo.num_raw_bits != 10) {
14544 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14545 mSensorModeInfo.num_raw_bits);
14546 return BAD_VALUE;
14547 }
14548
14549 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014550 }
14551
14552 // Get output configurations.
14553 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014554 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014555
14556 // Easel may need to output YUV output buffers if mPictureChannel was created.
14557 pbcamera::StreamConfiguration yuvOutputConfig;
14558 if (mPictureChannel != nullptr) {
14559 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14560 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14561 if (res != OK) {
14562 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14563 __FUNCTION__, strerror(-res), res);
14564
14565 return res;
14566 }
14567
14568 outputStreamConfigs.push_back(yuvOutputConfig);
14569 }
14570
14571 // TODO: consider other channels for YUV output buffers.
14572
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014573 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014574 if (res != OK) {
14575 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14576 strerror(-res), res);
14577 return res;
14578 }
14579
14580 return OK;
14581}
14582
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014583void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client) {
14584 if (client == nullptr) {
14585 ALOGE("%s: Opened client is null.", __FUNCTION__);
14586 return;
14587 }
14588
14589 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14590
14591 Mutex::Autolock l(gHdrPlusClientLock);
14592 gHdrPlusClient = std::move(client);
14593 gHdrPlusClientOpening = false;
14594
14595 // Set static metadata.
14596 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14597 if (res != OK) {
14598 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14599 __FUNCTION__, strerror(-res), res);
14600 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14601 gHdrPlusClient = nullptr;
14602 return;
14603 }
14604
14605 // Enable HDR+ mode.
14606 res = enableHdrPlusModeLocked();
14607 if (res != OK) {
14608 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
14609 }
14610}
14611
14612void QCamera3HardwareInterface::onOpenFailed(status_t err) {
14613 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14614 Mutex::Autolock l(gHdrPlusClientLock);
14615 gHdrPlusClientOpening = false;
14616}
14617
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014618void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
14619 const camera_metadata_t &resultMetadata) {
14620 if (result != nullptr) {
14621 if (result->outputBuffers.size() != 1) {
14622 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
14623 result->outputBuffers.size());
14624 return;
14625 }
14626
14627 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14628 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14629 result->outputBuffers[0].streamId);
14630 return;
14631 }
14632
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014633 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014634 HdrPlusPendingRequest pendingRequest;
14635 {
14636 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14637 auto req = mHdrPlusPendingRequests.find(result->requestId);
14638 pendingRequest = req->second;
14639 }
14640
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014641 // Update the result metadata with the settings of the HDR+ still capture request because
14642 // the result metadata belongs to a ZSL buffer.
14643 CameraMetadata metadata;
14644 metadata = &resultMetadata;
14645 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14646 camera_metadata_t* updatedResultMetadata = metadata.release();
14647
14648 QCamera3PicChannel *picChannel =
14649 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14650
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014651 // Check if dumping HDR+ YUV output is enabled.
14652 char prop[PROPERTY_VALUE_MAX];
14653 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14654 bool dumpYuvOutput = atoi(prop);
14655
14656 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014657 // Dump yuv buffer to a ppm file.
14658 pbcamera::StreamConfiguration outputConfig;
14659 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14660 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14661 if (rc == OK) {
14662 char buf[FILENAME_MAX] = {};
14663 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14664 result->requestId, result->outputBuffers[0].streamId,
14665 outputConfig.image.width, outputConfig.image.height);
14666
14667 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14668 } else {
14669 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14670 __FUNCTION__, strerror(-rc), rc);
14671 }
14672 }
14673
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014674 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14675 auto halMetadata = std::make_shared<metadata_buffer_t>();
14676 clear_metadata_buffer(halMetadata.get());
14677
14678 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14679 // encoding.
14680 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14681 halStreamId, /*minFrameDuration*/0);
14682 if (res == OK) {
14683 // Return the buffer to pic channel for encoding.
14684 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14685 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14686 halMetadata);
14687 } else {
14688 // Return the buffer without encoding.
14689 // TODO: This should not happen but we may want to report an error buffer to camera
14690 // service.
14691 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14692 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14693 strerror(-res), res);
14694 }
14695
14696 // Send HDR+ metadata to framework.
14697 {
14698 pthread_mutex_lock(&mMutex);
14699
14700 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
14701 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
14702 pthread_mutex_unlock(&mMutex);
14703 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014704
14705 // Remove the HDR+ pending request.
14706 {
14707 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14708 auto req = mHdrPlusPendingRequests.find(result->requestId);
14709 mHdrPlusPendingRequests.erase(req);
14710 }
14711 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014712}
14713
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014714void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
14715 // TODO: Handle HDR+ capture failures and send the failure to framework.
14716 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14717 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14718
14719 // Return the buffer to pic channel.
14720 QCamera3PicChannel *picChannel =
14721 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14722 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14723
14724 mHdrPlusPendingRequests.erase(pendingRequest);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014725}
14726
Thierry Strudel3d639192016-09-09 11:52:26 -070014727}; //end namespace qcamera