/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0

#define MAX_VALUE_8BIT  ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH  3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS       1
#define MAX_STALLING_STREAMS  1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE   (120)
#define PREVIEW_FPS_FOR_HFR      (30)
#define DEFAULT_VIDEO_FPS        (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE       (8)
#define REGIONS_TUPLE_COUNT      5
#define HDR_PLUS_PERF_TIME_OUT   (7000) // milliseconds
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT         3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
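// Sizing helper for the framework<->HAL enum tables defined below. A typical
// lookup elsewhere in this class looks roughly like the following sketch
// (the variable names here are invented for illustration only):
//     int32_t halEffect = lookupHalName(EFFECT_MODES_MAP,
//             METADATA_MAP_SIZE(EFFECT_MODES_MAP), fwkEffectMode);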

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT   0
#define FACE_TOP    1
#define FACE_RIGHT  2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X             0
#define LEFT_EYE_Y             1
#define RIGHT_EYE_X            2
#define RIGHT_EYE_Y            3
#define MOUTH_X                4
#define MOUTH_Y                5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.

const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index, which means that for HAL values that map to different
 * Android values, the traversal logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    {  60, CAM_HFR_MODE_60FPS},
    {  90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
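// The table above routes each camera3_device_ops_t entry point to the
// corresponding static method of QCamera3HardwareInterface; hooks this HAL
// does not implement (register_stream_buffers, get_metadata_vendor_tag_ops)
// are intentionally left NULL.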

// Initialise sessionId entries to an invalid default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
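// Typical call site (see openCamera() later in this file):
//     logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
// The event is timestamped against CLOCK_BOOTTIME and logged only when
// gEaselProfilingEnabled has been set.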

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal control */
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    // TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}
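// Debugging sketch (not part of the HAL flow): the persist.camera.* knobs read
// in the constructor above can be toggled from a shell before reopening the
// camera, for example:
//     adb shell setprop persist.camera.raw.dump 1
//     adb shell setprop persist.camera.avtimer.debug 1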

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}
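// Note: this handler is hooked up through
// mCameraHandle->ops->register_event_notify() in openCamera() below, with
// `this` passed back as user_data.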

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if (rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if (rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the requested stream configuration is among those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format; check against
         * the appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                // As per spec, the depth point cloud width should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
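            // Worked example for the depth branch above (hypothetical sensor
            // numbers, not taken from this HAL): a 504x376 PD metadata map would
            // only accept a depth blob of (504 * 376 * 2) / 16 = 23688 x 1.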
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from frameworks is always full active array size,
                 * but it is not clear from the spec if framework will always
                 * follow that; also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateUsageFlags
 *
 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *   NO_ERROR if the usage flags are supported
 *   error code if usage flags are not supported
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateUsageFlags(
        const camera3_stream_configuration_t* streamList)
{
    for (size_t j = 0; j < streamList->num_streams; j++) {
        const camera3_stream_t *newStream = streamList->streams[j];

        if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
                (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
                 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
            continue;
        }

        bool isVideo = IS_USAGE_VIDEO(newStream->usage);
        bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
        bool isZSL = IS_USAGE_ZSL(newStream->usage);
        bool forcePreviewUBWC = true;
        if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
            forcePreviewUBWC = false;
        }
        cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);

        // Color space for this camera device is guaranteed to be ITU_R_601_FR.
        // So color spaces will always match.

        // Check whether underlying formats of shared streams match.
        if (isVideo && isPreview && videoFormat != previewFormat) {
            LOGE("Combined video and preview usage flag is not supported");
            return -EINVAL;
        }
        if (isPreview && isZSL && previewFormat != zslFormat) {
            LOGE("Combined preview and zsl usage flag is not supported");
            return -EINVAL;
        }
        if (isVideo && isZSL && videoFormat != zslFormat) {
            LOGE("Combined video and zsl usage flag is not supported");
            return -EINVAL;
        }
    }
    return NO_ERROR;
}
1357
1358/*===========================================================================
1359 * FUNCTION : validateUsageFlagsForEis
1360 *
1361 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1362 *
1363 * PARAMETERS :
1364 * @stream_list : streams to be configured
1365 *
1366 * RETURN :
1367 * NO_ERROR if the usage flags are supported
1368 * error code if usage flags are not supported
1369 *
1370 *==========================================================================*/
1371int QCamera3HardwareInterface::validateUsageFlagsForEis(
1372 const camera3_stream_configuration_t* streamList)
1373{
1374 for (size_t j = 0; j < streamList->num_streams; j++) {
1375 const camera3_stream_t *newStream = streamList->streams[j];
1376
1377 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1378 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1379
1380 // Because EIS is "hard-coded" for certain use cases, and the current
1381 // implementation doesn't support sharing preview and video on the same
1382 // stream, return failure if EIS is forced on.
1383 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1384 LOGE("Combined video and preview usage flag is not supported due to EIS");
1385 return -EINVAL;
1386 }
1387 }
1388 return NO_ERROR;
1389}
1390
1391/*==============================================================================
1392 * FUNCTION : isSupportChannelNeeded
1393 *
1394 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1395 *
1396 * PARAMETERS :
1397 * @stream_list : streams to be configured
1398 * @stream_config_info : the config info for streams to be configured
1399 *
1400 * RETURN : Boolean true/false decision
1401 *
1402 *==========================================================================*/
1403bool QCamera3HardwareInterface::isSupportChannelNeeded(
1404 camera3_stream_configuration_t *streamList,
1405 cam_stream_size_info_t stream_config_info)
1406{
1407 uint32_t i;
1408 bool pprocRequested = false;
1409 /* Check for conditions where PProc pipeline does not have any streams*/
1410 for (i = 0; i < stream_config_info.num_streams; i++) {
1411 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1412 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1413 pprocRequested = true;
1414 break;
1415 }
1416 }
1417
1418 if (pprocRequested == false )
1419 return true;
1420
1421 /* Dummy stream needed if only raw or jpeg streams present */
1422 for (i = 0; i < streamList->num_streams; i++) {
1423 switch(streamList->streams[i]->format) {
1424 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1425 case HAL_PIXEL_FORMAT_RAW10:
1426 case HAL_PIXEL_FORMAT_RAW16:
1427 case HAL_PIXEL_FORMAT_BLOB:
1428 break;
1429 default:
1430 return false;
1431 }
1432 }
1433 return true;
1434}
1435
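/* Example (illustrative only): a configuration containing just a RAW16 stream
 * and a BLOB (JPEG) stream falls into the "only raw or jpeg streams present"
 * loop above, so a dummy support channel is reported as needed. Adding, for
 * instance, an implementation-defined preview stream makes that loop return
 * false, provided some non-analysis stream in stream_config_info requested
 * post-processing.
 */
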
1436/*==============================================================================
1437 * FUNCTION : getSensorModeInfo
1438 *
1439 * DESCRIPTION: Get sensor mode information based on current stream configuration
1440 *
1441 * PARAMETERS :
1442 * @sensorModeInfo : sensor mode information (output)
1443 *
1444 * RETURN : int32_t type of status
1445 * NO_ERROR -- success
1446 * non-zero failure code
1447 *
1448 *==========================================================================*/
1449int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1450{
1451 int32_t rc = NO_ERROR;
1452
1453 cam_dimension_t max_dim = {0, 0};
1454 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1455 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1456 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1457 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1458 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1459 }
1460
1461 clear_metadata_buffer(mParameters);
1462
1463 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1464 max_dim);
1465 if (rc != NO_ERROR) {
1466 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1467 return rc;
1468 }
1469
1470 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1471 if (rc != NO_ERROR) {
1472 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1473 return rc;
1474 }
1475
1476 clear_metadata_buffer(mParameters);
1477 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
1478
1479 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1480 mParameters);
1481 if (rc != NO_ERROR) {
1482 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
1483 return rc;
1484 }
1485
1486 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
1487 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1488 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1489 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1490 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1491 sensorModeInfo.num_raw_bits);
1492
1493 return rc;
1494}
1495
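/* Illustrative usage sketch (hypothetical caller, not part of the HAL):
 *
 *   cam_sensor_mode_info_t modeInfo;
 *   memset(&modeInfo, 0, sizeof(modeInfo));
 *   if (getSensorModeInfo(modeInfo) == NO_ERROR) {
 *       // modeInfo now describes the sensor mode the backend selected for
 *       // the largest configured stream dimensions (active/pixel array
 *       // sizes, op_pixel_clk, num_raw_bits), obtained through the
 *       // CAM_INTF_PARM_MAX_DIMENSION set followed by the
 *       // CAM_INTF_PARM_SENSOR_MODE_INFO get shown above.
 *   }
 */
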
1496/*==============================================================================
1497 * FUNCTION : addToPPFeatureMask
1498 *
1499 * DESCRIPTION: add additional features to pp feature mask based on
1500 * stream type and usecase
1501 *
1502 * PARAMETERS :
1503 * @stream_format : stream type for feature mask
1504 * @stream_idx : stream idx within postprocess_mask list to change
1505 *
1506 * RETURN : None
1507 *
1508 *==========================================================================*/
1509void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1510 uint32_t stream_idx)
1511{
1512 char feature_mask_value[PROPERTY_VALUE_MAX];
1513 cam_feature_mask_t feature_mask;
1514 int args_converted;
1515 int property_len;
1516
1517 /* Get feature mask from property */
1518#ifdef _LE_CAMERA_
1519 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1520 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1521 property_len = property_get("persist.camera.hal3.feature",
1522 feature_mask_value, swtnr_feature_mask_value);
1523#else
1524 property_len = property_get("persist.camera.hal3.feature",
1525 feature_mask_value, "0");
1526#endif
1527 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1528 (feature_mask_value[1] == 'x')) {
1529 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1530 } else {
1531 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1532 }
1533 if (1 != args_converted) {
1534 feature_mask = 0;
1535 LOGE("Wrong feature mask %s", feature_mask_value);
1536 return;
1537 }
1538
1539 switch (stream_format) {
1540 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1541 /* Add LLVD to pp feature mask only if video hint is enabled */
1542 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1543 mStreamConfigInfo.postprocess_mask[stream_idx]
1544 |= CAM_QTI_FEATURE_SW_TNR;
1545 LOGH("Added SW TNR to pp feature mask");
1546 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1547 mStreamConfigInfo.postprocess_mask[stream_idx]
1548 |= CAM_QCOM_FEATURE_LLVD;
1549 LOGH("Added LLVD SeeMore to pp feature mask");
1550 }
1551 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1552 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1553 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1554 }
1555 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1556 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1557 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1558 CAM_QTI_FEATURE_BINNING_CORRECTION;
1559 }
1560 break;
1561 }
1562 default:
1563 break;
1564 }
1565 LOGD("PP feature mask %llx",
1566 mStreamConfigInfo.postprocess_mask[stream_idx]);
1567}
1568
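/* Illustrative helper (a sketch, not referenced by the HAL itself; the helper
 * name is hypothetical and it is marked unused on purpose): shows the same
 * hex-or-decimal parsing of persist.camera.hal3.feature that
 * addToPPFeatureMask() performs above. */
static __attribute__((unused)) cam_feature_mask_t getPpFeatureMaskProperty()
{
    char value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t mask = 0;
    int len = property_get("persist.camera.hal3.feature", value, "0");
    // A "0x" prefix selects hex parsing; anything else is treated as decimal.
    if ((len > 2) && (value[0] == '0') && (value[1] == 'x')) {
        if (sscanf(value, "0x%llx", &mask) != 1) {
            mask = 0;
        }
    } else {
        if (sscanf(value, "%lld", &mask) != 1) {
            mask = 0;
        }
    }
    return mask;
}
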
1569/*==============================================================================
1570 * FUNCTION : updateFpsInPreviewBuffer
1571 *
1572 * DESCRIPTION: update FPS information in preview buffer.
1573 *
1574 * PARAMETERS :
1575 * @metadata : pointer to metadata buffer
1576 * @frame_number: frame_number to look for in pending buffer list
1577 *
1578 * RETURN : None
1579 *
1580 *==========================================================================*/
1581void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1582 uint32_t frame_number)
1583{
1584 // Mark all pending buffers for this particular request
1585 // with corresponding framerate information
1586 for (List<PendingBuffersInRequest>::iterator req =
1587 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1588 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1589 for(List<PendingBufferInfo>::iterator j =
1590 req->mPendingBufferList.begin();
1591 j != req->mPendingBufferList.end(); j++) {
1592 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1593 if ((req->frame_number == frame_number) &&
1594 (channel->getStreamTypeMask() &
1595 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1596 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1597 CAM_INTF_PARM_FPS_RANGE, metadata) {
1598 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1599 struct private_handle_t *priv_handle =
1600 (struct private_handle_t *)(*(j->buffer));
1601 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1602 }
1603 }
1604 }
1605 }
1606}
1607
1608/*==============================================================================
1609 * FUNCTION : updateTimeStampInPendingBuffers
1610 *
1611 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1612 * of a frame number
1613 *
1614 * PARAMETERS :
1615 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1616 * @timestamp : timestamp to be set
1617 *
1618 * RETURN : None
1619 *
1620 *==========================================================================*/
1621void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1622 uint32_t frameNumber, nsecs_t timestamp)
1623{
1624 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1625 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1626 if (req->frame_number != frameNumber)
1627 continue;
1628
1629 for (auto k = req->mPendingBufferList.begin();
1630 k != req->mPendingBufferList.end(); k++ ) {
1631 struct private_handle_t *priv_handle =
1632 (struct private_handle_t *) (*(k->buffer));
1633 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1634 }
1635 }
1636 return;
1637}
1638
1639/*===========================================================================
1640 * FUNCTION : configureStreams
1641 *
1642 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1643 * and output streams.
1644 *
1645 * PARAMETERS :
1646 * @stream_list : streams to be configured
1647 *
1648 * RETURN :
1649 *
1650 *==========================================================================*/
1651int QCamera3HardwareInterface::configureStreams(
1652 camera3_stream_configuration_t *streamList)
1653{
1654 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
1655 int rc = 0;
1656
1657 // Acquire perfLock before configuring streams
1658 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
1659 rc = configureStreamsPerfLocked(streamList);
1660 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
1661
1662 return rc;
1663}
1664
1665/*===========================================================================
1666 * FUNCTION : configureStreamsPerfLocked
1667 *
1668 * DESCRIPTION: configureStreams while perfLock is held.
1669 *
1670 * PARAMETERS :
1671 * @stream_list : streams to be configured
1672 *
1673 * RETURN : int32_t type of status
1674 * NO_ERROR -- success
1675 * non-zero failure code
1676 *==========================================================================*/
1677int QCamera3HardwareInterface::configureStreamsPerfLocked(
1678 camera3_stream_configuration_t *streamList)
1679{
1680 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
1681 int rc = 0;
1682
1683 // Sanity check stream_list
1684 if (streamList == NULL) {
1685 LOGE("NULL stream configuration");
1686 return BAD_VALUE;
1687 }
1688 if (streamList->streams == NULL) {
1689 LOGE("NULL stream list");
1690 return BAD_VALUE;
1691 }
1692
1693 if (streamList->num_streams < 1) {
1694 LOGE("Bad number of streams requested: %d",
1695 streamList->num_streams);
1696 return BAD_VALUE;
1697 }
1698
1699 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1700 LOGE("Maximum number of streams %d exceeded: %d",
1701 MAX_NUM_STREAMS, streamList->num_streams);
1702 return BAD_VALUE;
1703 }
1704
1705 rc = validateUsageFlags(streamList);
1706 if (rc != NO_ERROR) {
1707 return rc;
1708 }
1709
1710 mOpMode = streamList->operation_mode;
1711 LOGD("mOpMode: %d", mOpMode);
1712
1713 /* first invalidate all the streams in mStreamInfo
1714 * if they appear again, they will be validated */
1715 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1716 it != mStreamInfo.end(); it++) {
1717 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1718 if (channel) {
1719 channel->stop();
1720 }
1721 (*it)->status = INVALID;
1722 }
1723
1724 if (mRawDumpChannel) {
1725 mRawDumpChannel->stop();
1726 delete mRawDumpChannel;
1727 mRawDumpChannel = NULL;
1728 }
1729
1730 if (mHdrPlusRawSrcChannel) {
1731 mHdrPlusRawSrcChannel->stop();
1732 delete mHdrPlusRawSrcChannel;
1733 mHdrPlusRawSrcChannel = NULL;
1734 }
1735
1736 if (mSupportChannel)
1737 mSupportChannel->stop();
1738
1739 if (mAnalysisChannel) {
1740 mAnalysisChannel->stop();
1741 }
1742 if (mMetadataChannel) {
1743 /* If mStreamInfo is not empty, there is a metadata stream */
1744 mMetadataChannel->stop();
1745 }
1746 if (mChannelHandle) {
1747 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1748 mChannelHandle);
1749 LOGD("stopping channel %d", mChannelHandle);
1750 }
1751
1752 pthread_mutex_lock(&mMutex);
1753
1754 // Check state
1755 switch (mState) {
1756 case INITIALIZED:
1757 case CONFIGURED:
1758 case STARTED:
1759 /* valid state */
1760 break;
1761 default:
1762 LOGE("Invalid state %d", mState);
1763 pthread_mutex_unlock(&mMutex);
1764 return -ENODEV;
1765 }
1766
1767 /* Check whether we have video stream */
1768 m_bIs4KVideo = false;
1769 m_bIsVideo = false;
1770 m_bEisSupportedSize = false;
1771 m_bTnrEnabled = false;
1772 m_bVideoHdrEnabled = false;
1773 bool isZsl = false;
1774 bool depthPresent = false;
1775 bool isPreview = false;
1776 uint32_t videoWidth = 0U;
1777 uint32_t videoHeight = 0U;
1778 size_t rawStreamCnt = 0;
1779 size_t stallStreamCnt = 0;
1780 size_t processedStreamCnt = 0;
1781 // Number of streams on ISP encoder path
1782 size_t numStreamsOnEncoder = 0;
1783 size_t numYuv888OnEncoder = 0;
1784 bool bYuv888OverrideJpeg = false;
1785 cam_dimension_t largeYuv888Size = {0, 0};
1786 cam_dimension_t maxViewfinderSize = {0, 0};
1787 bool bJpegExceeds4K = false;
1788 bool bJpegOnEncoder = false;
1789 bool bUseCommonFeatureMask = false;
1790 cam_feature_mask_t commonFeatureMask = 0;
1791 bool bSmallJpegSize = false;
1792 uint32_t width_ratio;
1793 uint32_t height_ratio;
1794 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1795 camera3_stream_t *inputStream = NULL;
1796 bool isJpeg = false;
1797 cam_dimension_t jpegSize = {0, 0};
1798 cam_dimension_t previewSize = {0, 0};
1799 size_t pdStatCount = 0;
1800
1801 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1802
1803 /*EIS configuration*/
1804 uint8_t eis_prop_set;
1805 uint32_t maxEisWidth = 0;
1806 uint32_t maxEisHeight = 0;
1807
1808 // Initialize all instant AEC related variables
1809 mInstantAEC = false;
1810 mResetInstantAEC = false;
1811 mInstantAECSettledFrameNumber = 0;
1812 mAecSkipDisplayFrameBound = 0;
1813 mInstantAecFrameIdxCount = 0;
1814 mCurrFeatureState = 0;
1815 mStreamConfig = true;
1816
1817 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1818
1819 size_t count = IS_TYPE_MAX;
1820 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1821 for (size_t i = 0; i < count; i++) {
1822 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
1823 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1824 m_bEisSupported = true;
1825 break;
1826 }
1827 }
1828
1829 if (m_bEisSupported) {
1830 maxEisWidth = MAX_EIS_WIDTH;
1831 maxEisHeight = MAX_EIS_HEIGHT;
1832 }
1833
1834 /* EIS setprop control */
1835 char eis_prop[PROPERTY_VALUE_MAX];
1836 memset(eis_prop, 0, sizeof(eis_prop));
1837 property_get("persist.camera.eis.enable", eis_prop, "1");
1838 eis_prop_set = (uint8_t)atoi(eis_prop);
1839
1840 m_bEisEnable = eis_prop_set && m_bEisSupported &&
1841 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1842
1843 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1844 m_bEisEnable, eis_prop_set, m_bEisSupported);
1845
1846 /* stream configurations */
1847 for (size_t i = 0; i < streamList->num_streams; i++) {
1848 camera3_stream_t *newStream = streamList->streams[i];
1849 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1850 "height = %d, rotation = %d, usage = 0x%x",
1851 i, newStream->stream_type, newStream->format,
1852 newStream->width, newStream->height, newStream->rotation,
1853 newStream->usage);
1854 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1855 newStream->stream_type == CAMERA3_STREAM_INPUT){
1856 isZsl = true;
1857 }
1858 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1859 IS_USAGE_PREVIEW(newStream->usage)) {
1860 isPreview = true;
1861 }
1862
1863 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1864 inputStream = newStream;
1865 }
1866
1867 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1868 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
1869 isJpeg = true;
1870 jpegSize.width = newStream->width;
1871 jpegSize.height = newStream->height;
1872 if (newStream->width > VIDEO_4K_WIDTH ||
1873 newStream->height > VIDEO_4K_HEIGHT)
1874 bJpegExceeds4K = true;
1875 }
1876
1877 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1878 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1879 m_bIsVideo = true;
1880 // In HAL3 we can have multiple different video streams.
1881 // The variables video width and height are used below as
1882 // dimensions of the biggest of them
1883 if (videoWidth < newStream->width ||
1884 videoHeight < newStream->height) {
1885 videoWidth = newStream->width;
1886 videoHeight = newStream->height;
1887 }
1888 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1889 (VIDEO_4K_HEIGHT <= newStream->height)) {
1890 m_bIs4KVideo = true;
1891 }
1892 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1893 (newStream->height <= maxEisHeight);
1894
1895 }
1896 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1897 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1898 switch (newStream->format) {
1899 case HAL_PIXEL_FORMAT_BLOB:
1900 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1901 depthPresent = true;
1902 break;
1903 }
1904 stallStreamCnt++;
1905 if (isOnEncoder(maxViewfinderSize, newStream->width,
1906 newStream->height)) {
1907 numStreamsOnEncoder++;
1908 bJpegOnEncoder = true;
1909 }
1910 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1911 newStream->width);
1912 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1913 newStream->height);
1914 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1915 "FATAL: max_downscale_factor cannot be zero and so assert");
1916 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1917 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1918 LOGH("Setting small jpeg size flag to true");
1919 bSmallJpegSize = true;
1920 }
1921 break;
1922 case HAL_PIXEL_FORMAT_RAW10:
1923 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1924 case HAL_PIXEL_FORMAT_RAW16:
1925 rawStreamCnt++;
1926 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1927 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1928 pdStatCount++;
1929 }
1930 break;
1931 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1932 processedStreamCnt++;
1933 if (isOnEncoder(maxViewfinderSize, newStream->width,
1934 newStream->height)) {
1935 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1936 !IS_USAGE_ZSL(newStream->usage)) {
1937 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1938 }
1939 numStreamsOnEncoder++;
1940 }
1941 break;
1942 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1943 processedStreamCnt++;
1944 if (isOnEncoder(maxViewfinderSize, newStream->width,
1945 newStream->height)) {
1946 // If Yuv888 size is not greater than 4K, set feature mask
1947 // to SUPERSET so that it support concurrent request on
1948 // YUV and JPEG.
1949 if (newStream->width <= VIDEO_4K_WIDTH &&
1950 newStream->height <= VIDEO_4K_HEIGHT) {
1951 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1952 }
1953 numStreamsOnEncoder++;
1954 numYuv888OnEncoder++;
1955 largeYuv888Size.width = newStream->width;
1956 largeYuv888Size.height = newStream->height;
1957 }
1958 break;
1959 default:
1960 processedStreamCnt++;
1961 if (isOnEncoder(maxViewfinderSize, newStream->width,
1962 newStream->height)) {
1963 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1964 numStreamsOnEncoder++;
1965 }
1966 break;
1967 }
1968
1969 }
1970 }
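 // EIS is only kept enabled for rear-sensor video use cases: it is turned
 // off below for front/front-aux cameras and for stream configurations
 // without a video stream.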
1971
1972 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1973 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1974 !m_bIsVideo) {
1975 m_bEisEnable = false;
1976 }
1977
1978 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1979 pthread_mutex_unlock(&mMutex);
1980 return -EINVAL;
1981 }
1982
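 // debug.camera.tnr.forceenable is a debug override that forces TNR on even
 // when the stream-configuration checks below would leave it disabled.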
1983 uint8_t forceEnableTnr = 0;
1984 char tnr_prop[PROPERTY_VALUE_MAX];
1985 memset(tnr_prop, 0, sizeof(tnr_prop));
1986 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1987 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1988
1989 /* Logic to enable/disable TNR based on specific config size/etc.*/
1990 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1991 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1992 m_bTnrEnabled = true;
1993 else if (forceEnableTnr)
1994 m_bTnrEnabled = true;
1995
1996 char videoHdrProp[PROPERTY_VALUE_MAX];
1997 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1998 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1999 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2000
2001 if (hdr_mode_prop == 1 && m_bIsVideo &&
2002 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2003 m_bVideoHdrEnabled = true;
2004 else
2005 m_bVideoHdrEnabled = false;
2006
2007
2008 /* Check if num_streams is sane */
2009 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2010 rawStreamCnt > MAX_RAW_STREAMS ||
2011 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2012 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2013 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2014 pthread_mutex_unlock(&mMutex);
2015 return -EINVAL;
2016 }
2017 /* Check whether we have zsl stream or 4k video case */
2018 if (isZsl && m_bIs4KVideo) {
2019 LOGE("Currently invalid configuration ZSL & 4K Video!");
2020 pthread_mutex_unlock(&mMutex);
2021 return -EINVAL;
2022 }
2023 /* Check if stream sizes are sane */
2024 if (numStreamsOnEncoder > 2) {
2025 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2026 pthread_mutex_unlock(&mMutex);
2027 return -EINVAL;
2028 } else if (1 < numStreamsOnEncoder){
2029 bUseCommonFeatureMask = true;
2030 LOGH("Multiple streams above max viewfinder size, common mask needed");
2031 }
2032
2033 /* Check if BLOB size is greater than 4k in 4k recording case */
2034 if (m_bIs4KVideo && bJpegExceeds4K) {
2035 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2036 pthread_mutex_unlock(&mMutex);
2037 return -EINVAL;
2038 }
2039
2040 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2041 depthPresent) {
2042 LOGE("HAL doesn't support depth streams in HFR mode!");
2043 pthread_mutex_unlock(&mMutex);
2044 return -EINVAL;
2045 }
2046
2047 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2048 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2049 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2050 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2051 // configurations:
2052 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2053 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2054 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2055 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2056 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2057 __func__);
2058 pthread_mutex_unlock(&mMutex);
2059 return -EINVAL;
2060 }
2061
2062 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2063 // the YUV stream's size is greater or equal to the JPEG size, set common
2064 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2065 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2066 jpegSize.width, jpegSize.height) &&
2067 largeYuv888Size.width > jpegSize.width &&
2068 largeYuv888Size.height > jpegSize.height) {
2069 bYuv888OverrideJpeg = true;
2070 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2071 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2072 }
2073
2074 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2075 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2076 commonFeatureMask);
2077 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2078 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2079
2080 rc = validateStreamDimensions(streamList);
2081 if (rc == NO_ERROR) {
2082 rc = validateStreamRotations(streamList);
2083 }
2084 if (rc != NO_ERROR) {
2085 LOGE("Invalid stream configuration requested!");
2086 pthread_mutex_unlock(&mMutex);
2087 return rc;
2088 }
2089
2090 if (1 < pdStatCount) {
2091 LOGE("HAL doesn't support multiple PD streams");
2092 pthread_mutex_unlock(&mMutex);
2093 return -EINVAL;
2094 }
2095
2096 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2097 (1 == pdStatCount)) {
2098 LOGE("HAL doesn't support PD streams in HFR mode!");
2099 pthread_mutex_unlock(&mMutex);
2100 return -EINVAL;
2101 }
2102
2103 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2104 for (size_t i = 0; i < streamList->num_streams; i++) {
2105 camera3_stream_t *newStream = streamList->streams[i];
2106 LOGH("newStream type = %d, stream format = %d "
2107 "stream size : %d x %d, stream rotation = %d",
2108 newStream->stream_type, newStream->format,
2109 newStream->width, newStream->height, newStream->rotation);
2110 //if the stream is in the mStreamList validate it
2111 bool stream_exists = false;
2112 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2113 it != mStreamInfo.end(); it++) {
2114 if ((*it)->stream == newStream) {
2115 QCamera3ProcessingChannel *channel =
2116 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2117 stream_exists = true;
2118 if (channel)
2119 delete channel;
2120 (*it)->status = VALID;
2121 (*it)->stream->priv = NULL;
2122 (*it)->channel = NULL;
2123 }
2124 }
2125 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2126 //new stream
2127 stream_info_t* stream_info;
2128 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2129 if (!stream_info) {
2130 LOGE("Could not allocate stream info");
2131 rc = -ENOMEM;
2132 pthread_mutex_unlock(&mMutex);
2133 return rc;
2134 }
2135 stream_info->stream = newStream;
2136 stream_info->status = VALID;
2137 stream_info->channel = NULL;
2138 mStreamInfo.push_back(stream_info);
2139 }
2140 /* Covers Opaque ZSL and API1 F/W ZSL */
2141 if (IS_USAGE_ZSL(newStream->usage)
2142 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2143 if (zslStream != NULL) {
2144 LOGE("Multiple input/reprocess streams requested!");
2145 pthread_mutex_unlock(&mMutex);
2146 return BAD_VALUE;
2147 }
2148 zslStream = newStream;
2149 }
2150 /* Covers YUV reprocess */
2151 if (inputStream != NULL) {
2152 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2153 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2154 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2155 && inputStream->width == newStream->width
2156 && inputStream->height == newStream->height) {
2157 if (zslStream != NULL) {
2158 /* This scenario indicates that multiple YUV streams with the same size
2159 * as the input stream have been requested. Since the zsl stream handle
2160 * is solely used to override the size of streams that share h/w
2161 * streams, we just make a guess here as to which of the streams is the
2162 * ZSL stream. This will be refactored once there is generic logic for
2163 * streams sharing encoder output.
2164 */
2165 LOGH("Warning, Multiple ip/reprocess streams requested!");
2166 }
2167 zslStream = newStream;
2168 }
2169 }
2170 }
2171
2172 /* If a zsl stream is set, we know that we have configured at least one input or
2173 bidirectional stream */
2174 if (NULL != zslStream) {
2175 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2176 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2177 mInputStreamInfo.format = zslStream->format;
2178 mInputStreamInfo.usage = zslStream->usage;
2179 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2180 mInputStreamInfo.dim.width,
2181 mInputStreamInfo.dim.height,
2182 mInputStreamInfo.format, mInputStreamInfo.usage);
2183 }
2184
2185 cleanAndSortStreamInfo();
2186 if (mMetadataChannel) {
2187 delete mMetadataChannel;
2188 mMetadataChannel = NULL;
2189 }
2190 if (mSupportChannel) {
2191 delete mSupportChannel;
2192 mSupportChannel = NULL;
2193 }
2194
2195 if (mAnalysisChannel) {
2196 delete mAnalysisChannel;
2197 mAnalysisChannel = NULL;
2198 }
2199
2200 if (mDummyBatchChannel) {
2201 delete mDummyBatchChannel;
2202 mDummyBatchChannel = NULL;
2203 }
2204
2205 if (mDepthChannel) {
2206 mDepthChannel = NULL;
2207 }
2208
2209 char is_type_value[PROPERTY_VALUE_MAX];
2210 property_get("persist.camera.is_type", is_type_value, "4");
2211 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2212
2213 //Create metadata channel and initialize it
2214 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2215 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2216 gCamCapability[mCameraId]->color_arrangement);
2217 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2218 mChannelHandle, mCameraHandle->ops, captureResultCb,
2219 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
2220 if (mMetadataChannel == NULL) {
2221 LOGE("failed to allocate metadata channel");
2222 rc = -ENOMEM;
2223 pthread_mutex_unlock(&mMutex);
2224 return rc;
2225 }
2226 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2227 if (rc < 0) {
2228 LOGE("metadata channel initialization failed");
2229 delete mMetadataChannel;
2230 mMetadataChannel = NULL;
2231 pthread_mutex_unlock(&mMutex);
2232 return rc;
2233 }
2234
2235 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
2236 bool isRawStreamRequested = false;
2237 bool onlyRaw = true;
2238 // Keep track of preview/video stream indices.
2239 // There can be more than one preview stream, but only one video stream.
2240 int32_t video_stream_idx = -1;
2241 int32_t preview_stream_idx[streamList->num_streams];
2242 size_t preview_stream_cnt = 0;
2243 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2244 /* Allocate channel objects for the requested streams */
2245 for (size_t i = 0; i < streamList->num_streams; i++) {
2246 camera3_stream_t *newStream = streamList->streams[i];
2247 uint32_t stream_usage = newStream->usage;
2248 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2249 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2250 struct camera_info *p_info = NULL;
2251 pthread_mutex_lock(&gCamLock);
2252 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2253 pthread_mutex_unlock(&gCamLock);
2254 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2255 || IS_USAGE_ZSL(newStream->usage)) &&
2256 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
2257 onlyRaw = false; // There is non-raw stream - bypass flag if set
2258 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2259 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2260 if (bUseCommonFeatureMask)
2261 zsl_ppmask = commonFeatureMask;
2262 else
2263 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
2264 } else {
2265 if (numStreamsOnEncoder > 0)
2266 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2267 else
2268 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
2269 }
2270 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
2271 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
2272 onlyRaw = false; // There is non-raw stream - bypass flag if set
2273 LOGH("Input stream configured, reprocess config");
2274 } else {
2275 //for non zsl streams find out the format
2276 switch (newStream->format) {
2277 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2278 {
2279 onlyRaw = false; // There is non-raw stream - bypass flag if set
2280 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2281 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2282 /* add additional features to pp feature mask */
2283 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2284 mStreamConfigInfo.num_streams);
2285
2286 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2287 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2288 CAM_STREAM_TYPE_VIDEO;
2289 if (m_bTnrEnabled && m_bTnrVideo) {
2290 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2291 CAM_QCOM_FEATURE_CPP_TNR;
2292 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2293 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2294 ~CAM_QCOM_FEATURE_CDS;
2295 }
2296 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2297 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2298 CAM_QTI_FEATURE_PPEISCORE;
2299 }
2300 video_stream_idx = mStreamConfigInfo.num_streams;
2301 } else {
2302 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2303 CAM_STREAM_TYPE_PREVIEW;
2304 if (m_bTnrEnabled && m_bTnrPreview) {
2305 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2306 CAM_QCOM_FEATURE_CPP_TNR;
2307 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2308 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2309 ~CAM_QCOM_FEATURE_CDS;
2310 }
2311 if(!m_bSwTnrPreview) {
2312 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2313 ~CAM_QTI_FEATURE_SW_TNR;
2314 }
2315 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
2316 padding_info.width_padding = mSurfaceStridePadding;
2317 padding_info.height_padding = CAM_PAD_TO_2;
2318 previewSize.width = (int32_t)newStream->width;
2319 previewSize.height = (int32_t)newStream->height;
2320 }
2321 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2322 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2323 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2324 newStream->height;
2325 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2326 newStream->width;
2327 }
2328 }
2329 break;
2330 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2331 onlyRaw = false; // There is non-raw stream - bypass flag if set
2332 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2333 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2334 if (bUseCommonFeatureMask)
2335 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2336 commonFeatureMask;
2337 else
2338 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2339 CAM_QCOM_FEATURE_NONE;
2340 } else {
2341 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2342 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2343 }
2344 break;
2345 case HAL_PIXEL_FORMAT_BLOB:
2346 onlyRaw = false; // There is non-raw stream - bypass flag if set
2347 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2348 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2349 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2350 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2351 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2352 /* Remove rotation if it is not supported
2353 for 4K LiveVideo snapshot case (online processing) */
2354 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2355 CAM_QCOM_FEATURE_ROTATION)) {
2356 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2357 &= ~CAM_QCOM_FEATURE_ROTATION;
2358 }
2359 } else {
2360 if (bUseCommonFeatureMask &&
2361 isOnEncoder(maxViewfinderSize, newStream->width,
2362 newStream->height)) {
2363 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2364 } else {
2365 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2366 }
2367 }
2368 if (isZsl) {
2369 if (zslStream) {
2370 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2371 (int32_t)zslStream->width;
2372 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2373 (int32_t)zslStream->height;
2374 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2375 zsl_ppmask;
2376 } else {
2377 LOGE("Error, No ZSL stream identified");
2378 pthread_mutex_unlock(&mMutex);
2379 return -EINVAL;
2380 }
2381 } else if (m_bIs4KVideo) {
2382 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2383 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2384 } else if (bYuv888OverrideJpeg) {
2385 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2386 (int32_t)largeYuv888Size.width;
2387 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2388 (int32_t)largeYuv888Size.height;
2389 }
2390 break;
2391 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2392 case HAL_PIXEL_FORMAT_RAW16:
2393 case HAL_PIXEL_FORMAT_RAW10:
2394 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2395 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2396 isRawStreamRequested = true;
2397 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2398 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2399 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2400 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2401 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2402 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2403 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2404 gCamCapability[mCameraId]->dt[mPDIndex];
2405 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2406 gCamCapability[mCameraId]->vc[mPDIndex];
2407 }
2408 break;
2409 default:
2410 onlyRaw = false; // There is non-raw stream - bypass flag if set
2411 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2412 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2413 break;
2414 }
2415 }
2416
2417 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2418 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2419 gCamCapability[mCameraId]->color_arrangement);
2420
2421 if (newStream->priv == NULL) {
2422 //New stream, construct channel
2423 switch (newStream->stream_type) {
2424 case CAMERA3_STREAM_INPUT:
2425 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2426 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2427 break;
2428 case CAMERA3_STREAM_BIDIRECTIONAL:
2429 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2430 GRALLOC_USAGE_HW_CAMERA_WRITE;
2431 break;
2432 case CAMERA3_STREAM_OUTPUT:
2433 /* For video encoding stream, set read/write rarely
2434 * flag so that they may be set to un-cached */
2435 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2436 newStream->usage |=
2437 (GRALLOC_USAGE_SW_READ_RARELY |
2438 GRALLOC_USAGE_SW_WRITE_RARELY |
2439 GRALLOC_USAGE_HW_CAMERA_WRITE);
2440 else if (IS_USAGE_ZSL(newStream->usage))
2441 {
2442 LOGD("ZSL usage flag skipping");
2443 }
2444 else if (newStream == zslStream
2445 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2446 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2447 } else
2448 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2449 break;
2450 default:
2451 LOGE("Invalid stream_type %d", newStream->stream_type);
2452 break;
2453 }
2454
2455 bool forcePreviewUBWC = true;
2456 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2457 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2458 QCamera3ProcessingChannel *channel = NULL;
2459 switch (newStream->format) {
2460 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2461 if ((newStream->usage &
2462 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2463 (streamList->operation_mode ==
2464 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2465 ) {
2466 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2467 mChannelHandle, mCameraHandle->ops, captureResultCb,
2468 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2469 this,
2470 newStream,
2471 (cam_stream_type_t)
2472 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2473 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2474 mMetadataChannel,
2475 0); //heap buffers are not required for HFR video channel
2476 if (channel == NULL) {
2477 LOGE("allocation of channel failed");
2478 pthread_mutex_unlock(&mMutex);
2479 return -ENOMEM;
2480 }
2481 //channel->getNumBuffers() will return 0 here so use
2482 //MAX_INFLIGHT_HFR_REQUESTS
2483 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2484 newStream->priv = channel;
2485 LOGI("num video buffers in HFR mode: %d",
2486 MAX_INFLIGHT_HFR_REQUESTS);
2487 } else {
2488 /* Copy stream contents in HFR preview only case to create
2489 * dummy batch channel so that sensor streaming is in
2490 * HFR mode */
2491 if (!m_bIsVideo && (streamList->operation_mode ==
2492 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2493 mDummyBatchStream = *newStream;
2494 }
2495 int bufferCount = MAX_INFLIGHT_REQUESTS;
2496 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2497 CAM_STREAM_TYPE_VIDEO) {
2498 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2499 bufferCount = MAX_VIDEO_BUFFERS;
2500 }
2501 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2502 mChannelHandle, mCameraHandle->ops, captureResultCb,
2503 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2504 this,
2505 newStream,
2506 (cam_stream_type_t)
2507 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2508 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2509 mMetadataChannel,
2510 bufferCount);
2511 if (channel == NULL) {
2512 LOGE("allocation of channel failed");
2513 pthread_mutex_unlock(&mMutex);
2514 return -ENOMEM;
2515 }
2516 /* disable UBWC for preview, though supported,
2517 * to take advantage of CPP duplication */
2518 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
2519 (previewSize.width == (int32_t)videoWidth) &&
2520 (previewSize.height == (int32_t)videoHeight)) {
2521 forcePreviewUBWC = false;
2522 }
2523 channel->setUBWCEnabled(forcePreviewUBWC);
2524 newStream->max_buffers = channel->getNumBuffers();
2525 newStream->priv = channel;
2526 }
2527 break;
2528 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2529 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2530 mChannelHandle,
2531 mCameraHandle->ops, captureResultCb,
2532 setBufferErrorStatus, &padding_info,
2533 this,
2534 newStream,
2535 (cam_stream_type_t)
2536 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2537 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2538 mMetadataChannel);
2539 if (channel == NULL) {
2540 LOGE("allocation of YUV channel failed");
2541 pthread_mutex_unlock(&mMutex);
2542 return -ENOMEM;
2543 }
2544 newStream->max_buffers = channel->getNumBuffers();
2545 newStream->priv = channel;
2546 break;
2547 }
2548 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2549 case HAL_PIXEL_FORMAT_RAW16:
2550 case HAL_PIXEL_FORMAT_RAW10: {
2551 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2552 (HAL_DATASPACE_DEPTH != newStream->data_space))
2553 ? true : false;
2554 mRawChannel = new QCamera3RawChannel(
2555 mCameraHandle->camera_handle, mChannelHandle,
2556 mCameraHandle->ops, captureResultCb,
2557 setBufferErrorStatus, &padding_info,
2558 this, newStream,
2559 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2560 mMetadataChannel, isRAW16);
2561 if (mRawChannel == NULL) {
2562 LOGE("allocation of raw channel failed");
2563 pthread_mutex_unlock(&mMutex);
2564 return -ENOMEM;
2565 }
2566 newStream->max_buffers = mRawChannel->getNumBuffers();
2567 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2568 break;
2569 }
2570 case HAL_PIXEL_FORMAT_BLOB:
2571 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2572 mDepthChannel = new QCamera3DepthChannel(
2573 mCameraHandle->camera_handle, mChannelHandle,
2574 mCameraHandle->ops, NULL, NULL, &padding_info,
2575 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2576 mMetadataChannel);
2577 if (NULL == mDepthChannel) {
2578 LOGE("Allocation of depth channel failed");
2579 pthread_mutex_unlock(&mMutex);
2580 return NO_MEMORY;
2581 }
2582 newStream->priv = mDepthChannel;
2583 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2584 } else {
2585 // Max live snapshot inflight buffer is 1. This is to mitigate
2586 // frame drop issues for video snapshot. The more buffers being
2587 // allocated, the more frame drops there are.
2588 mPictureChannel = new QCamera3PicChannel(
2589 mCameraHandle->camera_handle, mChannelHandle,
2590 mCameraHandle->ops, captureResultCb,
2591 setBufferErrorStatus, &padding_info, this, newStream,
2592 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2593 m_bIs4KVideo, isZsl, mMetadataChannel,
2594 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2595 if (mPictureChannel == NULL) {
2596 LOGE("allocation of channel failed");
2597 pthread_mutex_unlock(&mMutex);
2598 return -ENOMEM;
2599 }
2600 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2601 newStream->max_buffers = mPictureChannel->getNumBuffers();
2602 mPictureChannel->overrideYuvSize(
2603 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2604 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2605 }
2606 break;
2607
2608 default:
2609 LOGE("not a supported format 0x%x", newStream->format);
2610 break;
2611 }
2612 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2613 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2614 } else {
2615 LOGE("Error, Unknown stream type");
2616 pthread_mutex_unlock(&mMutex);
2617 return -EINVAL;
2618 }
2619
2620 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2621 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2622 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
2623 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2624 newStream->width, newStream->height, forcePreviewUBWC);
2625 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2626 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2627 }
2628 }
2629
2630 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2631 it != mStreamInfo.end(); it++) {
2632 if ((*it)->stream == newStream) {
2633 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2634 break;
2635 }
2636 }
2637 } else {
2638 // Channel already exists for this stream
2639 // Do nothing for now
2640 }
2641 padding_info = gCamCapability[mCameraId]->padding_info;
2642
2643 /* Do not add entries for input and depth streams in metastream info
2644 * since there is no real stream associated with them
2645 */
2646 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
2647 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2648 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
2649 mStreamConfigInfo.num_streams++;
2650 }
2651 }
2652
2653 // By default, preview stream TNR is disabled.
2654 // Enable TNR to the preview stream if all conditions below are satisfied:
2655 // 1. resolution <= 1080p.
2656 // 2. preview resolution == video resolution.
2657 // 3. video stream TNR is enabled.
2658 // 4. EIS2.0
2659 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2660 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2661 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2662 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2663 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2664 video_stream->width == preview_stream->width &&
2665 video_stream->height == preview_stream->height) {
2666 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2667 CAM_QCOM_FEATURE_CPP_TNR;
2668 // TNR and CDS are mutually exclusive, so clear CDS from the feature mask.
2669 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2670 ~CAM_QCOM_FEATURE_CDS;
2671 }
2672 }
2673
Thierry Strudel2896d122017-02-23 19:18:03 -08002674 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2675 onlyRaw = false;
2676 }
2677
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002678 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002679 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002680 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002681 cam_analysis_info_t analysisInfo;
2682 int32_t ret = NO_ERROR;
2683 ret = mCommon.getAnalysisInfo(
2684 FALSE,
2685 analysisFeatureMask,
2686 &analysisInfo);
2687 if (ret == NO_ERROR) {
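// Y-only analysis formats use the Y filter arrangement; all other formats
// inherit the sensor's color filter arrangement.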
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002688 cam_color_filter_arrangement_t analysis_color_arrangement =
2689 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2690 CAM_FILTER_ARRANGEMENT_Y :
2691 gCamCapability[mCameraId]->color_arrangement);
2692 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2693 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002694 cam_dimension_t analysisDim;
2695 analysisDim = mCommon.getMatchingDimension(previewSize,
2696 analysisInfo.analysis_recommended_res);
2697
2698 mAnalysisChannel = new QCamera3SupportChannel(
2699 mCameraHandle->camera_handle,
2700 mChannelHandle,
2701 mCameraHandle->ops,
2702 &analysisInfo.analysis_padding_info,
2703 analysisFeatureMask,
2704 CAM_STREAM_TYPE_ANALYSIS,
2705 &analysisDim,
2706 (analysisInfo.analysis_format
2707 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2708 : CAM_FORMAT_YUV_420_NV21),
2709 analysisInfo.hw_analysis_supported,
2710 gCamCapability[mCameraId]->color_arrangement,
2711 this,
2712 0); // force buffer count to 0
2713 } else {
2714 LOGW("getAnalysisInfo failed, ret = %d", ret);
2715 }
2716 if (!mAnalysisChannel) {
2717 LOGW("Analysis channel cannot be created");
2718 }
2719 }
2720
Thierry Strudel3d639192016-09-09 11:52:26 -07002721 //RAW DUMP channel
2722 if (mEnableRawDump && isRawStreamRequested == false){
2723 cam_dimension_t rawDumpSize;
2724 rawDumpSize = getMaxRawSize(mCameraId);
2725 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2726 setPAAFSupport(rawDumpFeatureMask,
2727 CAM_STREAM_TYPE_RAW,
2728 gCamCapability[mCameraId]->color_arrangement);
2729 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2730 mChannelHandle,
2731 mCameraHandle->ops,
2732 rawDumpSize,
2733 &padding_info,
2734 this, rawDumpFeatureMask);
2735 if (!mRawDumpChannel) {
2736 LOGE("Raw Dump channel cannot be created");
2737 pthread_mutex_unlock(&mMutex);
2738 return -ENOMEM;
2739 }
2740 }
2741
Thierry Strudel3d639192016-09-09 11:52:26 -07002742 if (mAnalysisChannel) {
2743 cam_analysis_info_t analysisInfo;
2744 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2745 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2746 CAM_STREAM_TYPE_ANALYSIS;
2747 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2748 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002749 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002750 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2751 &analysisInfo);
2752 if (rc != NO_ERROR) {
2753 LOGE("getAnalysisInfo failed, ret = %d", rc);
2754 pthread_mutex_unlock(&mMutex);
2755 return rc;
2756 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002757 cam_color_filter_arrangement_t analysis_color_arrangement =
2758 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2759 CAM_FILTER_ARRANGEMENT_Y :
2760 gCamCapability[mCameraId]->color_arrangement);
2761 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2762 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2763 analysis_color_arrangement);
2764
Thierry Strudel3d639192016-09-09 11:52:26 -07002765 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002766 mCommon.getMatchingDimension(previewSize,
2767 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002768 mStreamConfigInfo.num_streams++;
2769 }
2770
Thierry Strudel2896d122017-02-23 19:18:03 -08002771 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002772 cam_analysis_info_t supportInfo;
2773 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2774 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2775 setPAAFSupport(callbackFeatureMask,
2776 CAM_STREAM_TYPE_CALLBACK,
2777 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002778 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002779 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002780 if (ret != NO_ERROR) {
2781 /* Ignore the error for Mono camera
2782 * because the PAAF bit mask is only set
2783 * for CAM_STREAM_TYPE_ANALYSIS stream type
2784 */
2785 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2786 LOGW("getAnalysisInfo failed, ret = %d", ret);
2787 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002788 }
2789 mSupportChannel = new QCamera3SupportChannel(
2790 mCameraHandle->camera_handle,
2791 mChannelHandle,
2792 mCameraHandle->ops,
2793 &gCamCapability[mCameraId]->padding_info,
2794 callbackFeatureMask,
2795 CAM_STREAM_TYPE_CALLBACK,
2796 &QCamera3SupportChannel::kDim,
2797 CAM_FORMAT_YUV_420_NV21,
2798 supportInfo.hw_analysis_supported,
2799 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002800 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002801 if (!mSupportChannel) {
2802 LOGE("dummy channel cannot be created");
2803 pthread_mutex_unlock(&mMutex);
2804 return -ENOMEM;
2805 }
2806 }
2807
2808 if (mSupportChannel) {
2809 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2810 QCamera3SupportChannel::kDim;
2811 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2812 CAM_STREAM_TYPE_CALLBACK;
2813 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2814 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2815 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2816 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2817 gCamCapability[mCameraId]->color_arrangement);
2818 mStreamConfigInfo.num_streams++;
2819 }
2820
2821 if (mRawDumpChannel) {
2822 cam_dimension_t rawSize;
2823 rawSize = getMaxRawSize(mCameraId);
2824 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2825 rawSize;
2826 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2827 CAM_STREAM_TYPE_RAW;
2828 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2829 CAM_QCOM_FEATURE_NONE;
2830 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2831 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2832 gCamCapability[mCameraId]->color_arrangement);
2833 mStreamConfigInfo.num_streams++;
2834 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002835
2836 if (mHdrPlusRawSrcChannel) {
2837 cam_dimension_t rawSize;
2838 rawSize = getMaxRawSize(mCameraId);
2839 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2840 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2841 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2842 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2843 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2844 gCamCapability[mCameraId]->color_arrangement);
2845 mStreamConfigInfo.num_streams++;
2846 }
2847
Thierry Strudel3d639192016-09-09 11:52:26 -07002848 /* In HFR mode, if no video stream is added, create a dummy channel so that the
2849 * ISP can still run in batch mode even for the preview-only case. This channel is
2850 * never 'start'ed (no stream-on); it is only 'initialized' */
2851 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2852 !m_bIsVideo) {
2853 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2854 setPAAFSupport(dummyFeatureMask,
2855 CAM_STREAM_TYPE_VIDEO,
2856 gCamCapability[mCameraId]->color_arrangement);
2857 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2858 mChannelHandle,
2859 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002860 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002861 this,
2862 &mDummyBatchStream,
2863 CAM_STREAM_TYPE_VIDEO,
2864 dummyFeatureMask,
2865 mMetadataChannel);
2866 if (NULL == mDummyBatchChannel) {
2867 LOGE("creation of mDummyBatchChannel failed."
2868 "Preview will use non-hfr sensor mode ");
2869 }
2870 }
2871 if (mDummyBatchChannel) {
2872 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2873 mDummyBatchStream.width;
2874 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2875 mDummyBatchStream.height;
2876 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2877 CAM_STREAM_TYPE_VIDEO;
2878 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2879 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2880 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2881 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2882 gCamCapability[mCameraId]->color_arrangement);
2883 mStreamConfigInfo.num_streams++;
2884 }
2885
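// max_buffers: 0 for 4K video, MAX_VIDEO_BUFFERS when EIS 3.0 is enabled,
// MAX_INFLIGHT_REQUESTS otherwise.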
2886 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2887 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002888 m_bIs4KVideo ? 0 :
2889 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002890
2891 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2892 for (pendingRequestIterator i = mPendingRequestsList.begin();
2893 i != mPendingRequestsList.end();) {
2894 i = erasePendingRequest(i);
2895 }
2896 mPendingFrameDropList.clear();
2897 // Initialize/Reset the pending buffers list
2898 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2899 req.mPendingBufferList.clear();
2900 }
2901 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2902
Thierry Strudel3d639192016-09-09 11:52:26 -07002903 mCurJpegMeta.clear();
2904 // Get the min frame duration for this stream configuration
2905 deriveMinFrameDuration();
2906
Chien-Yu Chenee335912017-02-09 17:53:20 -08002907 mFirstPreviewIntentSeen = false;
2908
2909 // Disable HDR+ if it's enabled
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002910 {
2911 Mutex::Autolock l(gHdrPlusClientLock);
2912 disableHdrPlusModeLocked();
2913 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002914
Thierry Strudel3d639192016-09-09 11:52:26 -07002915 // Update state
2916 mState = CONFIGURED;
2917
Shuzhen Wang3c077d72017-04-20 22:48:59 -07002918 mFirstMetadataCallback = true;
2919
Thierry Strudel3d639192016-09-09 11:52:26 -07002920 pthread_mutex_unlock(&mMutex);
2921
2922 return rc;
2923}
2924
2925/*===========================================================================
2926 * FUNCTION : validateCaptureRequest
2927 *
2928 * DESCRIPTION: validate a capture request from camera service
2929 *
2930 * PARAMETERS :
2931 * @request : request from framework to process
2932 *
2933 * RETURN :
2934 *
2935 *==========================================================================*/
2936int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002937 camera3_capture_request_t *request,
2938 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002939{
2940 ssize_t idx = 0;
2941 const camera3_stream_buffer_t *b;
2942 CameraMetadata meta;
2943
2944 /* Sanity check the request */
2945 if (request == NULL) {
2946 LOGE("NULL capture request");
2947 return BAD_VALUE;
2948 }
2949
2950 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2951 /*settings cannot be null for the first request*/
2952 return BAD_VALUE;
2953 }
2954
2955 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002956 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2957 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002958 LOGE("Request %d: No output buffers provided!",
2959 frameNumber);
2960 return BAD_VALUE;
2961 }
2962 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2963 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2964 request->num_output_buffers, MAX_NUM_STREAMS);
2965 return BAD_VALUE;
2966 }
2967 if (request->input_buffer != NULL) {
2968 b = request->input_buffer;
2969 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2970 LOGE("Request %d: Buffer %ld: Status not OK!",
2971 frameNumber, (long)idx);
2972 return BAD_VALUE;
2973 }
2974 if (b->release_fence != -1) {
2975 LOGE("Request %d: Buffer %ld: Has a release fence!",
2976 frameNumber, (long)idx);
2977 return BAD_VALUE;
2978 }
2979 if (b->buffer == NULL) {
2980 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2981 frameNumber, (long)idx);
2982 return BAD_VALUE;
2983 }
2984 }
2985
2986 // Validate all buffers
2987 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002988 if (b == NULL) {
2989 return BAD_VALUE;
2990 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002991 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002992 QCamera3ProcessingChannel *channel =
2993 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2994 if (channel == NULL) {
2995 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2996 frameNumber, (long)idx);
2997 return BAD_VALUE;
2998 }
2999 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3000 LOGE("Request %d: Buffer %ld: Status not OK!",
3001 frameNumber, (long)idx);
3002 return BAD_VALUE;
3003 }
3004 if (b->release_fence != -1) {
3005 LOGE("Request %d: Buffer %ld: Has a release fence!",
3006 frameNumber, (long)idx);
3007 return BAD_VALUE;
3008 }
3009 if (b->buffer == NULL) {
3010 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3011 frameNumber, (long)idx);
3012 return BAD_VALUE;
3013 }
3014 if (*(b->buffer) == NULL) {
3015 LOGE("Request %d: Buffer %ld: NULL private handle!",
3016 frameNumber, (long)idx);
3017 return BAD_VALUE;
3018 }
3019 idx++;
3020 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003021 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003022 return NO_ERROR;
3023}
3024
3025/*===========================================================================
3026 * FUNCTION : deriveMinFrameDuration
3027 *
3028 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3029 * on currently configured streams.
3030 *
3031 * PARAMETERS : NONE
3032 *
3033 * RETURN : NONE
3034 *
3035 *==========================================================================*/
3036void QCamera3HardwareInterface::deriveMinFrameDuration()
3037{
3038 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3039
3040 maxJpegDim = 0;
3041 maxProcessedDim = 0;
3042 maxRawDim = 0;
3043
3044 // Figure out maximum jpeg, processed, and raw dimensions
3045 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3046 it != mStreamInfo.end(); it++) {
3047
3048 // Input stream doesn't have valid stream_type
3049 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3050 continue;
3051
3052 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3053 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3054 if (dimension > maxJpegDim)
3055 maxJpegDim = dimension;
3056 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3057 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3058 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3059 if (dimension > maxRawDim)
3060 maxRawDim = dimension;
3061 } else {
3062 if (dimension > maxProcessedDim)
3063 maxProcessedDim = dimension;
3064 }
3065 }
3066
3067 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3068 MAX_SIZES_CNT);
3069
3070 //Assume all jpeg dimensions are in processed dimensions.
3071 if (maxJpegDim > maxProcessedDim)
3072 maxProcessedDim = maxJpegDim;
3073 //Find the smallest raw dimension that is greater or equal to jpeg dimension
3074 if (maxProcessedDim > maxRawDim) {
3075 maxRawDim = INT32_MAX;
3076
3077 for (size_t i = 0; i < count; i++) {
3078 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3079 gCamCapability[mCameraId]->raw_dim[i].height;
3080 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3081 maxRawDim = dimension;
3082 }
3083 }
3084
3085 //Find minimum durations for processed, jpeg, and raw
3086 for (size_t i = 0; i < count; i++) {
3087 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3088 gCamCapability[mCameraId]->raw_dim[i].height) {
3089 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3090 break;
3091 }
3092 }
3093 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3094 for (size_t i = 0; i < count; i++) {
3095 if (maxProcessedDim ==
3096 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3097 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3098 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3099 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3100 break;
3101 }
3102 }
3103}
3104
3105/*===========================================================================
3106 * FUNCTION : getMinFrameDuration
3107 *
3108 * DESCRIPTION: get the minimum frame duration based on the previously derived
3109 * minimum frame durations and the current request configuration.
3110 *
3111 * PARAMETERS : @request: request sent by the framework
3112 *
3113 * RETURN : min frame duration for a particular request
3114 *
3115 *==========================================================================*/
3116int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3117{
3118 bool hasJpegStream = false;
3119 bool hasRawStream = false;
3120 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3121 const camera3_stream_t *stream = request->output_buffers[i].stream;
3122 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3123 hasJpegStream = true;
3124 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3125 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3126 stream->format == HAL_PIXEL_FORMAT_RAW16)
3127 hasRawStream = true;
3128 }
3129
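// Requests without a BLOB stream are bounded by the slower of the raw and processed
// minimum durations; requests with a BLOB stream also fold in the JPEG minimum.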
3130 if (!hasJpegStream)
3131 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3132 else
3133 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3134}
3135
3136/*===========================================================================
3137 * FUNCTION : handleBuffersDuringFlushLock
3138 *
3139 * DESCRIPTION: Account for buffers returned from back-end during flush
3140 * This function is executed while mMutex is held by the caller.
3141 *
3142 * PARAMETERS :
3143 * @buffer: image buffer for the callback
3144 *
3145 * RETURN :
3146 *==========================================================================*/
3147void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3148{
3149 bool buffer_found = false;
3150 for (List<PendingBuffersInRequest>::iterator req =
3151 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3152 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3153 for (List<PendingBufferInfo>::iterator i =
3154 req->mPendingBufferList.begin();
3155 i != req->mPendingBufferList.end(); i++) {
3156 if (i->buffer == buffer->buffer) {
3157 mPendingBuffersMap.numPendingBufsAtFlush--;
3158 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3159 buffer->buffer, req->frame_number,
3160 mPendingBuffersMap.numPendingBufsAtFlush);
3161 buffer_found = true;
3162 break;
3163 }
3164 }
3165 if (buffer_found) {
3166 break;
3167 }
3168 }
3169 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3170 //signal the flush()
3171 LOGD("All buffers returned to HAL. Continue flush");
3172 pthread_cond_signal(&mBuffersCond);
3173 }
3174}
3175
Thierry Strudel3d639192016-09-09 11:52:26 -07003176/*===========================================================================
3177 * FUNCTION : handleBatchMetadata
3178 *
3179 * DESCRIPTION: Handles metadata buffer callback in batch mode
3180 *
3181 * PARAMETERS : @metadata_buf: metadata buffer
3182 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3183 * the meta buf in this method
3184 *
3185 * RETURN :
3186 *
3187 *==========================================================================*/
3188void QCamera3HardwareInterface::handleBatchMetadata(
3189 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3190{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003191 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003192
3193 if (NULL == metadata_buf) {
3194 LOGE("metadata_buf is NULL");
3195 return;
3196 }
3197 /* In batch mode, the metadata will contain the frame number and timestamp of
3198 * the last frame in the batch. E.g. a batch containing buffers from requests
3199 * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8:
3200 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3201 * multiple process_capture_results */
3202 metadata_buffer_t *metadata =
3203 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3204 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3205 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3206 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3207 uint32_t frame_number = 0, urgent_frame_number = 0;
3208 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3209 bool invalid_metadata = false;
3210 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3211 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003212 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003213
3214 int32_t *p_frame_number_valid =
3215 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3216 uint32_t *p_frame_number =
3217 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3218 int64_t *p_capture_time =
3219 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3220 int32_t *p_urgent_frame_number_valid =
3221 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3222 uint32_t *p_urgent_frame_number =
3223 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3224
3225 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3226 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3227 (NULL == p_urgent_frame_number)) {
3228 LOGE("Invalid metadata");
3229 invalid_metadata = true;
3230 } else {
3231 frame_number_valid = *p_frame_number_valid;
3232 last_frame_number = *p_frame_number;
3233 last_frame_capture_time = *p_capture_time;
3234 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3235 last_urgent_frame_number = *p_urgent_frame_number;
3236 }
3237
3238 /* In batch mode, when no video buffers are requested, set_parms are sent
3239 * for every capture_request. The difference between consecutive urgent
3240 * frame numbers and frame numbers should be used to interpolate the
3241 * corresponding frame numbers and time stamps */
3242 pthread_mutex_lock(&mMutex);
3243 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003244 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3245 if(idx < 0) {
3246 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3247 last_urgent_frame_number);
3248 mState = ERROR;
3249 pthread_mutex_unlock(&mMutex);
3250 return;
3251 }
3252 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003253 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3254 first_urgent_frame_number;
3255
3256 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3257 urgent_frame_number_valid,
3258 first_urgent_frame_number, last_urgent_frame_number);
3259 }
3260
3261 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003262 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3263 if(idx < 0) {
3264 LOGE("Invalid frame number received: %d. Irrecoverable error",
3265 last_frame_number);
3266 mState = ERROR;
3267 pthread_mutex_unlock(&mMutex);
3268 return;
3269 }
3270 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003271 frameNumDiff = last_frame_number + 1 -
3272 first_frame_number;
3273 mPendingBatchMap.removeItem(last_frame_number);
3274
3275 LOGD("frm: valid: %d frm_num: %d - %d",
3276 frame_number_valid,
3277 first_frame_number, last_frame_number);
3278
3279 }
3280 pthread_mutex_unlock(&mMutex);
3281
3282 if (urgent_frame_number_valid || frame_number_valid) {
3283 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3284 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3285 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3286 urgentFrameNumDiff, last_urgent_frame_number);
3287 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3288 LOGE("frameNumDiff: %d frameNum: %d",
3289 frameNumDiff, last_frame_number);
3290 }
3291
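// Re-dispatch the single batch metadata once per inferred frame so that every
// request in the batch receives its own metadata handling.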
3292 for (size_t i = 0; i < loopCount; i++) {
3293 /* handleMetadataWithLock is called even for invalid_metadata for
3294 * pipeline depth calculation */
3295 if (!invalid_metadata) {
3296 /* Infer frame number. Batch metadata contains frame number of the
3297 * last frame */
3298 if (urgent_frame_number_valid) {
3299 if (i < urgentFrameNumDiff) {
3300 urgent_frame_number =
3301 first_urgent_frame_number + i;
3302 LOGD("inferred urgent frame_number: %d",
3303 urgent_frame_number);
3304 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3305 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3306 } else {
3307 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3308 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3309 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3310 }
3311 }
3312
3313 /* Infer frame number. Batch metadata contains frame number of the
3314 * last frame */
3315 if (frame_number_valid) {
3316 if (i < frameNumDiff) {
3317 frame_number = first_frame_number + i;
3318 LOGD("inferred frame_number: %d", frame_number);
3319 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3320 CAM_INTF_META_FRAME_NUMBER, frame_number);
3321 } else {
3322 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3323 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3324 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3325 }
3326 }
3327
3328 if (last_frame_capture_time) {
3329 //Infer timestamp
3330 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003331 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003332 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003333 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003334 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3335 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3336 LOGD("batch capture_time: %lld, capture_time: %lld",
3337 last_frame_capture_time, capture_time);
3338 }
3339 }
3340 pthread_mutex_lock(&mMutex);
3341 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003342 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003343 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3344 (i == frameNumDiff-1), /* last metadata in the batch */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003345 &is_metabuf_queued /* whether the meta buffer is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003346 pthread_mutex_unlock(&mMutex);
3347 }
3348
3349 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003350 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003351 mMetadataChannel->bufDone(metadata_buf);
3352 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003353 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003354 }
3355}
3356
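/*===========================================================================
 * FUNCTION : notifyError
 *
 * DESCRIPTION: Send a CAMERA3_MSG_ERROR notification for the given frame to the
 * framework
 *
 * PARAMETERS : @frameNumber: frame number of the request that hit the error
 * @errorCode: error code to be reported
 *
 * RETURN : None
 *
 *==========================================================================*/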
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003357void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3358 camera3_error_msg_code_t errorCode)
3359{
3360 camera3_notify_msg_t notify_msg;
3361 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3362 notify_msg.type = CAMERA3_MSG_ERROR;
3363 notify_msg.message.error.error_code = errorCode;
3364 notify_msg.message.error.error_stream = NULL;
3365 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003366 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003367
3368 return;
3369}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003370
3371/*===========================================================================
3372 * FUNCTION : sendPartialMetadataWithLock
3373 *
3374 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3375 *
3376 * PARAMETERS : @metadata: metadata buffer
3377 * @requestIter: The iterator for the pending capture request for
3378 * which the partial result is being sen
3379 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3380 * last urgent metadata in a batch. Always true for non-batch mode
3381 *
3382 * RETURN :
3383 *
3384 *==========================================================================*/
3385
3386void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3387 metadata_buffer_t *metadata,
3388 const pendingRequestIterator requestIter,
3389 bool lastUrgentMetadataInBatch)
3390{
3391 camera3_capture_result_t result;
3392 memset(&result, 0, sizeof(camera3_capture_result_t));
3393
3394 requestIter->partial_result_cnt++;
3395
3396 // Extract 3A metadata
3397 result.result = translateCbUrgentMetadataToResultMetadata(
3398 metadata, lastUrgentMetadataInBatch);
3399 // Populate metadata result
3400 result.frame_number = requestIter->frame_number;
3401 result.num_output_buffers = 0;
3402 result.output_buffers = NULL;
3403 result.partial_result = requestIter->partial_result_cnt;
3404
3405 {
3406 Mutex::Autolock l(gHdrPlusClientLock);
3407 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3408 // Notify HDR+ client about the partial metadata.
3409 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3410 result.partial_result == PARTIAL_RESULT_COUNT);
3411 }
3412 }
3413
3414 orchestrateResult(&result);
3415 LOGD("urgent frame_number = %u", result.frame_number);
3416 free_camera_metadata((camera_metadata_t *)result.result);
3417}
3418
Thierry Strudel3d639192016-09-09 11:52:26 -07003419/*===========================================================================
3420 * FUNCTION : handleMetadataWithLock
3421 *
3422 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3423 *
3424 * PARAMETERS : @metadata_buf: metadata buffer
3425 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3426 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003427 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3428 * last urgent metadata in a batch. Always true for non-batch mode
3429 * @lastMetadataInBatch: Boolean to indicate whether this is the
3430 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003431 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3432 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003433 *
3434 * RETURN :
3435 *
3436 *==========================================================================*/
3437void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003438 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003439 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3440 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003441{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003442 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003443 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3444 //during flush do not send metadata from this thread
3445 LOGD("not sending metadata during flush or when mState is error");
3446 if (free_and_bufdone_meta_buf) {
3447 mMetadataChannel->bufDone(metadata_buf);
3448 free(metadata_buf);
3449 }
3450 return;
3451 }
3452
3453 //not in flush
3454 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3455 int32_t frame_number_valid, urgent_frame_number_valid;
3456 uint32_t frame_number, urgent_frame_number;
3457 int64_t capture_time;
3458 nsecs_t currentSysTime;
3459
3460 int32_t *p_frame_number_valid =
3461 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3462 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3463 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3464 int32_t *p_urgent_frame_number_valid =
3465 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3466 uint32_t *p_urgent_frame_number =
3467 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3468 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3469 metadata) {
3470 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3471 *p_frame_number_valid, *p_frame_number);
3472 }
3473
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003474 camera_metadata_t *resultMetadata = nullptr;
3475
Thierry Strudel3d639192016-09-09 11:52:26 -07003476 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3477 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3478 LOGE("Invalid metadata");
3479 if (free_and_bufdone_meta_buf) {
3480 mMetadataChannel->bufDone(metadata_buf);
3481 free(metadata_buf);
3482 }
3483 goto done_metadata;
3484 }
3485 frame_number_valid = *p_frame_number_valid;
3486 frame_number = *p_frame_number;
3487 capture_time = *p_capture_time;
3488 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3489 urgent_frame_number = *p_urgent_frame_number;
3490 currentSysTime = systemTime(CLOCK_MONOTONIC);
3491
3492 // Detect if buffers from any requests are overdue
3493 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003494 int64_t timeout;
3495 {
3496 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3497 // If there is a pending HDR+ request, the following requests may be blocked until the
3498 // HDR+ request is done. So allow a longer timeout.
3499 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3500 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3501 }
3502
3503 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003504 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003505 assert(missed.stream->priv);
3506 if (missed.stream->priv) {
3507 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3508 assert(ch->mStreams[0]);
3509 if (ch->mStreams[0]) {
3510 LOGE("Cancel missing frame = %d, buffer = %p,"
3511 "stream type = %d, stream format = %d",
3512 req.frame_number, missed.buffer,
3513 ch->mStreams[0]->getMyType(), missed.stream->format);
3514 ch->timeoutFrame(req.frame_number);
3515 }
3516 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003517 }
3518 }
3519 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003520 // For the very first metadata callback, regardless of whether it contains a valid
3521 // frame number, send the partial metadata for the jump-starting requests.
3522 // Note that this has to be done even if the metadata doesn't contain a valid
3523 // urgent frame number, because when only 1 request is ever submitted to the HAL,
3524 // there won't be a subsequent valid urgent frame number.
3525 if (mFirstMetadataCallback) {
3526 for (pendingRequestIterator i =
3527 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3528 if (i->bUseFirstPartial) {
3529 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3530 }
3531 }
3532 mFirstMetadataCallback = false;
3533 }
3534
Thierry Strudel3d639192016-09-09 11:52:26 -07003535 //Partial result on process_capture_result for timestamp
3536 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003537 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003538
3539 // Received an urgent frame number, handle it
3540 //using partial results
3541 for (pendingRequestIterator i =
3542 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3543 LOGD("Iterator Frame = %d urgent frame = %d",
3544 i->frame_number, urgent_frame_number);
3545
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003546 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003547 (i->partial_result_cnt == 0)) {
3548 LOGE("Error: HAL missed urgent metadata for frame number %d",
3549 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003550 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003551 }
3552
3553 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003554 i->partial_result_cnt == 0) {
3555 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003556 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3557 // Instant AEC settled for this frame.
3558 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3559 mInstantAECSettledFrameNumber = urgent_frame_number;
3560 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003561 break;
3562 }
3563 }
3564 }
3565
3566 if (!frame_number_valid) {
3567 LOGD("Not a valid normal frame number, used as SOF only");
3568 if (free_and_bufdone_meta_buf) {
3569 mMetadataChannel->bufDone(metadata_buf);
3570 free(metadata_buf);
3571 }
3572 goto done_metadata;
3573 }
3574 LOGH("valid frame_number = %u, capture_time = %lld",
3575 frame_number, capture_time);
3576
Emilian Peev7650c122017-01-19 08:24:33 -08003577 if (metadata->is_depth_data_valid) {
3578 handleDepthDataLocked(metadata->depth_data, frame_number);
3579 }
3580
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003581 // Check whether any stream buffer corresponding to this frame was dropped.
3582 // If so, send ERROR_BUFFER for the corresponding stream.
3583 // Also, if instant AEC is enabled, frames need to be dropped until AEC has settled.
3584 for (auto & pendingRequest : mPendingRequestsList) {
3585 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3586 mInstantAECSettledFrameNumber)) {
3587 camera3_notify_msg_t notify_msg = {};
3588 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003589 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003590 QCamera3ProcessingChannel *channel =
3591 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003592 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003593 if (p_cam_frame_drop) {
3594 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003595 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003596 // Got the stream ID for drop frame.
3597 dropFrame = true;
3598 break;
3599 }
3600 }
3601 } else {
3602 // This is the instant AEC case.
3603 // For instant AEC, drop the stream buffers until AEC has settled.
3604 dropFrame = true;
3605 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003606
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003607 if (dropFrame) {
3608 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3609 if (p_cam_frame_drop) {
3610 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003611 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003612 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003613 } else {
3614 // For instant AEC, inform frame drop and frame number
3615 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3616 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003617 pendingRequest.frame_number, streamID,
3618 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003619 }
3620 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003621 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003622 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003623 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003624 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003625 if (p_cam_frame_drop) {
3626 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003627 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003628 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003629 } else {
3630 // For instant AEC, inform frame drop and frame number
3631 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3632 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003633 pendingRequest.frame_number, streamID,
3634 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003635 }
3636 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003637 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003638 PendingFrameDrop.stream_ID = streamID;
3639 // Add the Frame drop info to mPendingFrameDropList
3640 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003641 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003642 }
3643 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003644 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003645
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003646 for (auto & pendingRequest : mPendingRequestsList) {
3647 // Find the pending request with the frame number.
3648 if (pendingRequest.frame_number == frame_number) {
3649 // Update the sensor timestamp.
3650 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003651
Thierry Strudel3d639192016-09-09 11:52:26 -07003652
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003653 /* Set the timestamp in the display metadata so that clients aware of
3654 private_handle, such as VT, can use this unmodified timestamp.
3655 The camera framework is unaware of this timestamp and cannot change it */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003656 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003657
Thierry Strudel3d639192016-09-09 11:52:26 -07003658 // Find channel requiring metadata, meaning internal offline postprocess
3659 // is needed.
3660 // TODO: for now, we don't support two streams requiring metadata at the same time
3661 // (because we are not making copies, and the metadata buffer is not reference counted).
3662 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003663 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3664 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003665 if (iter->need_metadata) {
3666 internalPproc = true;
3667 QCamera3ProcessingChannel *channel =
3668 (QCamera3ProcessingChannel *)iter->stream->priv;
3669 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003670 if(p_is_metabuf_queued != NULL) {
3671 *p_is_metabuf_queued = true;
3672 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003673 break;
3674 }
3675 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003676 for (auto itr = pendingRequest.internalRequestList.begin();
3677 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003678 if (itr->need_metadata) {
3679 internalPproc = true;
3680 QCamera3ProcessingChannel *channel =
3681 (QCamera3ProcessingChannel *)itr->stream->priv;
3682 channel->queueReprocMetadata(metadata_buf);
3683 break;
3684 }
3685 }
3686
Thierry Strudel54dc9782017-02-15 12:12:10 -08003687 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003688
3689 bool *enableZsl = nullptr;
3690 if (gExposeEnableZslKey) {
3691 enableZsl = &pendingRequest.enableZsl;
3692 }
3693
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003694 resultMetadata = translateFromHalMetadata(metadata,
3695 pendingRequest.timestamp, pendingRequest.request_id,
3696 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3697 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003698 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003699 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003700 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003701 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003702 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003703 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003704
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003705 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003706
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003707 if (pendingRequest.blob_request) {
3708 //Dump tuning metadata if enabled and available
3709 char prop[PROPERTY_VALUE_MAX];
3710 memset(prop, 0, sizeof(prop));
3711 property_get("persist.camera.dumpmetadata", prop, "0");
3712 int32_t enabled = atoi(prop);
3713 if (enabled && metadata->is_tuning_params_valid) {
3714 dumpMetadataToFile(metadata->tuning_params,
3715 mMetaFrameCount,
3716 enabled,
3717 "Snapshot",
3718 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003719 }
3720 }
3721
3722 if (!internalPproc) {
3723 LOGD("couldn't find need_metadata for this metadata");
3724 // Return metadata buffer
3725 if (free_and_bufdone_meta_buf) {
3726 mMetadataChannel->bufDone(metadata_buf);
3727 free(metadata_buf);
3728 }
3729 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003730
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003731 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003732 }
3733 }
3734
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003735 // Try to send out shutter callbacks and capture results.
3736 handlePendingResultsWithLock(frame_number, resultMetadata);
3737 return;
3738
Thierry Strudel3d639192016-09-09 11:52:26 -07003739done_metadata:
3740 for (pendingRequestIterator i = mPendingRequestsList.begin();
3741 i != mPendingRequestsList.end() ;i++) {
3742 i->pipeline_depth++;
3743 }
3744 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3745 unblockRequestIfNecessary();
3746}
3747
3748/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003749 * FUNCTION : handleDepthDataLocked
3750 *
3751 * DESCRIPTION: Handles incoming depth data
3752 *
3753 * PARAMETERS : @depthData : Depth data
3754 * @frameNumber: Frame number of the incoming depth data
3755 *
3756 * RETURN :
3757 *
3758 *==========================================================================*/
3759void QCamera3HardwareInterface::handleDepthDataLocked(
3760 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3761 uint32_t currentFrameNumber;
3762 buffer_handle_t *depthBuffer;
3763
3764 if (nullptr == mDepthChannel) {
3765 LOGE("Depth channel not present!");
3766 return;
3767 }
3768
3769 camera3_stream_buffer_t resultBuffer =
3770 {.acquire_fence = -1,
3771 .release_fence = -1,
3772 .status = CAMERA3_BUFFER_STATUS_OK,
3773 .buffer = nullptr,
3774 .stream = mDepthChannel->getStream()};
3775 camera3_capture_result_t result =
3776 {.result = nullptr,
3777 .num_output_buffers = 1,
3778 .output_buffers = &resultBuffer,
3779 .partial_result = 0,
3780 .frame_number = 0};
3781
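// Drain depth buffers in order: older frames whose depth data never arrived are
// returned with an error, the matching frame is populated with the new depth data,
// and newer frames are left pending.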
3782 do {
3783 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3784 if (nullptr == depthBuffer) {
3785 break;
3786 }
3787
3788 result.frame_number = currentFrameNumber;
3789 resultBuffer.buffer = depthBuffer;
3790 if (currentFrameNumber == frameNumber) {
3791 int32_t rc = mDepthChannel->populateDepthData(depthData,
3792 frameNumber);
3793 if (NO_ERROR != rc) {
3794 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3795 } else {
3796 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3797 }
3798 } else if (currentFrameNumber > frameNumber) {
3799 break;
3800 } else {
3801 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3802 {{currentFrameNumber, mDepthChannel->getStream(),
3803 CAMERA3_MSG_ERROR_BUFFER}}};
3804 orchestrateNotify(&notify_msg);
3805
3806 LOGE("Depth buffer for frame number: %d is missing "
3807 "returning back!", currentFrameNumber);
3808 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3809 }
3810 mDepthChannel->unmapBuffer(currentFrameNumber);
3811
3812 orchestrateResult(&result);
3813 } while (currentFrameNumber < frameNumber);
3814}
3815
3816/*===========================================================================
3817 * FUNCTION : notifyErrorFoPendingDepthData
3818 *
3819 * DESCRIPTION: Returns error for any pending depth buffers
3820 *
3821 * PARAMETERS : depthCh - depth channel that needs to get flushed
3822 *
3823 * RETURN :
3824 *
3825 *==========================================================================*/
3826void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3827 QCamera3DepthChannel *depthCh) {
3828 uint32_t currentFrameNumber;
3829 buffer_handle_t *depthBuffer;
3830
3831 if (nullptr == depthCh) {
3832 return;
3833 }
3834
3835 camera3_notify_msg_t notify_msg =
3836 {.type = CAMERA3_MSG_ERROR,
3837 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3838 camera3_stream_buffer_t resultBuffer =
3839 {.acquire_fence = -1,
3840 .release_fence = -1,
3841 .buffer = nullptr,
3842 .stream = depthCh->getStream(),
3843 .status = CAMERA3_BUFFER_STATUS_ERROR};
3844 camera3_capture_result_t result =
3845 {.result = nullptr,
3846 .frame_number = 0,
3847 .num_output_buffers = 1,
3848 .partial_result = 0,
3849 .output_buffers = &resultBuffer};
3850
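// Return every outstanding depth buffer to the framework with an error notification.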
3851 while (nullptr !=
3852 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3853 depthCh->unmapBuffer(currentFrameNumber);
3854
3855 notify_msg.message.error.frame_number = currentFrameNumber;
3856 orchestrateNotify(&notify_msg);
3857
3858 resultBuffer.buffer = depthBuffer;
3859 result.frame_number = currentFrameNumber;
3860 orchestrateResult(&result);
3861 };
3862}
3863
3864/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003865 * FUNCTION : hdrPlusPerfLock
3866 *
3867 * DESCRIPTION: perf lock for HDR+ using custom intent
3868 *
3869 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3870 *
3871 * RETURN : None
3872 *
3873 *==========================================================================*/
3874void QCamera3HardwareInterface::hdrPlusPerfLock(
3875 mm_camera_super_buf_t *metadata_buf)
3876{
3877 if (NULL == metadata_buf) {
3878 LOGE("metadata_buf is NULL");
3879 return;
3880 }
3881 metadata_buffer_t *metadata =
3882 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3883 int32_t *p_frame_number_valid =
3884 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3885 uint32_t *p_frame_number =
3886 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3887
3888 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3889 LOGE("%s: Invalid metadata", __func__);
3890 return;
3891 }
3892
3893 //acquire perf lock for 5 sec after the last HDR frame is captured
3894 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3895 if ((p_frame_number != NULL) &&
3896 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003897 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003898 }
3899 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003900}
3901
3902/*===========================================================================
3903 * FUNCTION : handleInputBufferWithLock
3904 *
3905 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3906 *
3907 * PARAMETERS : @frame_number: frame number of the input buffer
3908 *
3909 * RETURN :
3910 *
3911 *==========================================================================*/
3912void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3913{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003914 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003915 pendingRequestIterator i = mPendingRequestsList.begin();
3916 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3917 i++;
3918 }
3919 if (i != mPendingRequestsList.end() && i->input_buffer) {
3920 //found the right request
3921 if (!i->shutter_notified) {
3922 CameraMetadata settings;
3923 camera3_notify_msg_t notify_msg;
3924 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3925 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3926 if(i->settings) {
3927 settings = i->settings;
3928 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3929 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3930 } else {
3931 LOGE("No timestamp in input settings! Using current one.");
3932 }
3933 } else {
3934 LOGE("Input settings missing!");
3935 }
3936
3937 notify_msg.type = CAMERA3_MSG_SHUTTER;
3938 notify_msg.message.shutter.frame_number = frame_number;
3939 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003940 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003941 i->shutter_notified = true;
3942 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3943 i->frame_number, notify_msg.message.shutter.timestamp);
3944 }
3945
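// Wait on (and close) the input buffer's release fence before sending the result.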
3946 if (i->input_buffer->release_fence != -1) {
3947 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3948 close(i->input_buffer->release_fence);
3949 if (rc != OK) {
3950 LOGE("input buffer sync wait failed %d", rc);
3951 }
3952 }
3953
3954 camera3_capture_result result;
3955 memset(&result, 0, sizeof(camera3_capture_result));
3956 result.frame_number = frame_number;
3957 result.result = i->settings;
3958 result.input_buffer = i->input_buffer;
3959 result.partial_result = PARTIAL_RESULT_COUNT;
3960
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003961 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003962 LOGD("Input request metadata and input buffer frame_number = %u",
3963 i->frame_number);
3964 i = erasePendingRequest(i);
3965 } else {
3966 LOGE("Could not find input request for frame number %d", frame_number);
3967 }
3968}
3969
3970/*===========================================================================
3971 * FUNCTION : handleBufferWithLock
3972 *
3973 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3974 *
3975 * PARAMETERS : @buffer: image buffer for the callback
3976 * @frame_number: frame number of the image buffer
3977 *
3978 * RETURN :
3979 *
3980 *==========================================================================*/
3981void QCamera3HardwareInterface::handleBufferWithLock(
3982 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3983{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003984 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003985
3986 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3987 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3988 }
3989
Thierry Strudel3d639192016-09-09 11:52:26 -07003990 /* Nothing to be done during error state */
3991 if ((ERROR == mState) || (DEINIT == mState)) {
3992 return;
3993 }
3994 if (mFlushPerf) {
3995 handleBuffersDuringFlushLock(buffer);
3996 return;
3997 }
3998 //not in flush
3999 // If the frame number doesn't exist in the pending request list,
4000 // directly send the buffer to the frameworks, and update pending buffers map
4001 // Otherwise, book-keep the buffer.
4002 pendingRequestIterator i = mPendingRequestsList.begin();
4003 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4004 i++;
4005 }
4006 if (i == mPendingRequestsList.end()) {
4007 // Verify all pending requests frame_numbers are greater
4008 for (pendingRequestIterator j = mPendingRequestsList.begin();
4009 j != mPendingRequestsList.end(); j++) {
4010 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
4011 LOGW("Error: pending live frame number %d is smaller than %d",
4012 j->frame_number, frame_number);
4013 }
4014 }
4015 camera3_capture_result_t result;
4016 memset(&result, 0, sizeof(camera3_capture_result_t));
4017 result.result = NULL;
4018 result.frame_number = frame_number;
4019 result.num_output_buffers = 1;
4020 result.partial_result = 0;
4021 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4022 m != mPendingFrameDropList.end(); m++) {
4023 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4024 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4025 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4026 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4027 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4028 frame_number, streamID);
4029 m = mPendingFrameDropList.erase(m);
4030 break;
4031 }
4032 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004033 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07004034 result.output_buffers = buffer;
4035 LOGH("result frame_number = %d, buffer = %p",
4036 frame_number, buffer->buffer);
4037
4038 mPendingBuffersMap.removeBuf(buffer->buffer);
4039
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004040 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004041 } else {
4042 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004043 if (i->input_buffer->release_fence != -1) {
4044 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
4045 close(i->input_buffer->release_fence);
4046 if (rc != OK) {
4047 LOGE("input buffer sync wait failed %d", rc);
4048 }
4049 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004050 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004051
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004052 // Put buffer into the pending request
4053 for (auto &requestedBuffer : i->buffers) {
4054 if (requestedBuffer.stream == buffer->stream) {
4055 if (requestedBuffer.buffer != nullptr) {
4056 LOGE("Error: buffer is already set");
4057 } else {
4058 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
4059 sizeof(camera3_stream_buffer_t));
4060 *(requestedBuffer.buffer) = *buffer;
4061 LOGH("cache buffer %p at result frame_number %u",
4062 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07004063 }
4064 }
4065 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004066
4067 if (i->input_buffer) {
4068 // For a reprocessing request, try to send out shutter callback and result metadata.
4069 handlePendingResultsWithLock(frame_number, nullptr);
4070 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004071 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004072
4073 if (mPreviewStarted == false) {
4074 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4075 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004076 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4077
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004078 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4079 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4080 mPreviewStarted = true;
4081
4082 // Set power hint for preview
4083 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4084 }
4085 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004086}
4087
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004088void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
4089 const camera_metadata_t *resultMetadata)
4090{
4091 // Find the pending request for this result metadata.
4092 auto requestIter = mPendingRequestsList.begin();
4093 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4094 requestIter++;
4095 }
4096
4097 if (requestIter == mPendingRequestsList.end()) {
4098 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4099 return;
4100 }
4101
4102 // Update the result metadata
4103 requestIter->resultMetadata = resultMetadata;
4104
4105 // Check what type of request this is.
4106 bool liveRequest = false;
4107 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004108 // HDR+ request doesn't have partial results.
4109 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004110 } else if (requestIter->input_buffer != nullptr) {
4111 // Reprocessing request result is the same as settings.
4112 requestIter->resultMetadata = requestIter->settings;
4113 // Reprocessing request doesn't have partial results.
4114 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4115 } else {
4116 liveRequest = true;
4117 requestIter->partial_result_cnt++;
4118 mPendingLiveRequest--;
4119
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004120 {
4121 Mutex::Autolock l(gHdrPlusClientLock);
4122 // For a live request, send the metadata to HDR+ client.
4123 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4124 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4125 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4126 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004127 }
4128 }
4129
4130 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4131 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
4132 bool readyToSend = true;
4133
4134 // Iterate through the pending requests to send out shutter callbacks and results that are
4135 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4136 // live requests that don't have result metadata yet.
4137 auto iter = mPendingRequestsList.begin();
4138 while (iter != mPendingRequestsList.end()) {
4139 // Check if current pending request is ready. If it's not ready, the following pending
4140 // requests are also not ready.
4141 if (readyToSend && iter->resultMetadata == nullptr) {
4142 readyToSend = false;
4143 }
4144
4145 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4146
4147 std::vector<camera3_stream_buffer_t> outputBuffers;
4148
4149 camera3_capture_result_t result = {};
4150 result.frame_number = iter->frame_number;
4151 result.result = iter->resultMetadata;
4152 result.partial_result = iter->partial_result_cnt;
4153
4154 // If this pending buffer has result metadata, we may be able to send out shutter callback
4155 // and result metadata.
4156 if (iter->resultMetadata != nullptr) {
4157 if (!readyToSend) {
4158 // If any of the previous pending request is not ready, this pending request is
4159 // also not ready to send in order to keep shutter callbacks and result metadata
4160 // in order.
4161 iter++;
4162 continue;
4163 }
4164
4165 // Invoke shutter callback if not yet.
4166 if (!iter->shutter_notified) {
4167 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4168
4169 // Find the timestamp in HDR+ result metadata
4170 camera_metadata_ro_entry_t entry;
4171 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4172 ANDROID_SENSOR_TIMESTAMP, &entry);
4173 if (res != OK) {
4174 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4175 __FUNCTION__, iter->frame_number, strerror(-res), res);
4176 } else {
4177 timestamp = entry.data.i64[0];
4178 }
4179
4180 camera3_notify_msg_t notify_msg = {};
4181 notify_msg.type = CAMERA3_MSG_SHUTTER;
4182 notify_msg.message.shutter.frame_number = iter->frame_number;
4183 notify_msg.message.shutter.timestamp = timestamp;
4184 orchestrateNotify(&notify_msg);
4185 iter->shutter_notified = true;
4186 }
4187
4188 result.input_buffer = iter->input_buffer;
4189
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004190 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4191 // If the result metadata belongs to a live request, notify errors for previous pending
4192 // live requests.
4193 mPendingLiveRequest--;
4194
4195 CameraMetadata dummyMetadata;
4196 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4197 result.result = dummyMetadata.release();
4198
4199 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004200
 4201            // partial_result should be PARTIAL_RESULT_COUNT in case of
4202 // ERROR_RESULT.
4203 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4204 result.partial_result = PARTIAL_RESULT_COUNT;
4205
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004206 } else {
4207 iter++;
4208 continue;
4209 }
4210
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004211 // Prepare output buffer array
4212 for (auto bufferInfoIter = iter->buffers.begin();
4213 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4214 if (bufferInfoIter->buffer != nullptr) {
4215
4216 QCamera3Channel *channel =
4217 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4218 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4219
4220 // Check if this buffer is a dropped frame.
4221 auto frameDropIter = mPendingFrameDropList.begin();
4222 while (frameDropIter != mPendingFrameDropList.end()) {
4223 if((frameDropIter->stream_ID == streamID) &&
4224 (frameDropIter->frame_number == frameNumber)) {
4225 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4226 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4227 streamID);
4228 mPendingFrameDropList.erase(frameDropIter);
4229 break;
4230 } else {
4231 frameDropIter++;
4232 }
4233 }
4234
4235 // Check buffer error status
4236 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4237 bufferInfoIter->buffer->buffer);
4238 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4239
4240 outputBuffers.push_back(*(bufferInfoIter->buffer));
4241 free(bufferInfoIter->buffer);
4242 bufferInfoIter->buffer = NULL;
4243 }
4244 }
4245
4246 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4247 result.num_output_buffers = outputBuffers.size();
4248
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004249 orchestrateResult(&result);
4250
4251 // For reprocessing, result metadata is the same as settings so do not free it here to
4252 // avoid double free.
4253 if (result.result != iter->settings) {
4254 free_camera_metadata((camera_metadata_t *)result.result);
4255 }
4256 iter->resultMetadata = nullptr;
4257 iter = erasePendingRequest(iter);
4258 }
4259
4260 if (liveRequest) {
4261 for (auto &iter : mPendingRequestsList) {
4262 // Increment pipeline depth for the following pending requests.
4263 if (iter.frame_number > frameNumber) {
4264 iter.pipeline_depth++;
4265 }
4266 }
4267 }
4268
4269 unblockRequestIfNecessary();
4270}
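// Editorial note (illustrative walk-through, not part of the original source):
// handlePendingResultsWithLock() releases shutters and result metadata strictly
// in frame-number order. For example, if frame 10 is an HDR+ request whose
// result has not come back yet and the metadata for live frame 11 arrives,
// frame 11 is cached (readyToSend stays false); once frame 10's result is
// available, 10 and then the cached 11 are delivered in the same pass. If the
// arriving metadata belongs to a live request while earlier *live* requests are
// still missing their metadata, those earlier requests are instead completed
// with CAMERA3_MSG_ERROR_RESULT (the `iter->frame_number < frameNumber` branch
// above).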
4271
Thierry Strudel3d639192016-09-09 11:52:26 -07004272/*===========================================================================
4273 * FUNCTION : unblockRequestIfNecessary
4274 *
4275 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4276 * that mMutex is held when this function is called.
4277 *
4278 * PARAMETERS :
4279 *
4280 * RETURN :
4281 *
4282 *==========================================================================*/
4283void QCamera3HardwareInterface::unblockRequestIfNecessary()
4284{
4285 // Unblock process_capture_request
4286 pthread_cond_signal(&mRequestCond);
4287}
4288
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004289/*===========================================================================
4290 * FUNCTION : isHdrSnapshotRequest
4291 *
 4292 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4293 *
4294 * PARAMETERS : camera3 request structure
4295 *
4296 * RETURN : boolean decision variable
4297 *
4298 *==========================================================================*/
4299bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4300{
4301 if (request == NULL) {
4302 LOGE("Invalid request handle");
4303 assert(0);
4304 return false;
4305 }
4306
4307 if (!mForceHdrSnapshot) {
4308 CameraMetadata frame_settings;
4309 frame_settings = request->settings;
4310
4311 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4312 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4313 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4314 return false;
4315 }
4316 } else {
4317 return false;
4318 }
4319
4320 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4321 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4322 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4323 return false;
4324 }
4325 } else {
4326 return false;
4327 }
4328 }
4329
4330 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4331 if (request->output_buffers[i].stream->format
4332 == HAL_PIXEL_FORMAT_BLOB) {
4333 return true;
4334 }
4335 }
4336
4337 return false;
4338}
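// Illustrative sketch (editorial addition, not part of the original HAL): a
// request is treated as an HDR snapshot when it carries at least one BLOB
// output buffer and either mForceHdrSnapshot is set or its settings select the
// HDR scene mode, e.g.:
//
//     CameraMetadata settings;
//     uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
//     uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
//     settings.update(ANDROID_CONTROL_MODE, &mode, 1);
//     settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
//     request->settings = settings.getAndLock();   // plus >= 1 BLOB output buffer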
4339/*===========================================================================
4340 * FUNCTION : orchestrateRequest
4341 *
4342 * DESCRIPTION: Orchestrates a capture request from camera service
4343 *
4344 * PARAMETERS :
4345 * @request : request from framework to process
4346 *
4347 * RETURN : Error status codes
4348 *
4349 *==========================================================================*/
4350int32_t QCamera3HardwareInterface::orchestrateRequest(
4351 camera3_capture_request_t *request)
4352{
4353
4354 uint32_t originalFrameNumber = request->frame_number;
4355 uint32_t originalOutputCount = request->num_output_buffers;
4356 const camera_metadata_t *original_settings = request->settings;
4357 List<InternalRequest> internallyRequestedStreams;
4358 List<InternalRequest> emptyInternalList;
4359
4360 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4361 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4362 uint32_t internalFrameNumber;
4363 CameraMetadata modified_meta;
4364
4365
4366 /* Add Blob channel to list of internally requested streams */
4367 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4368 if (request->output_buffers[i].stream->format
4369 == HAL_PIXEL_FORMAT_BLOB) {
4370 InternalRequest streamRequested;
4371 streamRequested.meteringOnly = 1;
4372 streamRequested.need_metadata = 0;
4373 streamRequested.stream = request->output_buffers[i].stream;
4374 internallyRequestedStreams.push_back(streamRequested);
4375 }
4376 }
4377 request->num_output_buffers = 0;
4378 auto itr = internallyRequestedStreams.begin();
4379
4380 /* Modify setting to set compensation */
4381 modified_meta = request->settings;
4382 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4383 uint8_t aeLock = 1;
4384 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4385 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4386 camera_metadata_t *modified_settings = modified_meta.release();
4387 request->settings = modified_settings;
4388
4389 /* Capture Settling & -2x frame */
4390 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4391 request->frame_number = internalFrameNumber;
4392 processCaptureRequest(request, internallyRequestedStreams);
4393
4394 request->num_output_buffers = originalOutputCount;
4395 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4396 request->frame_number = internalFrameNumber;
4397 processCaptureRequest(request, emptyInternalList);
4398 request->num_output_buffers = 0;
4399
4400 modified_meta = modified_settings;
4401 expCompensation = 0;
4402 aeLock = 1;
4403 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4404 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4405 modified_settings = modified_meta.release();
4406 request->settings = modified_settings;
4407
4408 /* Capture Settling & 0X frame */
4409
4410 itr = internallyRequestedStreams.begin();
4411 if (itr == internallyRequestedStreams.end()) {
4412 LOGE("Error Internally Requested Stream list is empty");
4413 assert(0);
4414 } else {
4415 itr->need_metadata = 0;
4416 itr->meteringOnly = 1;
4417 }
4418
4419 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4420 request->frame_number = internalFrameNumber;
4421 processCaptureRequest(request, internallyRequestedStreams);
4422
4423 itr = internallyRequestedStreams.begin();
4424 if (itr == internallyRequestedStreams.end()) {
4425 ALOGE("Error Internally Requested Stream list is empty");
4426 assert(0);
4427 } else {
4428 itr->need_metadata = 1;
4429 itr->meteringOnly = 0;
4430 }
4431
4432 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4433 request->frame_number = internalFrameNumber;
4434 processCaptureRequest(request, internallyRequestedStreams);
4435
4436 /* Capture 2X frame*/
4437 modified_meta = modified_settings;
4438 expCompensation = GB_HDR_2X_STEP_EV;
4439 aeLock = 1;
4440 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4441 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4442 modified_settings = modified_meta.release();
4443 request->settings = modified_settings;
4444
4445 itr = internallyRequestedStreams.begin();
4446 if (itr == internallyRequestedStreams.end()) {
4447 ALOGE("Error Internally Requested Stream list is empty");
4448 assert(0);
4449 } else {
4450 itr->need_metadata = 0;
4451 itr->meteringOnly = 1;
4452 }
4453 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4454 request->frame_number = internalFrameNumber;
4455 processCaptureRequest(request, internallyRequestedStreams);
4456
4457 itr = internallyRequestedStreams.begin();
4458 if (itr == internallyRequestedStreams.end()) {
4459 ALOGE("Error Internally Requested Stream list is empty");
4460 assert(0);
4461 } else {
4462 itr->need_metadata = 1;
4463 itr->meteringOnly = 0;
4464 }
4465
4466 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4467 request->frame_number = internalFrameNumber;
4468 processCaptureRequest(request, internallyRequestedStreams);
4469
4470
4471 /* Capture 2X on original streaming config*/
4472 internallyRequestedStreams.clear();
4473
4474 /* Restore original settings pointer */
4475 request->settings = original_settings;
4476 } else {
4477 uint32_t internalFrameNumber;
4478 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4479 request->frame_number = internalFrameNumber;
4480 return processCaptureRequest(request, internallyRequestedStreams);
4481 }
4482
4483 return NO_ERROR;
4484}
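// Editorial summary (not part of the original source) of the HDR snapshot
// expansion implemented above; every capture except the framework-visible one
// uses an internally generated frame number from _orchestrationDb:
//   1. metering-only settling frame at GB_HDR_HALF_STEP_EV with AE locked;
//   2. the framework-visible capture on the original output buffers, mapped
//      back to the original frame number;
//   3. metering-only settling frame, then a full internal BLOB capture
//      (need_metadata = 1) at 0 EV compensation;
//   4. metering-only settling frame, then a full internal BLOB capture at
//      GB_HDR_2X_STEP_EV;
// after which internallyRequestedStreams is cleared and the original settings
// pointer is restored. Non-HDR requests go straight to processCaptureRequest()
// with a single internal-to-framework frame number mapping.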
4485
4486/*===========================================================================
4487 * FUNCTION : orchestrateResult
4488 *
4489 * DESCRIPTION: Orchestrates a capture result to camera service
4490 *
4491 * PARAMETERS :
 4492 *   @result : capture result to be sent back to the framework
4493 *
4494 * RETURN :
4495 *
4496 *==========================================================================*/
4497void QCamera3HardwareInterface::orchestrateResult(
4498 camera3_capture_result_t *result)
4499{
4500 uint32_t frameworkFrameNumber;
4501 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4502 frameworkFrameNumber);
4503 if (rc != NO_ERROR) {
4504 LOGE("Cannot find translated frameworkFrameNumber");
4505 assert(0);
4506 } else {
4507 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004508 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004509 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004510 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004511 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4512 camera_metadata_entry_t entry;
4513 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4514 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004515 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004516 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4517 if (ret != OK)
4518 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004519 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004520 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004521 result->frame_number = frameworkFrameNumber;
4522 mCallbackOps->process_capture_result(mCallbackOps, result);
4523 }
4524 }
4525}
4526
4527/*===========================================================================
4528 * FUNCTION : orchestrateNotify
4529 *
4530 * DESCRIPTION: Orchestrates a notify to camera service
4531 *
4532 * PARAMETERS :
 4533 *   @notify_msg : notify message to be sent to the framework
4534 *
4535 * RETURN :
4536 *
4537 *==========================================================================*/
4538void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4539{
4540 uint32_t frameworkFrameNumber;
4541 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004542 int32_t rc = NO_ERROR;
4543
4544 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004545 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004546
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004547 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004548 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4549 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4550 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004551 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004552 LOGE("Cannot find translated frameworkFrameNumber");
4553 assert(0);
4554 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004555 }
4556 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004557
4558 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4559 LOGD("Internal Request drop the notifyCb");
4560 } else {
4561 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4562 mCallbackOps->notify(mCallbackOps, notify_msg);
4563 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004564}
4565
4566/*===========================================================================
4567 * FUNCTION : FrameNumberRegistry
4568 *
4569 * DESCRIPTION: Constructor
4570 *
4571 * PARAMETERS :
4572 *
4573 * RETURN :
4574 *
4575 *==========================================================================*/
4576FrameNumberRegistry::FrameNumberRegistry()
4577{
4578 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4579}
4580
4581/*===========================================================================
4582 * FUNCTION : ~FrameNumberRegistry
4583 *
4584 * DESCRIPTION: Destructor
4585 *
4586 * PARAMETERS :
4587 *
4588 * RETURN :
4589 *
4590 *==========================================================================*/
4591FrameNumberRegistry::~FrameNumberRegistry()
4592{
4593}
4594
4595/*===========================================================================
4596 * FUNCTION : PurgeOldEntriesLocked
4597 *
 4598 * FUNCTION   : purgeOldEntriesLocked
 4599 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4600 * PARAMETERS :
4601 *
4602 * RETURN : NONE
4603 *
4604 *==========================================================================*/
4605void FrameNumberRegistry::purgeOldEntriesLocked()
4606{
4607 while (_register.begin() != _register.end()) {
4608 auto itr = _register.begin();
4609 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4610 _register.erase(itr);
4611 } else {
4612 return;
4613 }
4614 }
4615}
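// Editorial example (not part of the original source), with hypothetical
// values: if _nextFreeInternalNumber is 1000 and FRAME_REGISTER_LRU_SIZE is
// 300, every mapping whose internal frame number is below 700 is erased, so
// the registry stays bounded for arbitrarily long sessions.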
4616
4617/*===========================================================================
4618 * FUNCTION : allocStoreInternalFrameNumber
4619 *
4620 * DESCRIPTION: Method to note down a framework request and associate a new
4621 * internal request number against it
4622 *
4623 * PARAMETERS :
4624 * @fFrameNumber: Identifier given by framework
4625 * @internalFN : Output parameter which will have the newly generated internal
4626 * entry
4627 *
4628 * RETURN : Error code
4629 *
4630 *==========================================================================*/
4631int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4632 uint32_t &internalFrameNumber)
4633{
4634 Mutex::Autolock lock(mRegistryLock);
4635 internalFrameNumber = _nextFreeInternalNumber++;
4636 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4637 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4638 purgeOldEntriesLocked();
4639 return NO_ERROR;
4640}
4641
4642/*===========================================================================
4643 * FUNCTION : generateStoreInternalFrameNumber
4644 *
4645 * DESCRIPTION: Method to associate a new internal request number independent
 4646 *              of any association with framework requests
4647 *
4648 * PARAMETERS :
 4649 * @internalFrame#: Output parameter which will have the newly generated internal
 4650 *              frame number
4651 *
4652 * RETURN : Error code
4653 *
4654 *==========================================================================*/
4655int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4656{
4657 Mutex::Autolock lock(mRegistryLock);
4658 internalFrameNumber = _nextFreeInternalNumber++;
4659 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4660 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4661 purgeOldEntriesLocked();
4662 return NO_ERROR;
4663}
4664
4665/*===========================================================================
4666 * FUNCTION : getFrameworkFrameNumber
4667 *
4668 * DESCRIPTION: Method to query the framework framenumber given an internal #
4669 *
4670 * PARAMETERS :
4671 * @internalFrame#: Internal reference
4672 * @frameworkframenumber: Output parameter holding framework frame entry
4673 *
4674 * RETURN : Error code
4675 *
4676 *==========================================================================*/
4677int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4678 uint32_t &frameworkFrameNumber)
4679{
4680 Mutex::Autolock lock(mRegistryLock);
4681 auto itr = _register.find(internalFrameNumber);
4682 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004683 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004684 return -ENOENT;
4685 }
4686
4687 frameworkFrameNumber = itr->second;
4688 purgeOldEntriesLocked();
4689 return NO_ERROR;
4690}
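// Illustrative usage sketch (editorial addition, not part of the original
// source), mirroring how orchestrateRequest() and orchestrateResult() use the
// registry defined above:
//
//     FrameNumberRegistry db;
//     uint32_t internalFn, internalOnlyFn, frameworkFn;
//
//     // Framework-visible request: remember the mapping.
//     db.allocStoreInternalFrameNumber(/*frameworkFrameNumber*/ 42, internalFn);
//
//     // Internal-only request (e.g. an HDR settling frame): no framework entry.
//     db.generateStoreInternalFrameNumber(internalOnlyFn);
//
//     // Result path: translate back before calling up to the framework.
//     if (db.getFrameworkFrameNumber(internalFn, frameworkFn) == NO_ERROR &&
//             frameworkFn != EMPTY_FRAMEWORK_FRAME_NUMBER) {
//         // deliver the result as frameworkFn (== 42 here)
//     }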
Thierry Strudel3d639192016-09-09 11:52:26 -07004691
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004692status_t QCamera3HardwareInterface::fillPbStreamConfig(
4693 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4694 QCamera3Channel *channel, uint32_t streamIndex) {
4695 if (config == nullptr) {
4696 LOGE("%s: config is null", __FUNCTION__);
4697 return BAD_VALUE;
4698 }
4699
4700 if (channel == nullptr) {
4701 LOGE("%s: channel is null", __FUNCTION__);
4702 return BAD_VALUE;
4703 }
4704
4705 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4706 if (stream == nullptr) {
4707 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4708 return NAME_NOT_FOUND;
4709 }
4710
4711 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4712 if (streamInfo == nullptr) {
4713 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4714 return NAME_NOT_FOUND;
4715 }
4716
4717 config->id = pbStreamId;
4718 config->image.width = streamInfo->dim.width;
4719 config->image.height = streamInfo->dim.height;
4720 config->image.padding = 0;
4721 config->image.format = pbStreamFormat;
4722
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004723 uint32_t totalPlaneSize = 0;
4724
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004725 // Fill plane information.
4726 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4727 pbcamera::PlaneConfiguration plane;
4728 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4729 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4730 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004731
4732 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004733 }
4734
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004735 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004736 return OK;
4737}
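// Illustrative usage sketch (editorial addition; the stream id and pixel
// format below are assumptions, not values taken from this file):
//
//     pbcamera::StreamConfiguration rawConfig;
//     status_t res = fillPbStreamConfig(&rawConfig, /*pbStreamId*/ 0,
//             HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*streamIndex*/ 0);
//     if (res != OK) {
//         LOGE("%s: Failed to fill RAW stream config: %s (%d)",
//                 __FUNCTION__, strerror(-res), res);
//     }
//
// config->image.padding ends up as frame_len minus the summed
// stride * scanline of all planes, i.e. the trailing padding the ISP requires
// beyond the plane data itself.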
4738
Thierry Strudel3d639192016-09-09 11:52:26 -07004739/*===========================================================================
4740 * FUNCTION : processCaptureRequest
4741 *
4742 * DESCRIPTION: process a capture request from camera service
4743 *
4744 * PARAMETERS :
4745 * @request : request from framework to process
4746 *
4747 * RETURN :
4748 *
4749 *==========================================================================*/
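// Editorial overview (not part of the original source): when this is the first
// request after configure_streams (mState == CONFIGURED), the function also
//   - sends an "unconfigure" (zeroed CAM_INTF_META_STREAM_INFO) so the backend
//     releases ISP resources from the previous session,
//   - resolves the per-stream IS/EIS type from the persist.camera.is_type*
//     properties and the reported capabilities,
//   - pushes capture intent, HAL version, tintless, CDS and stream info to the
//     backend, queries the sensor mode and initializes every channel,
//   - sets bundle info and, if requested via metadata, the dual-camera link;
// only then does the per-request handling below run.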
4750int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004751 camera3_capture_request_t *request,
4752 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004753{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004754 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004755 int rc = NO_ERROR;
4756 int32_t request_id;
4757 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004758 bool isVidBufRequested = false;
4759 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004760 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004761
4762 pthread_mutex_lock(&mMutex);
4763
4764 // Validate current state
4765 switch (mState) {
4766 case CONFIGURED:
4767 case STARTED:
4768 /* valid state */
4769 break;
4770
4771 case ERROR:
4772 pthread_mutex_unlock(&mMutex);
4773 handleCameraDeviceError();
4774 return -ENODEV;
4775
4776 default:
4777 LOGE("Invalid state %d", mState);
4778 pthread_mutex_unlock(&mMutex);
4779 return -ENODEV;
4780 }
4781
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004782 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004783 if (rc != NO_ERROR) {
4784 LOGE("incoming request is not valid");
4785 pthread_mutex_unlock(&mMutex);
4786 return rc;
4787 }
4788
4789 meta = request->settings;
4790
4791 // For first capture request, send capture intent, and
4792 // stream on all streams
4793 if (mState == CONFIGURED) {
4794 // send an unconfigure to the backend so that the isp
4795 // resources are deallocated
4796 if (!mFirstConfiguration) {
4797 cam_stream_size_info_t stream_config_info;
4798 int32_t hal_version = CAM_HAL_V3;
4799 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4800 stream_config_info.buffer_info.min_buffers =
4801 MIN_INFLIGHT_REQUESTS;
4802 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004803 m_bIs4KVideo ? 0 :
4804 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004805 clear_metadata_buffer(mParameters);
4806 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4807 CAM_INTF_PARM_HAL_VERSION, hal_version);
4808 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4809 CAM_INTF_META_STREAM_INFO, stream_config_info);
4810 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4811 mParameters);
4812 if (rc < 0) {
4813 LOGE("set_parms for unconfigure failed");
4814 pthread_mutex_unlock(&mMutex);
4815 return rc;
4816 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004817
Thierry Strudel3d639192016-09-09 11:52:26 -07004818 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004819 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004820 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004821 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004822 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004823 property_get("persist.camera.is_type", is_type_value, "4");
4824 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4825 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4826 property_get("persist.camera.is_type_preview", is_type_value, "4");
4827 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4828 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004829
4830 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4831 int32_t hal_version = CAM_HAL_V3;
4832 uint8_t captureIntent =
4833 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4834 mCaptureIntent = captureIntent;
4835 clear_metadata_buffer(mParameters);
4836 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4837 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4838 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004839 if (mFirstConfiguration) {
4840 // configure instant AEC
4841 // Instant AEC is a session based parameter and it is needed only
4842 // once per complete session after open camera.
4843 // i.e. This is set only once for the first capture request, after open camera.
4844 setInstantAEC(meta);
4845 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004846 uint8_t fwkVideoStabMode=0;
4847 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4848 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4849 }
4850
Xue Tuecac74e2017-04-17 13:58:15 -07004851 // If EIS setprop is enabled then only turn it on for video/preview
4852 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004853 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004854 int32_t vsMode;
4855 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4856 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4857 rc = BAD_VALUE;
4858 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004859 LOGD("setEis %d", setEis);
4860 bool eis3Supported = false;
4861 size_t count = IS_TYPE_MAX;
4862 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4863 for (size_t i = 0; i < count; i++) {
4864 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4865 eis3Supported = true;
4866 break;
4867 }
4868 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004869
4870 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004871 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004872 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4873 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004874 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4875 is_type = isTypePreview;
4876 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4877 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4878 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004879 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004880 } else {
4881 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004882 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004883 } else {
4884 is_type = IS_TYPE_NONE;
4885 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004886 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004887 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004888 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4889 }
4890 }
4891
4892 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4893 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4894
Thierry Strudel54dc9782017-02-15 12:12:10 -08004895 //Disable tintless only if the property is set to 0
4896 memset(prop, 0, sizeof(prop));
4897 property_get("persist.camera.tintless.enable", prop, "1");
4898 int32_t tintless_value = atoi(prop);
4899
Thierry Strudel3d639192016-09-09 11:52:26 -07004900 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4901 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004902
Thierry Strudel3d639192016-09-09 11:52:26 -07004903 //Disable CDS for HFR mode or if DIS/EIS is on.
4904 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4905 //after every configure_stream
4906 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4907 (m_bIsVideo)) {
4908 int32_t cds = CAM_CDS_MODE_OFF;
4909 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4910 CAM_INTF_PARM_CDS_MODE, cds))
4911 LOGE("Failed to disable CDS for HFR mode");
4912
4913 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004914
4915 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4916 uint8_t* use_av_timer = NULL;
4917
4918 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004919 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004920 use_av_timer = &m_debug_avtimer;
4921 }
4922 else{
4923 use_av_timer =
4924 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004925 if (use_av_timer) {
4926 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4927 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004928 }
4929
4930 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4931 rc = BAD_VALUE;
4932 }
4933 }
4934
Thierry Strudel3d639192016-09-09 11:52:26 -07004935 setMobicat();
4936
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004937 uint8_t nrMode = 0;
4938 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4939 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4940 }
4941
Thierry Strudel3d639192016-09-09 11:52:26 -07004942 /* Set fps and hfr mode while sending meta stream info so that sensor
4943 * can configure appropriate streaming mode */
4944 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004945 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4946 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004947 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4948 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004949 if (rc == NO_ERROR) {
4950 int32_t max_fps =
4951 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004952 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004953 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4954 }
4955 /* For HFR, more buffers are dequeued upfront to improve the performance */
4956 if (mBatchSize) {
4957 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4958 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4959 }
4960 }
4961 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004962 LOGE("setHalFpsRange failed");
4963 }
4964 }
4965 if (meta.exists(ANDROID_CONTROL_MODE)) {
4966 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4967 rc = extractSceneMode(meta, metaMode, mParameters);
4968 if (rc != NO_ERROR) {
4969 LOGE("extractSceneMode failed");
4970 }
4971 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004972 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004973
Thierry Strudel04e026f2016-10-10 11:27:36 -07004974 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4975 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4976 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4977 rc = setVideoHdrMode(mParameters, vhdr);
4978 if (rc != NO_ERROR) {
 4979                LOGE("setVideoHdrMode failed");
4980 }
4981 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004982
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004983 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004984 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004985 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004986 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4987 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
4988 sensorModeFullFov)) {
4989 rc = BAD_VALUE;
4990 }
4991 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004992 //TODO: validate the arguments, HSV scenemode should have only the
4993 //advertised fps ranges
4994
4995 /*set the capture intent, hal version, tintless, stream info,
 4996      *and DIS enable parameters to the backend*/
4997 LOGD("set_parms META_STREAM_INFO " );
4998 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004999 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5000 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005001 mStreamConfigInfo.type[i],
5002 mStreamConfigInfo.stream_sizes[i].width,
5003 mStreamConfigInfo.stream_sizes[i].height,
5004 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005005 mStreamConfigInfo.format[i],
5006 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005007 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005008
Thierry Strudel3d639192016-09-09 11:52:26 -07005009 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5010 mParameters);
5011 if (rc < 0) {
5012 LOGE("set_parms failed for hal version, stream info");
5013 }
5014
Chien-Yu Chenee335912017-02-09 17:53:20 -08005015 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5016 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005017 if (rc != NO_ERROR) {
5018 LOGE("Failed to get sensor output size");
5019 pthread_mutex_unlock(&mMutex);
5020 goto error_exit;
5021 }
5022
5023 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5024 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08005025 mSensorModeInfo.active_array_size.width,
5026 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005027
5028 /* Set batchmode before initializing channel. Since registerBuffer
5029 * internally initializes some of the channels, better set batchmode
5030 * even before first register buffer */
5031 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5032 it != mStreamInfo.end(); it++) {
5033 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5034 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5035 && mBatchSize) {
5036 rc = channel->setBatchSize(mBatchSize);
5037 //Disable per frame map unmap for HFR/batchmode case
5038 rc |= channel->setPerFrameMapUnmap(false);
5039 if (NO_ERROR != rc) {
5040 LOGE("Channel init failed %d", rc);
5041 pthread_mutex_unlock(&mMutex);
5042 goto error_exit;
5043 }
5044 }
5045 }
5046
5047 //First initialize all streams
5048 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5049 it != mStreamInfo.end(); it++) {
5050 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005051
5052 /* Initial value of NR mode is needed before stream on */
5053 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005054 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5055 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005056 setEis) {
5057 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5058 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5059 is_type = mStreamConfigInfo.is_type[i];
5060 break;
5061 }
5062 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005063 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005064 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005065 rc = channel->initialize(IS_TYPE_NONE);
5066 }
5067 if (NO_ERROR != rc) {
5068 LOGE("Channel initialization failed %d", rc);
5069 pthread_mutex_unlock(&mMutex);
5070 goto error_exit;
5071 }
5072 }
5073
5074 if (mRawDumpChannel) {
5075 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5076 if (rc != NO_ERROR) {
5077 LOGE("Error: Raw Dump Channel init failed");
5078 pthread_mutex_unlock(&mMutex);
5079 goto error_exit;
5080 }
5081 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005082 if (mHdrPlusRawSrcChannel) {
5083 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5084 if (rc != NO_ERROR) {
5085 LOGE("Error: HDR+ RAW Source Channel init failed");
5086 pthread_mutex_unlock(&mMutex);
5087 goto error_exit;
5088 }
5089 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005090 if (mSupportChannel) {
5091 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5092 if (rc < 0) {
5093 LOGE("Support channel initialization failed");
5094 pthread_mutex_unlock(&mMutex);
5095 goto error_exit;
5096 }
5097 }
5098 if (mAnalysisChannel) {
5099 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5100 if (rc < 0) {
5101 LOGE("Analysis channel initialization failed");
5102 pthread_mutex_unlock(&mMutex);
5103 goto error_exit;
5104 }
5105 }
5106 if (mDummyBatchChannel) {
5107 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5108 if (rc < 0) {
5109 LOGE("mDummyBatchChannel setBatchSize failed");
5110 pthread_mutex_unlock(&mMutex);
5111 goto error_exit;
5112 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005113 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005114 if (rc < 0) {
5115 LOGE("mDummyBatchChannel initialization failed");
5116 pthread_mutex_unlock(&mMutex);
5117 goto error_exit;
5118 }
5119 }
5120
5121 // Set bundle info
5122 rc = setBundleInfo();
5123 if (rc < 0) {
5124 LOGE("setBundleInfo failed %d", rc);
5125 pthread_mutex_unlock(&mMutex);
5126 goto error_exit;
5127 }
5128
5129 //update settings from app here
5130 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5131 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5132 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5133 }
5134 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5135 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5136 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5137 }
5138 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5139 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5140 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5141
5142 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5143 (mLinkedCameraId != mCameraId) ) {
5144 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5145 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005146 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005147 goto error_exit;
5148 }
5149 }
5150
5151 // add bundle related cameras
5152 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5153 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005154 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5155 &m_pDualCamCmdPtr->bundle_info;
5156 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005157 if (mIsDeviceLinked)
5158 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5159 else
5160 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5161
5162 pthread_mutex_lock(&gCamLock);
5163
5164 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5165 LOGE("Dualcam: Invalid Session Id ");
5166 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005167 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005168 goto error_exit;
5169 }
5170
5171 if (mIsMainCamera == 1) {
5172 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5173 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005174 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005175 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005176 // related session id should be session id of linked session
5177 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5178 } else {
5179 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5180 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005181 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005182 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005183 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5184 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005185 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005186 pthread_mutex_unlock(&gCamLock);
5187
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005188 rc = mCameraHandle->ops->set_dual_cam_cmd(
5189 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005190 if (rc < 0) {
5191 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005192 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005193 goto error_exit;
5194 }
5195 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005196 goto no_error;
5197error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005198 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005199 return rc;
5200no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005201 mWokenUpByDaemon = false;
5202 mPendingLiveRequest = 0;
5203 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005204 }
5205
Chien-Yu Chenee335912017-02-09 17:53:20 -08005206 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chened0a4c92017-05-01 18:25:03 +00005207 {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005208 Mutex::Autolock l(gHdrPlusClientLock);
5209 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5210 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5211 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5212 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5213 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5214 rc = enableHdrPlusModeLocked();
Chien-Yu Chenee335912017-02-09 17:53:20 -08005215 if (rc != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005216 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -08005217 pthread_mutex_unlock(&mMutex);
5218 return rc;
5219 }
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005220
5221 mFirstPreviewIntentSeen = true;
Chien-Yu Chenee335912017-02-09 17:53:20 -08005222 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08005223 }
5224
Thierry Strudel3d639192016-09-09 11:52:26 -07005225 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005226 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005227
5228 if (mFlushPerf) {
5229 //we cannot accept any requests during flush
5230 LOGE("process_capture_request cannot proceed during flush");
5231 pthread_mutex_unlock(&mMutex);
5232 return NO_ERROR; //should return an error
5233 }
5234
5235 if (meta.exists(ANDROID_REQUEST_ID)) {
5236 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5237 mCurrentRequestId = request_id;
5238 LOGD("Received request with id: %d", request_id);
5239 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5240 LOGE("Unable to find request id field, \
5241 & no previous id available");
5242 pthread_mutex_unlock(&mMutex);
5243 return NAME_NOT_FOUND;
5244 } else {
5245 LOGD("Re-using old request id");
5246 request_id = mCurrentRequestId;
5247 }
5248
5249 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5250 request->num_output_buffers,
5251 request->input_buffer,
5252 frameNumber);
5253 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005254 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005255 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005256 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005257 uint32_t snapshotStreamId = 0;
5258 for (size_t i = 0; i < request->num_output_buffers; i++) {
5259 const camera3_stream_buffer_t& output = request->output_buffers[i];
5260 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5261
Emilian Peev7650c122017-01-19 08:24:33 -08005262 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5263 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005264 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005265 blob_request = 1;
5266 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5267 }
5268
5269 if (output.acquire_fence != -1) {
5270 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5271 close(output.acquire_fence);
5272 if (rc != OK) {
5273 LOGE("sync wait failed %d", rc);
5274 pthread_mutex_unlock(&mMutex);
5275 return rc;
5276 }
5277 }
5278
Emilian Peev0f3c3162017-03-15 12:57:46 +00005279 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5280 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005281 depthRequestPresent = true;
5282 continue;
5283 }
5284
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005285 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005286 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005287
5288 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5289 isVidBufRequested = true;
5290 }
5291 }
5292
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005293 //FIXME: Add checks to ensure to dups in validateCaptureRequest
5294 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5295 itr++) {
5296 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5297 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5298 channel->getStreamID(channel->getStreamTypeMask());
5299
5300 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5301 isVidBufRequested = true;
5302 }
5303 }
5304
Thierry Strudel3d639192016-09-09 11:52:26 -07005305 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005306 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005307 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005308 }
5309 if (blob_request && mRawDumpChannel) {
5310 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005311 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005312 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005313 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005314 }
5315
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005316 {
5317 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5318 // Request a RAW buffer if
5319 // 1. mHdrPlusRawSrcChannel is valid.
 5320        // 2. frameNumber is a multiple of kHdrPlusRawPeriod (to limit the RAW capture rate).
5321 // 3. There is no pending HDR+ request.
5322 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5323 mHdrPlusPendingRequests.size() == 0) {
5324 streamsArray.stream_request[streamsArray.num_streams].streamID =
5325 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5326 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5327 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005328 }
5329
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005330 //extract capture intent
5331 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5332 mCaptureIntent =
5333 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5334 }
5335
5336 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5337 mCacMode =
5338 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5339 }
5340
5341 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005342 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005343
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005344 {
5345 Mutex::Autolock l(gHdrPlusClientLock);
5346 // If this request has a still capture intent, try to submit an HDR+ request.
5347 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5348 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5349 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5350 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005351 }
5352
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005353 if (hdrPlusRequest) {
5354 // For a HDR+ request, just set the frame parameters.
5355 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5356 if (rc < 0) {
5357 LOGE("fail to set frame parameters");
5358 pthread_mutex_unlock(&mMutex);
5359 return rc;
5360 }
5361 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005362 /* Parse the settings:
5363 * - For every request in NORMAL MODE
5364 * - For every request in HFR mode during preview only case
5365 * - For first request of every batch in HFR mode during video
 5366         *   recording. In batch mode the same settings, except the frame
 5367         *   number, are repeated in each request of the batch.
5368 */
5369 if (!mBatchSize ||
5370 (mBatchSize && !isVidBufRequested) ||
5371 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005372 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005373 if (rc < 0) {
5374 LOGE("fail to set frame parameters");
5375 pthread_mutex_unlock(&mMutex);
5376 return rc;
5377 }
5378 }
 5379        /* For batch-mode HFR, setFrameParameters is not called for every
 5380         * request; only the frame number of the latest request is parsed.
 5381         * Keep track of the first and last frame numbers in a batch so that
 5382         * metadata for all frame numbers of the batch can be duplicated in
 5383         * handleBatchMetadata */
5384 if (mBatchSize) {
5385 if (!mToBeQueuedVidBufs) {
5386 //start of the batch
5387 mFirstFrameNumberInBatch = request->frame_number;
5388 }
5389 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5390 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5391 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005392 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005393 return BAD_VALUE;
5394 }
5395 }
5396 if (mNeedSensorRestart) {
5397 /* Unlock the mutex as restartSensor waits on the channels to be
5398 * stopped, which in turn calls stream callback functions -
5399 * handleBufferWithLock and handleMetadataWithLock */
5400 pthread_mutex_unlock(&mMutex);
5401 rc = dynamicUpdateMetaStreamInfo();
5402 if (rc != NO_ERROR) {
5403 LOGE("Restarting the sensor failed");
5404 return BAD_VALUE;
5405 }
5406 mNeedSensorRestart = false;
5407 pthread_mutex_lock(&mMutex);
5408 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005409 if(mResetInstantAEC) {
5410 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5411 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5412 mResetInstantAEC = false;
5413 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005414 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005415 if (request->input_buffer->acquire_fence != -1) {
5416 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5417 close(request->input_buffer->acquire_fence);
5418 if (rc != OK) {
5419 LOGE("input buffer sync wait failed %d", rc);
5420 pthread_mutex_unlock(&mMutex);
5421 return rc;
5422 }
5423 }
5424 }
5425
5426 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5427 mLastCustIntentFrmNum = frameNumber;
5428 }
5429 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005430 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005431 pendingRequestIterator latestRequest;
5432 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005433 pendingRequest.num_buffers = depthRequestPresent ?
5434 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005435 pendingRequest.request_id = request_id;
5436 pendingRequest.blob_request = blob_request;
5437 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005438 if (request->input_buffer) {
5439 pendingRequest.input_buffer =
5440 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5441 *(pendingRequest.input_buffer) = *(request->input_buffer);
5442 pInputBuffer = pendingRequest.input_buffer;
5443 } else {
5444 pendingRequest.input_buffer = NULL;
5445 pInputBuffer = NULL;
5446 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005447 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005448
5449 pendingRequest.pipeline_depth = 0;
5450 pendingRequest.partial_result_cnt = 0;
5451 extractJpegMetadata(mCurJpegMeta, request);
5452 pendingRequest.jpegMetadata = mCurJpegMeta;
5453 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5454 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005455 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005456 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5457 mHybridAeEnable =
5458 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5459 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005460
5461 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5462 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005463 /* DevCamDebug metadata processCaptureRequest */
5464 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5465 mDevCamDebugMetaEnable =
5466 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5467 }
5468 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5469 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005470
5471 //extract CAC info
5472 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5473 mCacMode =
5474 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5475 }
5476 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005477 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005478
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005479 // extract enableZsl info
5480 if (gExposeEnableZslKey) {
5481 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5482 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5483 mZslEnabled = pendingRequest.enableZsl;
5484 } else {
5485 pendingRequest.enableZsl = mZslEnabled;
5486 }
5487 }
5488
Thierry Strudel3d639192016-09-09 11:52:26 -07005489 PendingBuffersInRequest bufsForCurRequest;
5490 bufsForCurRequest.frame_number = frameNumber;
5491 // Mark current timestamp for the new request
5492 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005493 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005494
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005495 if (hdrPlusRequest) {
5496 // Save settings for this request.
5497 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5498 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5499
5500 // Add to pending HDR+ request queue.
5501 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5502 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5503
5504 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5505 }
5506
Thierry Strudel3d639192016-09-09 11:52:26 -07005507 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005508 if ((request->output_buffers[i].stream->data_space ==
5509 HAL_DATASPACE_DEPTH) &&
5510 (HAL_PIXEL_FORMAT_BLOB ==
5511 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005512 continue;
5513 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005514 RequestedBufferInfo requestedBuf;
5515 memset(&requestedBuf, 0, sizeof(requestedBuf));
5516 requestedBuf.stream = request->output_buffers[i].stream;
5517 requestedBuf.buffer = NULL;
5518 pendingRequest.buffers.push_back(requestedBuf);
5519
 5520        // Add the buffer handle to the pending buffers list
5521 PendingBufferInfo bufferInfo;
5522 bufferInfo.buffer = request->output_buffers[i].buffer;
5523 bufferInfo.stream = request->output_buffers[i].stream;
5524 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5525 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5526 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5527 frameNumber, bufferInfo.buffer,
5528 channel->getStreamTypeMask(), bufferInfo.stream->format);
5529 }
5530 // Add this request packet into mPendingBuffersMap
5531 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5532 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5533 mPendingBuffersMap.get_num_overall_buffers());
5534
5535 latestRequest = mPendingRequestsList.insert(
5536 mPendingRequestsList.end(), pendingRequest);
5537 if(mFlush) {
5538 LOGI("mFlush is true");
5539 pthread_mutex_unlock(&mMutex);
5540 return NO_ERROR;
5541 }
5542
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005543 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5544 // channel.
5545 if (!hdrPlusRequest) {
5546 int indexUsed;
5547 // Notify metadata channel we receive a request
5548 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005549
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005550 if(request->input_buffer != NULL){
5551 LOGD("Input request, frame_number %d", frameNumber);
5552 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5553 if (NO_ERROR != rc) {
5554 LOGE("fail to set reproc parameters");
5555 pthread_mutex_unlock(&mMutex);
5556 return rc;
5557 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005558 }
5559
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005560 // Call request on other streams
5561 uint32_t streams_need_metadata = 0;
5562 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5563 for (size_t i = 0; i < request->num_output_buffers; i++) {
5564 const camera3_stream_buffer_t& output = request->output_buffers[i];
5565 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5566
5567 if (channel == NULL) {
5568 LOGW("invalid channel pointer for stream");
5569 continue;
5570 }
5571
5572 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5573 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5574 output.buffer, request->input_buffer, frameNumber);
5575 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005576 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005577 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5578 if (rc < 0) {
5579 LOGE("Fail to request on picture channel");
5580 pthread_mutex_unlock(&mMutex);
5581 return rc;
5582 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005583 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005584 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5585 assert(NULL != mDepthChannel);
5586 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005587
Emilian Peev7650c122017-01-19 08:24:33 -08005588 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5589 if (rc < 0) {
5590 LOGE("Fail to map on depth buffer");
5591 pthread_mutex_unlock(&mMutex);
5592 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005593 }
Emilian Peev7650c122017-01-19 08:24:33 -08005594 } else {
5595 LOGD("snapshot request with buffer %p, frame_number %d",
5596 output.buffer, frameNumber);
5597 if (!request->settings) {
5598 rc = channel->request(output.buffer, frameNumber,
5599 NULL, mPrevParameters, indexUsed);
5600 } else {
5601 rc = channel->request(output.buffer, frameNumber,
5602 NULL, mParameters, indexUsed);
5603 }
5604 if (rc < 0) {
5605 LOGE("Fail to request on picture channel");
5606 pthread_mutex_unlock(&mMutex);
5607 return rc;
5608 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005609
Emilian Peev7650c122017-01-19 08:24:33 -08005610 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5611 uint32_t j = 0;
5612 for (j = 0; j < streamsArray.num_streams; j++) {
5613 if (streamsArray.stream_request[j].streamID == streamId) {
5614 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5615 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5616 else
5617 streamsArray.stream_request[j].buf_index = indexUsed;
5618 break;
5619 }
5620 }
5621 if (j == streamsArray.num_streams) {
5622 LOGE("Did not find matching stream to update index");
5623 assert(0);
5624 }
5625
5626 pendingBufferIter->need_metadata = true;
5627 streams_need_metadata++;
5628 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005629 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005630 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5631 bool needMetadata = false;
5632 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5633 rc = yuvChannel->request(output.buffer, frameNumber,
5634 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5635 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005636 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005637 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005638 pthread_mutex_unlock(&mMutex);
5639 return rc;
5640 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005641
5642 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5643 uint32_t j = 0;
5644 for (j = 0; j < streamsArray.num_streams; j++) {
5645 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005646 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5647 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5648 else
5649 streamsArray.stream_request[j].buf_index = indexUsed;
5650 break;
5651 }
5652 }
5653 if (j == streamsArray.num_streams) {
5654 LOGE("Did not find matching stream to update index");
5655 assert(0);
5656 }
5657
5658 pendingBufferIter->need_metadata = needMetadata;
5659 if (needMetadata)
5660 streams_need_metadata += 1;
5661 LOGD("calling YUV channel request, need_metadata is %d",
5662 needMetadata);
5663 } else {
5664 LOGD("request with buffer %p, frame_number %d",
5665 output.buffer, frameNumber);
5666
5667 rc = channel->request(output.buffer, frameNumber, indexUsed);
5668
5669 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5670 uint32_t j = 0;
5671 for (j = 0; j < streamsArray.num_streams; j++) {
5672 if (streamsArray.stream_request[j].streamID == streamId) {
5673 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5674 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5675 else
5676 streamsArray.stream_request[j].buf_index = indexUsed;
5677 break;
5678 }
5679 }
5680 if (j == streamsArray.num_streams) {
5681 LOGE("Did not find matching stream to update index");
5682 assert(0);
5683 }
5684
5685 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5686 && mBatchSize) {
5687 mToBeQueuedVidBufs++;
5688 if (mToBeQueuedVidBufs == mBatchSize) {
5689 channel->queueBatchBuf();
5690 }
5691 }
5692 if (rc < 0) {
5693 LOGE("request failed");
5694 pthread_mutex_unlock(&mMutex);
5695 return rc;
5696 }
5697 }
5698 pendingBufferIter++;
5699 }
5700
5701 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5702 itr++) {
5703 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5704
5705 if (channel == NULL) {
5706 LOGE("invalid channel pointer for stream");
5707 assert(0);
5708 return BAD_VALUE;
5709 }
5710
5711 InternalRequest requestedStream;
5712 requestedStream = (*itr);
5713
5714
5715 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5716 LOGD("snapshot request internally input buffer %p, frame_number %d",
5717 request->input_buffer, frameNumber);
5718 if(request->input_buffer != NULL){
5719 rc = channel->request(NULL, frameNumber,
5720 pInputBuffer, &mReprocMeta, indexUsed, true,
5721 requestedStream.meteringOnly);
5722 if (rc < 0) {
5723 LOGE("Fail to request on picture channel");
5724 pthread_mutex_unlock(&mMutex);
5725 return rc;
5726 }
5727 } else {
5728 LOGD("snapshot request with frame_number %d", frameNumber);
5729 if (!request->settings) {
5730 rc = channel->request(NULL, frameNumber,
5731 NULL, mPrevParameters, indexUsed, true,
5732 requestedStream.meteringOnly);
5733 } else {
5734 rc = channel->request(NULL, frameNumber,
5735 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5736 }
5737 if (rc < 0) {
5738 LOGE("Fail to request on picture channel");
5739 pthread_mutex_unlock(&mMutex);
5740 return rc;
5741 }
5742
5743 if ((*itr).meteringOnly != 1) {
5744 requestedStream.need_metadata = 1;
5745 streams_need_metadata++;
5746 }
5747 }
5748
5749 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5750 uint32_t j = 0;
5751 for (j = 0; j < streamsArray.num_streams; j++) {
5752 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005753 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5754 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5755 else
5756 streamsArray.stream_request[j].buf_index = indexUsed;
5757 break;
5758 }
5759 }
5760 if (j == streamsArray.num_streams) {
5761 LOGE("Did not find matching stream to update index");
5762 assert(0);
5763 }
5764
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005765 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005766 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005767 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005768 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005769 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005770 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005771 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005772
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005773        //If two streams have need_metadata set to true, fail the request, unless
 5774        //we copy/reference-count the metadata buffer
5775 if (streams_need_metadata > 1) {
 5776            LOGE("not supporting a request in which two streams require"
 5777                    " HAL metadata for reprocessing");
5778 pthread_mutex_unlock(&mMutex);
5779 return -EINVAL;
5780 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005781
Emilian Peev7650c122017-01-19 08:24:33 -08005782 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5783 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5784 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5785 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5786 pthread_mutex_unlock(&mMutex);
5787 return BAD_VALUE;
5788 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005789 if (request->input_buffer == NULL) {
5790 /* Set the parameters to backend:
5791 * - For every request in NORMAL MODE
5792 * - For every request in HFR mode during preview only case
5793 * - Once every batch in HFR mode during video recording
5794 */
5795 if (!mBatchSize ||
5796 (mBatchSize && !isVidBufRequested) ||
5797 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5798 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5799 mBatchSize, isVidBufRequested,
5800 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005801
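            // For the last video buffer of an HFR batch, merge the stream IDs
            // requested across the batch into mBatchedStreamsArray (skipping
            // duplicates) so that the single set_parms call below covers every
            // stream touched by the batch.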
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005802 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5803 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5804 uint32_t m = 0;
5805 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5806 if (streamsArray.stream_request[k].streamID ==
5807 mBatchedStreamsArray.stream_request[m].streamID)
5808 break;
5809 }
5810 if (m == mBatchedStreamsArray.num_streams) {
5811 mBatchedStreamsArray.stream_request\
5812 [mBatchedStreamsArray.num_streams].streamID =
5813 streamsArray.stream_request[k].streamID;
5814 mBatchedStreamsArray.stream_request\
5815 [mBatchedStreamsArray.num_streams].buf_index =
5816 streamsArray.stream_request[k].buf_index;
5817 mBatchedStreamsArray.num_streams =
5818 mBatchedStreamsArray.num_streams + 1;
5819 }
5820 }
5821 streamsArray = mBatchedStreamsArray;
5822 }
5823 /* Update stream id of all the requested buffers */
5824 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5825 streamsArray)) {
5826 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005827 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005828 return BAD_VALUE;
5829 }
5830
5831 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5832 mParameters);
5833 if (rc < 0) {
5834 LOGE("set_parms failed");
5835 }
 5836            /* reset to zero because the batch is queued */
5837 mToBeQueuedVidBufs = 0;
5838 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5839 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5840 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005841 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5842 uint32_t m = 0;
5843 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5844 if (streamsArray.stream_request[k].streamID ==
5845 mBatchedStreamsArray.stream_request[m].streamID)
5846 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005847 }
5848 if (m == mBatchedStreamsArray.num_streams) {
5849 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5850 streamID = streamsArray.stream_request[k].streamID;
5851 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5852 buf_index = streamsArray.stream_request[k].buf_index;
5853 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5854 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005855 }
5856 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005857 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005858
5859 // Start all streams after the first setting is sent, so that the
5860 // setting can be applied sooner: (0 + apply_delay)th frame.
5861 if (mState == CONFIGURED && mChannelHandle) {
5862 //Then start them.
5863 LOGH("Start META Channel");
5864 rc = mMetadataChannel->start();
5865 if (rc < 0) {
5866 LOGE("META channel start failed");
5867 pthread_mutex_unlock(&mMutex);
5868 return rc;
5869 }
5870
5871 if (mAnalysisChannel) {
5872 rc = mAnalysisChannel->start();
5873 if (rc < 0) {
5874 LOGE("Analysis channel start failed");
5875 mMetadataChannel->stop();
5876 pthread_mutex_unlock(&mMutex);
5877 return rc;
5878 }
5879 }
5880
5881 if (mSupportChannel) {
5882 rc = mSupportChannel->start();
5883 if (rc < 0) {
5884 LOGE("Support channel start failed");
5885 mMetadataChannel->stop();
 5886                    /* Although support and analysis are mutually exclusive today,
 5887                       add it in any case for future-proofing */
5888 if (mAnalysisChannel) {
5889 mAnalysisChannel->stop();
5890 }
5891 pthread_mutex_unlock(&mMutex);
5892 return rc;
5893 }
5894 }
5895 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5896 it != mStreamInfo.end(); it++) {
5897 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5898 LOGH("Start Processing Channel mask=%d",
5899 channel->getStreamTypeMask());
5900 rc = channel->start();
5901 if (rc < 0) {
5902 LOGE("channel start failed");
5903 pthread_mutex_unlock(&mMutex);
5904 return rc;
5905 }
5906 }
5907
5908 if (mRawDumpChannel) {
5909 LOGD("Starting raw dump stream");
5910 rc = mRawDumpChannel->start();
5911 if (rc != NO_ERROR) {
5912 LOGE("Error Starting Raw Dump Channel");
5913 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5914 it != mStreamInfo.end(); it++) {
5915 QCamera3Channel *channel =
5916 (QCamera3Channel *)(*it)->stream->priv;
5917 LOGH("Stopping Processing Channel mask=%d",
5918 channel->getStreamTypeMask());
5919 channel->stop();
5920 }
5921 if (mSupportChannel)
5922 mSupportChannel->stop();
5923 if (mAnalysisChannel) {
5924 mAnalysisChannel->stop();
5925 }
5926 mMetadataChannel->stop();
5927 pthread_mutex_unlock(&mMutex);
5928 return rc;
5929 }
5930 }
5931
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005932 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005933 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005934 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005935 if (rc != NO_ERROR) {
5936 LOGE("start_channel failed %d", rc);
5937 pthread_mutex_unlock(&mMutex);
5938 return rc;
5939 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005940
5941 {
5942 // Configure Easel for stream on.
5943 Mutex::Autolock l(gHdrPlusClientLock);
5944 if (EaselManagerClientOpened) {
5945 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
5946 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk);
5947 if (rc != OK) {
5948 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5949 mCameraId, mSensorModeInfo.op_pixel_clk);
5950 pthread_mutex_unlock(&mMutex);
5951 return rc;
5952 }
5953 }
5954 }
5955
5956 // Start sensor streaming.
5957 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5958 mChannelHandle);
5959 if (rc != NO_ERROR) {
5960 LOGE("start_sensor_stream_on failed %d", rc);
5961 pthread_mutex_unlock(&mMutex);
5962 return rc;
5963 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005964 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005965 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005966 }
5967
5968 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5969
5970 mState = STARTED;
5971 // Added a timed condition wait
5972 struct timespec ts;
5973 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005974 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005975 if (rc < 0) {
5976 isValidTimeout = 0;
 5977      LOGE("Error reading the monotonic clock!!");
5978 }
5979 else {
 5980      // Use a 5 sec timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005981 int64_t timeout = 5;
5982 {
5983 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5984 // If there is a pending HDR+ request, the following requests may be blocked until the
5985 // HDR+ request is done. So allow a longer timeout.
5986 if (mHdrPlusPendingRequests.size() > 0) {
5987 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5988 }
5989 }
5990 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005991 }
5992 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005993 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005994 (mState != ERROR) && (mState != DEINIT)) {
5995 if (!isValidTimeout) {
5996 LOGD("Blocking on conditional wait");
5997 pthread_cond_wait(&mRequestCond, &mMutex);
5998 }
5999 else {
6000 LOGD("Blocking on timed conditional wait");
6001 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6002 if (rc == ETIMEDOUT) {
6003 rc = -ENODEV;
6004 LOGE("Unblocked on timeout!!!!");
6005 break;
6006 }
6007 }
6008 LOGD("Unblocked");
6009 if (mWokenUpByDaemon) {
6010 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006011 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006012 break;
6013 }
6014 }
6015 pthread_mutex_unlock(&mMutex);
6016
6017 return rc;
6018}
6019
6020/*===========================================================================
6021 * FUNCTION : dump
6022 *
 6023 * DESCRIPTION: Dump pending requests, pending buffers and pending frame drops to the given file descriptor
 6024 *
 6025 * PARAMETERS :
 6026 *   @fd : file descriptor to write the dump to
 6027 *
 6028 * RETURN : None
6029 *==========================================================================*/
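// Note: typically reached via 'dumpsys media.camera'; this call also serves as
// the trigger to refresh the debug level (see mUpdateDebugLevel below).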
6030void QCamera3HardwareInterface::dump(int fd)
6031{
6032 pthread_mutex_lock(&mMutex);
6033 dprintf(fd, "\n Camera HAL3 information Begin \n");
6034
6035 dprintf(fd, "\nNumber of pending requests: %zu \n",
6036 mPendingRequestsList.size());
6037 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6038 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6039 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6040 for(pendingRequestIterator i = mPendingRequestsList.begin();
6041 i != mPendingRequestsList.end(); i++) {
6042 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6043 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6044 i->input_buffer);
6045 }
6046 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6047 mPendingBuffersMap.get_num_overall_buffers());
6048 dprintf(fd, "-------+------------------\n");
6049 dprintf(fd, " Frame | Stream type mask \n");
6050 dprintf(fd, "-------+------------------\n");
6051 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6052 for(auto &j : req.mPendingBufferList) {
6053 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6054 dprintf(fd, " %5d | %11d \n",
6055 req.frame_number, channel->getStreamTypeMask());
6056 }
6057 }
6058 dprintf(fd, "-------+------------------\n");
6059
6060 dprintf(fd, "\nPending frame drop list: %zu\n",
6061 mPendingFrameDropList.size());
6062 dprintf(fd, "-------+-----------\n");
6063 dprintf(fd, " Frame | Stream ID \n");
6064 dprintf(fd, "-------+-----------\n");
6065 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6066 i != mPendingFrameDropList.end(); i++) {
6067 dprintf(fd, " %5d | %9d \n",
6068 i->frame_number, i->stream_ID);
6069 }
6070 dprintf(fd, "-------+-----------\n");
6071
6072 dprintf(fd, "\n Camera HAL3 information End \n");
6073
6074 /* use dumpsys media.camera as trigger to send update debug level event */
6075 mUpdateDebugLevel = true;
6076 pthread_mutex_unlock(&mMutex);
6077 return;
6078}
6079
6080/*===========================================================================
6081 * FUNCTION : flush
6082 *
6083 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6084 * conditionally restarts channels
6085 *
6086 * PARAMETERS :
6087 * @ restartChannels: re-start all channels
6088 *
6089 *
6090 * RETURN :
6091 * 0 on success
6092 * Error code on failure
6093 *==========================================================================*/
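// Note: internal error handling calls flush(false) (see handleCameraDeviceError)
// so that channels are torn down without being restarted.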
6094int QCamera3HardwareInterface::flush(bool restartChannels)
6095{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006096 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006097 int32_t rc = NO_ERROR;
6098
6099 LOGD("Unblocking Process Capture Request");
6100 pthread_mutex_lock(&mMutex);
6101 mFlush = true;
6102 pthread_mutex_unlock(&mMutex);
6103
6104 rc = stopAllChannels();
6105 // unlink of dualcam
6106 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006107 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6108 &m_pDualCamCmdPtr->bundle_info;
6109 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006110 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6111 pthread_mutex_lock(&gCamLock);
6112
6113 if (mIsMainCamera == 1) {
6114 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6115 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006116 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006117 // related session id should be session id of linked session
6118 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6119 } else {
6120 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6121 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006122 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006123 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6124 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006125 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006126 pthread_mutex_unlock(&gCamLock);
6127
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006128 rc = mCameraHandle->ops->set_dual_cam_cmd(
6129 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006130 if (rc < 0) {
6131 LOGE("Dualcam: Unlink failed, but still proceed to close");
6132 }
6133 }
6134
6135 if (rc < 0) {
6136 LOGE("stopAllChannels failed");
6137 return rc;
6138 }
6139 if (mChannelHandle) {
6140 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6141 mChannelHandle);
6142 }
6143
6144 // Reset bundle info
6145 rc = setBundleInfo();
6146 if (rc < 0) {
6147 LOGE("setBundleInfo failed %d", rc);
6148 return rc;
6149 }
6150
6151 // Mutex Lock
6152 pthread_mutex_lock(&mMutex);
6153
6154 // Unblock process_capture_request
6155 mPendingLiveRequest = 0;
6156 pthread_cond_signal(&mRequestCond);
6157
6158 rc = notifyErrorForPendingRequests();
6159 if (rc < 0) {
6160 LOGE("notifyErrorForPendingRequests failed");
6161 pthread_mutex_unlock(&mMutex);
6162 return rc;
6163 }
6164
6165 mFlush = false;
6166
6167 // Start the Streams/Channels
6168 if (restartChannels) {
6169 rc = startAllChannels();
6170 if (rc < 0) {
6171 LOGE("startAllChannels failed");
6172 pthread_mutex_unlock(&mMutex);
6173 return rc;
6174 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006175 if (mChannelHandle) {
6176 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006177 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006178 if (rc < 0) {
6179 LOGE("start_channel failed");
6180 pthread_mutex_unlock(&mMutex);
6181 return rc;
6182 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006183 }
6184 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006185 pthread_mutex_unlock(&mMutex);
6186
6187 return 0;
6188}
6189
6190/*===========================================================================
6191 * FUNCTION : flushPerf
6192 *
 6193 * DESCRIPTION: This is the performance-optimized version of flush that does
 6194 *              not use stream off; instead it flushes the backend and waits for pending buffers to return
6195 *
6196 * PARAMETERS :
6197 *
6198 *
6199 * RETURN : 0 : success
6200 * -EINVAL: input is malformed (device is not valid)
6201 * -ENODEV: if the device has encountered a serious error
6202 *==========================================================================*/
6203int QCamera3HardwareInterface::flushPerf()
6204{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006205 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006206 int32_t rc = 0;
6207 struct timespec timeout;
6208 bool timed_wait = false;
6209
6210 pthread_mutex_lock(&mMutex);
6211 mFlushPerf = true;
6212 mPendingBuffersMap.numPendingBufsAtFlush =
6213 mPendingBuffersMap.get_num_overall_buffers();
6214 LOGD("Calling flush. Wait for %d buffers to return",
6215 mPendingBuffersMap.numPendingBufsAtFlush);
6216
6217 /* send the flush event to the backend */
6218 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6219 if (rc < 0) {
6220 LOGE("Error in flush: IOCTL failure");
6221 mFlushPerf = false;
6222 pthread_mutex_unlock(&mMutex);
6223 return -ENODEV;
6224 }
6225
6226 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6227 LOGD("No pending buffers in HAL, return flush");
6228 mFlushPerf = false;
6229 pthread_mutex_unlock(&mMutex);
6230 return rc;
6231 }
6232
6233 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006234 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006235 if (rc < 0) {
 6236        LOGE("Error reading the monotonic clock, cannot use timed wait");
6237 } else {
6238 timeout.tv_sec += FLUSH_TIMEOUT;
6239 timed_wait = true;
6240 }
6241
6242 //Block on conditional variable
6243 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6244 LOGD("Waiting on mBuffersCond");
6245 if (!timed_wait) {
6246 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6247 if (rc != 0) {
6248 LOGE("pthread_cond_wait failed due to rc = %s",
6249 strerror(rc));
6250 break;
6251 }
6252 } else {
6253 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6254 if (rc != 0) {
6255 LOGE("pthread_cond_timedwait failed due to rc = %s",
6256 strerror(rc));
6257 break;
6258 }
6259 }
6260 }
6261 if (rc != 0) {
6262 mFlushPerf = false;
6263 pthread_mutex_unlock(&mMutex);
6264 return -ENODEV;
6265 }
6266
6267 LOGD("Received buffers, now safe to return them");
6268
6269 //make sure the channels handle flush
6270 //currently only required for the picture channel to release snapshot resources
6271 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6272 it != mStreamInfo.end(); it++) {
6273 QCamera3Channel *channel = (*it)->channel;
6274 if (channel) {
6275 rc = channel->flush();
6276 if (rc) {
6277 LOGE("Flushing the channels failed with error %d", rc);
 6278                // Even though the channel flush failed, we need to continue and
 6279                // return the buffers we have to the framework; however, the return
 6280                // value will be an error
6281 rc = -ENODEV;
6282 }
6283 }
6284 }
6285
6286 /* notify the frameworks and send errored results */
6287 rc = notifyErrorForPendingRequests();
6288 if (rc < 0) {
6289 LOGE("notifyErrorForPendingRequests failed");
6290 pthread_mutex_unlock(&mMutex);
6291 return rc;
6292 }
6293
6294 //unblock process_capture_request
6295 mPendingLiveRequest = 0;
6296 unblockRequestIfNecessary();
6297
6298 mFlushPerf = false;
6299 pthread_mutex_unlock(&mMutex);
6300 LOGD ("Flush Operation complete. rc = %d", rc);
6301 return rc;
6302}
6303
6304/*===========================================================================
6305 * FUNCTION : handleCameraDeviceError
6306 *
6307 * DESCRIPTION: This function calls internal flush and notifies the error to
6308 * framework and updates the state variable.
6309 *
6310 * PARAMETERS : None
6311 *
6312 * RETURN : NO_ERROR on Success
6313 * Error code on failure
6314 *==========================================================================*/
6315int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6316{
6317 int32_t rc = NO_ERROR;
6318
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006319 {
6320 Mutex::Autolock lock(mFlushLock);
6321 pthread_mutex_lock(&mMutex);
6322 if (mState != ERROR) {
6323 //if mState != ERROR, nothing to be done
6324 pthread_mutex_unlock(&mMutex);
6325 return NO_ERROR;
6326 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006327 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006328
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006329 rc = flush(false /* restart channels */);
6330 if (NO_ERROR != rc) {
6331 LOGE("internal flush to handle mState = ERROR failed");
6332 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006333
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006334 pthread_mutex_lock(&mMutex);
6335 mState = DEINIT;
6336 pthread_mutex_unlock(&mMutex);
6337 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006338
6339 camera3_notify_msg_t notify_msg;
6340 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6341 notify_msg.type = CAMERA3_MSG_ERROR;
6342 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6343 notify_msg.message.error.error_stream = NULL;
6344 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006345 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006346
6347 return rc;
6348}
6349
6350/*===========================================================================
6351 * FUNCTION : captureResultCb
6352 *
 6353 * DESCRIPTION: Callback handler for all capture results
6354 * (streams, as well as metadata)
6355 *
6356 * PARAMETERS :
6357 * @metadata : metadata information
6358 * @buffer : actual gralloc buffer to be returned to frameworks.
 6359 *             NULL if metadata.
 *   @frame_number : frame number of the request (used for buffer and
 *             input buffer callbacks)
 *   @isInputBuffer : true if this callback is for an input buffer
 6360 *
6361 * RETURN : NONE
6362 *==========================================================================*/
6363void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6364 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6365{
6366 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006367 pthread_mutex_lock(&mMutex);
6368 uint8_t batchSize = mBatchSize;
6369 pthread_mutex_unlock(&mMutex);
6370 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006371 handleBatchMetadata(metadata_buf,
6372 true /* free_and_bufdone_meta_buf */);
6373 } else { /* mBatchSize = 0 */
6374 hdrPlusPerfLock(metadata_buf);
6375 pthread_mutex_lock(&mMutex);
6376 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006377 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006378 true /* last urgent frame of batch metadata */,
6379 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006380 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006381 pthread_mutex_unlock(&mMutex);
6382 }
6383 } else if (isInputBuffer) {
6384 pthread_mutex_lock(&mMutex);
6385 handleInputBufferWithLock(frame_number);
6386 pthread_mutex_unlock(&mMutex);
6387 } else {
6388 pthread_mutex_lock(&mMutex);
6389 handleBufferWithLock(buffer, frame_number);
6390 pthread_mutex_unlock(&mMutex);
6391 }
6392 return;
6393}
6394
6395/*===========================================================================
6396 * FUNCTION : getReprocessibleOutputStreamId
6397 *
6398 * DESCRIPTION: Get source output stream id for the input reprocess stream
6399 * based on size and format, which would be the largest
6400 * output stream if an input stream exists.
6401 *
6402 * PARAMETERS :
6403 * @id : return the stream id if found
6404 *
6405 * RETURN : int32_t type of status
6406 * NO_ERROR -- success
 6407 *              non-zero failure code
6408 *==========================================================================*/
6409int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6410{
 6411    /* check if there is any output or bidirectional stream with the same size
 6412       and format, and return that stream */
6413 if ((mInputStreamInfo.dim.width > 0) &&
6414 (mInputStreamInfo.dim.height > 0)) {
6415 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6416 it != mStreamInfo.end(); it++) {
6417
6418 camera3_stream_t *stream = (*it)->stream;
6419 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6420 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6421 (stream->format == mInputStreamInfo.format)) {
6422 // Usage flag for an input stream and the source output stream
6423 // may be different.
6424 LOGD("Found reprocessible output stream! %p", *it);
6425 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6426 stream->usage, mInputStreamInfo.usage);
6427
6428 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6429 if (channel != NULL && channel->mStreams[0]) {
6430 id = channel->mStreams[0]->getMyServerID();
6431 return NO_ERROR;
6432 }
6433 }
6434 }
6435 } else {
6436 LOGD("No input stream, so no reprocessible output stream");
6437 }
6438 return NAME_NOT_FOUND;
6439}
6440
6441/*===========================================================================
6442 * FUNCTION : lookupFwkName
6443 *
 6444 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 6445 *              make sure the parameter is correctly propagated
6446 *
6447 * PARAMETERS :
6448 * @arr : map between the two enums
6449 * @len : len of the map
6450 * @hal_name : name of the hal_parm to map
6451 *
6452 * RETURN : int type of status
6453 * fwk_name -- success
 6454 *          non-zero failure code
6455 *==========================================================================*/
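// Illustrative usage (the map table and its length are hypothetical; entries
// are assumed to expose .hal_name and .fwk_name members, as this template
// requires):
//   int fwk = lookupFwkName(SOME_MODES_MAP, SOME_MODES_MAP_LEN, halValue);
//   if (fwk == NAME_NOT_FOUND) {
//       // backend value has no framework equivalent; skip reporting it
//   }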
6456template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6457 size_t len, halType hal_name)
6458{
6459
6460 for (size_t i = 0; i < len; i++) {
6461 if (arr[i].hal_name == hal_name) {
6462 return arr[i].fwk_name;
6463 }
6464 }
6465
 6466    /* Not being able to find a matching framework type is not necessarily
 6467     * an error case. This happens when mm-camera supports more attributes
 6468     * than the framework does */
6469 LOGH("Cannot find matching framework type");
6470 return NAME_NOT_FOUND;
6471}
6472
6473/*===========================================================================
6474 * FUNCTION : lookupHalName
6475 *
 6476 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 6477 *              make sure the parameter is correctly propagated
6478 *
6479 * PARAMETERS :
6480 * @arr : map between the two enums
 6481 *   @len     : length of the map
 6482 *   @fwk_name : name of the framework parameter to map
6483 *
6484 * RETURN : int32_t type of status
6485 * hal_name -- success
 6486 *           non-zero failure code
6487 *==========================================================================*/
6488template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6489 size_t len, fwkType fwk_name)
6490{
6491 for (size_t i = 0; i < len; i++) {
6492 if (arr[i].fwk_name == fwk_name) {
6493 return arr[i].hal_name;
6494 }
6495 }
6496
6497 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6498 return NAME_NOT_FOUND;
6499}
6500
6501/*===========================================================================
6502 * FUNCTION : lookupProp
6503 *
6504 * DESCRIPTION: lookup a value by its name
6505 *
6506 * PARAMETERS :
6507 * @arr : map between the two enums
6508 * @len : size of the map
6509 * @name : name to be looked up
6510 *
6511 * RETURN : Value if found
6512 * CAM_CDS_MODE_MAX if not found
6513 *==========================================================================*/
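// Illustrative usage (the property name and map table are hypothetical; entries
// are assumed to expose .desc and .val members, as this template requires):
//   char prop[PROPERTY_VALUE_MAX];
//   property_get("persist.camera.CDS", prop, "Auto");
//   cam_cds_mode_type_t cds = lookupProp(CDS_MAP, CDS_MAP_LEN, prop);
//   if (cds == CAM_CDS_MODE_MAX) {
//       // name not found in the map
//   }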
6514template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6515 size_t len, const char *name)
6516{
6517 if (name) {
6518 for (size_t i = 0; i < len; i++) {
6519 if (!strcmp(arr[i].desc, name)) {
6520 return arr[i].val;
6521 }
6522 }
6523 }
6524 return CAM_CDS_MODE_MAX;
6525}
6526
6527/*===========================================================================
 6528 * FUNCTION : translateFromHalMetadata
 *
 6529 * DESCRIPTION: Translate metadata received from the HAL/backend into the
 *              camera_metadata_t format expected by the framework
6530 *
6531 * PARAMETERS :
6532 * @metadata : metadata information from callback
6533 * @timestamp: metadata buffer timestamp
6534 * @request_id: request id
6535 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006536 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006537 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6538 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006539 *   @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006540 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6541 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006542 *
6543 * RETURN : camera_metadata_t*
6544 * metadata in a format specified by fwk
6545 *==========================================================================*/
6546camera_metadata_t*
6547QCamera3HardwareInterface::translateFromHalMetadata(
6548 metadata_buffer_t *metadata,
6549 nsecs_t timestamp,
6550 int32_t request_id,
6551 const CameraMetadata& jpegMetadata,
6552 uint8_t pipeline_depth,
6553 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006554 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006555 /* DevCamDebug metadata translateFromHalMetadata argument */
6556 uint8_t DevCamDebug_meta_enable,
6557 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006558 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006559 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006560 bool lastMetadataInBatch,
6561 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006562{
6563 CameraMetadata camMetadata;
6564 camera_metadata_t *resultMetadata;
6565
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006566 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006567        /* In batch mode, if this is not the last metadata in the batch, populate
 6568         * only SENSOR_TIMESTAMP, which is needed for the shutter notify
 6569         * calculation. */
6570 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6571 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006572 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006573 }
6574
Thierry Strudel3d639192016-09-09 11:52:26 -07006575 if (jpegMetadata.entryCount())
6576 camMetadata.append(jpegMetadata);
6577
6578 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6579 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6580 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6581 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006582 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006583 if (mBatchSize == 0) {
6584 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6585 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6586 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006587
Samuel Ha68ba5172016-12-15 18:41:12 -08006588 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
 6589    // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6590 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6591 // DevCamDebug metadata translateFromHalMetadata AF
6592 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6593 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6594 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6595 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6596 }
6597 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6598 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6599 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6600 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6601 }
6602 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6603 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6604 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6605 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6606 }
6607 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6608 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6609 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6610 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6611 }
6612 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6613 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6614 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6615 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6616 }
6617 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6618 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6619 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6620 *DevCamDebug_af_monitor_pdaf_target_pos;
6621 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6622 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6623 }
6624 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6625 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6626 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6627 *DevCamDebug_af_monitor_pdaf_confidence;
6628 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6629 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6630 }
6631 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6632 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6633 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6634 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6635 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6636 }
6637 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6638 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6639 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6640 *DevCamDebug_af_monitor_tof_target_pos;
6641 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6642 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6643 }
6644 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6645 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6646 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6647 *DevCamDebug_af_monitor_tof_confidence;
6648 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6649 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6650 }
6651 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6652 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6653 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6654 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6655 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6656 }
6657 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6658 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6659 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6660 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6661 &fwk_DevCamDebug_af_monitor_type_select, 1);
6662 }
6663 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6664 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6665 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6666 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6667 &fwk_DevCamDebug_af_monitor_refocus, 1);
6668 }
6669 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6670 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6671 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6672 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6673 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6674 }
6675 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6676 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6677 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6678 *DevCamDebug_af_search_pdaf_target_pos;
6679 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6680 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6681 }
6682 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6683 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6684 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6685 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6686 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6687 }
6688 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6689 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6690 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6691 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6692 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6693 }
6694 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6695 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6696 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6697 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6698 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6699 }
6700 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6701 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6702 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6703 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6704 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6705 }
6706 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6707 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6708 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6709 *DevCamDebug_af_search_tof_target_pos;
6710 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6711 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6712 }
6713 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6714 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6715 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6716 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6717 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6718 }
6719 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6720 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6721 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6722 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6723 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6724 }
6725 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6726 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6727 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6728 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6729 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6730 }
6731 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6732 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6733 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6734 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6735 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6736 }
6737 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6738 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6739 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6740 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6741 &fwk_DevCamDebug_af_search_type_select, 1);
6742 }
6743 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6744 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6745 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6746 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6747 &fwk_DevCamDebug_af_search_next_pos, 1);
6748 }
6749 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6750 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6751 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6752 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6753 &fwk_DevCamDebug_af_search_target_pos, 1);
6754 }
6755 // DevCamDebug metadata translateFromHalMetadata AEC
6756 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6757 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6758 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6759 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6760 }
6761 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6762 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6763 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6764 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6765 }
6766 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6767 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6768 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6769 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6770 }
6771 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6772 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6773 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6774 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6775 }
6776 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6777 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6778 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6779 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6780 }
6781 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6782 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6783 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6784 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6785 }
6786 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6787 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6788 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6789 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6790 }
6791 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6792 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6793 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6794 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6795 }
Samuel Ha34229982017-02-17 13:51:11 -08006796 // DevCamDebug metadata translateFromHalMetadata zzHDR
6797 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6798 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6799 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6800 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6801 }
6802 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6803 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006804 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006805 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6806 }
6807 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6808 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6809 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6810 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6811 }
6812 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6813 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006814 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006815 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6816 }
6817 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6818 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6819 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6820 *DevCamDebug_aec_hdr_sensitivity_ratio;
6821 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6822 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6823 }
6824 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6825 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6826 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6827 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6828 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6829 }
6830 // DevCamDebug metadata translateFromHalMetadata ADRC
6831 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6832 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6833 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6834 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6835 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6836 }
6837 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6838 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6839 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6840 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6841 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6842 }
6843 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6844 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6845 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6846 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6847 }
6848 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6849 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6850 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6851 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6852 }
6853 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6854 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6855 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6856 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6857 }
6858 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6859 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6860 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6861 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6862 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006863 // DevCamDebug metadata translateFromHalMetadata AWB
6864 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6865 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6866 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6867 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6868 }
6869 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6870 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6871 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6872 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6873 }
6874 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6875 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6876 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6877 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6878 }
6879 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6880 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6881 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6882 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6883 }
6884 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6885 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6886 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6887 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6888 }
6889 }
6890 // atrace_end(ATRACE_TAG_ALWAYS);
6891
Thierry Strudel3d639192016-09-09 11:52:26 -07006892 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6893 int64_t fwk_frame_number = *frame_number;
6894 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6895 }
6896
6897 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6898 int32_t fps_range[2];
6899 fps_range[0] = (int32_t)float_range->min_fps;
6900 fps_range[1] = (int32_t)float_range->max_fps;
6901 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6902 fps_range, 2);
6903 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6904 fps_range[0], fps_range[1]);
6905 }
6906
6907 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6908 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6909 }
6910
6911 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6912 int val = lookupFwkName(SCENE_MODES_MAP,
6913 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6914 *sceneMode);
6915 if (NAME_NOT_FOUND != val) {
6916 uint8_t fwkSceneMode = (uint8_t)val;
6917 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6918 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6919 fwkSceneMode);
6920 }
6921 }
6922
6923 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6924 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6925 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6926 }
6927
6928 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6929 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6930 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6931 }
6932
6933 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6934 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6935 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6936 }
6937
6938 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6939 CAM_INTF_META_EDGE_MODE, metadata) {
6940 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6941 }
6942
6943 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6944 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6945 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6946 }
6947
6948 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6949 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6950 }
6951
6952 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6953 if (0 <= *flashState) {
6954 uint8_t fwk_flashState = (uint8_t) *flashState;
6955 if (!gCamCapability[mCameraId]->flash_available) {
6956 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6957 }
6958 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6959 }
6960 }
6961
6962 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6963 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6964 if (NAME_NOT_FOUND != val) {
6965 uint8_t fwk_flashMode = (uint8_t)val;
6966 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6967 }
6968 }
6969
6970 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6971 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6972 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6973 }
6974
6975 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6976 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6977 }
6978
6979 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6980 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6981 }
6982
6983 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6984 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6985 }
6986
6987 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6988 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6989 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6990 }
6991
6992 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6993 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6994 LOGD("fwk_videoStab = %d", fwk_videoStab);
6995 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6996 } else {
6997 // Regardless of whether video stabilization is supported, CTS expects the EIS result
6998 // to be non-NULL, so hardcode the video stabilization result to OFF mode.
6999 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7000 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007001 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007002 }
7003
7004 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7005 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7006 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7007 }
7008
7009 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7010 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7011 }
7012
Thierry Strudel3d639192016-09-09 11:52:26 -07007013 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7014 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007015 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007016
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007017 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7018 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007019
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007020 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007021 blackLevelAppliedPattern->cam_black_level[0],
7022 blackLevelAppliedPattern->cam_black_level[1],
7023 blackLevelAppliedPattern->cam_black_level[2],
7024 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007025 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7026 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007027
7028#ifndef USE_HAL_3_3
7029 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307030 // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007031 // depth space.
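 // Note: the scale factor is 2^(14-10) = 16, so e.g. an applied black level
 // of 1024 in the 14-bit pipeline maps to 1024 / 16 = 64 in the 10-bit
 // sensor raw space.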
Jason Lee4f3d96e2017-02-28 19:24:14 +05307032 fwk_blackLevelInd[0] /= 16.0;
7033 fwk_blackLevelInd[1] /= 16.0;
7034 fwk_blackLevelInd[2] /= 16.0;
7035 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007036 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7037 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007038#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007039 }
7040
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007041#ifndef USE_HAL_3_3
7042 // Fixed whitelevel is used by ISP/Sensor
7043 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7044 &gCamCapability[mCameraId]->white_level, 1);
7045#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007046
7047 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7048 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7049 int32_t scalerCropRegion[4];
7050 scalerCropRegion[0] = hScalerCropRegion->left;
7051 scalerCropRegion[1] = hScalerCropRegion->top;
7052 scalerCropRegion[2] = hScalerCropRegion->width;
7053 scalerCropRegion[3] = hScalerCropRegion->height;
7054
7055 // Adjust crop region from sensor output coordinate system to active
7056 // array coordinate system.
7057 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7058 scalerCropRegion[2], scalerCropRegion[3]);
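 // Note: toActiveArray() adjusts the rectangle in place, so scalerCropRegion
 // is already in active-array coordinates when it is published below.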
7059
7060 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7061 }
7062
7063 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7064 LOGD("sensorExpTime = %lld", *sensorExpTime);
7065 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7066 }
7067
7068 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7069 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7070 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7071 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7072 }
7073
7074 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7075 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7076 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7077 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7078 sensorRollingShutterSkew, 1);
7079 }
7080
7081 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7082 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7083 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7084
7085 //calculate the noise profile based on sensitivity
7086 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7087 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
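 // Note: ANDROID_SENSOR_NOISE_PROFILE is reported as one (S, O) pair per
 // color channel, with the noise variance modelled as roughly
 // S * signal + O (this reading of the framework tag is an assumption);
 // the same pair is therefore replicated for every channel below.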
7088 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7089 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7090 noise_profile[i] = noise_profile_S;
7091 noise_profile[i+1] = noise_profile_O;
7092 }
7093 LOGD("noise model entry (S, O) is (%f, %f)",
7094 noise_profile_S, noise_profile_O);
7095 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7096 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7097 }
7098
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007099#ifndef USE_HAL_3_3
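 // Note: fwk_ispSensitivity starts at the ISO-equivalent baseline of 100, is
 // replaced by the ISP sensitivity when the HAL reports one, and is then
 // scaled by the post-stats sensitivity factor, so the published
 // POST_RAW_SENSITIVITY_BOOST reflects the combined digital gain applied
 // after the RAW stage.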
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007100 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007101 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007102 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007103 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007104 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7105 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7106 }
7107 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007108#endif
7109
Thierry Strudel3d639192016-09-09 11:52:26 -07007110 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7111 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7112 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7113 }
7114
7115 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7116 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7117 *faceDetectMode);
7118 if (NAME_NOT_FOUND != val) {
7119 uint8_t fwk_faceDetectMode = (uint8_t)val;
7120 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7121
7122 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7123 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7124 CAM_INTF_META_FACE_DETECTION, metadata) {
7125 uint8_t numFaces = MIN(
7126 faceDetectionInfo->num_faces_detected, MAX_ROI);
7127 int32_t faceIds[MAX_ROI];
7128 uint8_t faceScores[MAX_ROI];
7129 int32_t faceRectangles[MAX_ROI * 4];
7130 int32_t faceLandmarks[MAX_ROI * 6];
7131 size_t j = 0, k = 0;
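 // Note: faceRectangles packs 4 ints per face (left, top, right, bottom)
 // indexed by j, while faceLandmarks packs 6 ints per face (left eye,
 // right eye and mouth centers as x/y pairs) indexed by k.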
7132
7133 for (size_t i = 0; i < numFaces; i++) {
7134 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7135 // Map the face boundary from the sensor output coordinate
7136 // system to the active array coordinate system.
7137 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7138 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7139 rect.width, rect.height);
7140
7141 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7142 faceRectangles+j, -1);
7143
Jason Lee8ce36fa2017-04-19 19:40:37 -07007144 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7145 "bottom-right (%d, %d)",
7146 faceDetectionInfo->frame_id, i,
7147 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7148 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7149
Thierry Strudel3d639192016-09-09 11:52:26 -07007150 j+= 4;
7151 }
7152 if (numFaces <= 0) {
7153 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7154 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7155 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7156 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7157 }
7158
7159 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7160 numFaces);
7161 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7162 faceRectangles, numFaces * 4U);
7163 if (fwk_faceDetectMode ==
7164 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7165 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7166 CAM_INTF_META_FACE_LANDMARK, metadata) {
7167
7168 for (size_t i = 0; i < numFaces; i++) {
7169 // Map the co-ordinate sensor output coordinate system to active
7170 // array coordinate system.
7171 mCropRegionMapper.toActiveArray(
7172 landmarks->face_landmarks[i].left_eye_center.x,
7173 landmarks->face_landmarks[i].left_eye_center.y);
7174 mCropRegionMapper.toActiveArray(
7175 landmarks->face_landmarks[i].right_eye_center.x,
7176 landmarks->face_landmarks[i].right_eye_center.y);
7177 mCropRegionMapper.toActiveArray(
7178 landmarks->face_landmarks[i].mouth_center.x,
7179 landmarks->face_landmarks[i].mouth_center.y);
7180
7181 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007182
7183 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7184 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7185 faceDetectionInfo->frame_id, i,
7186 faceLandmarks[k + LEFT_EYE_X],
7187 faceLandmarks[k + LEFT_EYE_Y],
7188 faceLandmarks[k + RIGHT_EYE_X],
7189 faceLandmarks[k + RIGHT_EYE_Y],
7190 faceLandmarks[k + MOUTH_X],
7191 faceLandmarks[k + MOUTH_Y]);
7192
Thierry Strudel04e026f2016-10-10 11:27:36 -07007193 k+= TOTAL_LANDMARK_INDICES;
7194 }
7195 } else {
7196 for (size_t i = 0; i < numFaces; i++) {
7197 setInvalidLandmarks(faceLandmarks+k);
7198 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007199 }
7200 }
7201
Jason Lee49619db2017-04-13 12:07:22 -07007202 for (size_t i = 0; i < numFaces; i++) {
7203 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7204
7205 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7206 faceDetectionInfo->frame_id, i, faceIds[i]);
7207 }
7208
Thierry Strudel3d639192016-09-09 11:52:26 -07007209 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7210 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7211 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007212 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007213 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7214 CAM_INTF_META_FACE_BLINK, metadata) {
7215 uint8_t detected[MAX_ROI];
7216 uint8_t degree[MAX_ROI * 2];
7217 for (size_t i = 0; i < numFaces; i++) {
7218 detected[i] = blinks->blink[i].blink_detected;
7219 degree[2 * i] = blinks->blink[i].left_blink;
7220 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007221
Jason Lee49619db2017-04-13 12:07:22 -07007222 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7223 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7224 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7225 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007226 }
7227 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7228 detected, numFaces);
7229 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7230 degree, numFaces * 2);
7231 }
7232 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7233 CAM_INTF_META_FACE_SMILE, metadata) {
7234 uint8_t degree[MAX_ROI];
7235 uint8_t confidence[MAX_ROI];
7236 for (size_t i = 0; i < numFaces; i++) {
7237 degree[i] = smiles->smile[i].smile_degree;
7238 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007239
Jason Lee49619db2017-04-13 12:07:22 -07007240 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7241 "smile_degree=%d, smile_score=%d",
7242 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007243 }
7244 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7245 degree, numFaces);
7246 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7247 confidence, numFaces);
7248 }
7249 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7250 CAM_INTF_META_FACE_GAZE, metadata) {
7251 int8_t angle[MAX_ROI];
7252 int32_t direction[MAX_ROI * 3];
7253 int8_t degree[MAX_ROI * 2];
7254 for (size_t i = 0; i < numFaces; i++) {
7255 angle[i] = gazes->gaze[i].gaze_angle;
7256 direction[3 * i] = gazes->gaze[i].updown_dir;
7257 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7258 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7259 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7260 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007261
7262 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7263 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7264 "left_right_gaze=%d, top_bottom_gaze=%d",
7265 faceDetectionInfo->frame_id, i, angle[i],
7266 direction[3 * i], direction[3 * i + 1],
7267 direction[3 * i + 2],
7268 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007269 }
7270 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7271 (uint8_t *)angle, numFaces);
7272 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7273 direction, numFaces * 3);
7274 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7275 (uint8_t *)degree, numFaces * 2);
7276 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007277 }
7278 }
7279 }
7280 }
7281
7282 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7283 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007284 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007285 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007286 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007287
Shuzhen Wang14415f52016-11-16 18:26:18 -08007288 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7289 histogramBins = *histBins;
7290 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7291 }
7292
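 // Note: histogram data is only forwarded when the mode is ON and a valid
 // bin count was reported, because the NEXUS_EXPERIMENTAL_2017_HISTOGRAM
 // entry below is sized by histogramBins.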
7293 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007294 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7295 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007296 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007297
7298 switch (stats_data->type) {
7299 case CAM_HISTOGRAM_TYPE_BAYER:
7300 switch (stats_data->bayer_stats.data_type) {
7301 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007302 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7303 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007304 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007305 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7306 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007307 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007308 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7309 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007310 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007311 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007312 case CAM_STATS_CHANNEL_R:
7313 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007314 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7315 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007316 }
7317 break;
7318 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007319 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007320 break;
7321 }
7322
Shuzhen Wang14415f52016-11-16 18:26:18 -08007323 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007324 }
7325 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007326 }
7327
7328 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7329 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7330 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7331 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7332 }
7333
7334 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7335 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7336 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7337 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7338 }
7339
7340 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7341 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7342 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7343 CAM_MAX_SHADING_MAP_HEIGHT);
7344 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7345 CAM_MAX_SHADING_MAP_WIDTH);
7346 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7347 lensShadingMap->lens_shading, 4U * map_width * map_height);
7348 }
7349
7350 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7351 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7352 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7353 }
7354
7355 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7356 //Populate CAM_INTF_META_TONEMAP_CURVES
7357 /* ch0 = G, ch1 = B, ch2 = R */
7358 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7359 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7360 tonemap->tonemap_points_cnt,
7361 CAM_MAX_TONEMAP_CURVE_SIZE);
7362 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7363 }
7364
7365 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7366 &tonemap->curves[0].tonemap_points[0][0],
7367 tonemap->tonemap_points_cnt * 2);
7368
7369 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7370 &tonemap->curves[1].tonemap_points[0][0],
7371 tonemap->tonemap_points_cnt * 2);
7372
7373 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7374 &tonemap->curves[2].tonemap_points[0][0],
7375 tonemap->tonemap_points_cnt * 2);
7376 }
7377
7378 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7379 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7380 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7381 CC_GAIN_MAX);
7382 }
7383
7384 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7385 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7386 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7387 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7388 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7389 }
7390
7391 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7392 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7393 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7394 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7395 toneCurve->tonemap_points_cnt,
7396 CAM_MAX_TONEMAP_CURVE_SIZE);
7397 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7398 }
7399 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7400 (float*)toneCurve->curve.tonemap_points,
7401 toneCurve->tonemap_points_cnt * 2);
7402 }
7403
7404 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7405 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7406 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7407 predColorCorrectionGains->gains, 4);
7408 }
7409
7410 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7411 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7412 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7413 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7414 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7415 }
7416
7417 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7418 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7419 }
7420
7421 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7422 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7423 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7424 }
7425
7426 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7427 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7428 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7429 }
7430
7431 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7432 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7433 *effectMode);
7434 if (NAME_NOT_FOUND != val) {
7435 uint8_t fwk_effectMode = (uint8_t)val;
7436 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7437 }
7438 }
7439
7440 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7441 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7442 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7443 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7444 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7445 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7446 }
7447 int32_t fwk_testPatternData[4];
7448 fwk_testPatternData[0] = testPatternData->r;
7449 fwk_testPatternData[3] = testPatternData->b;
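 // Note: which HAL green channel (gr/gb) fills slots 1 and 2 depends on the
 // sensor's color filter arrangement, hence the switch below; the assumed
 // framework ordering for ANDROID_SENSOR_TEST_PATTERN_DATA is
 // [R, G_even, G_odd, B].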
7450 switch (gCamCapability[mCameraId]->color_arrangement) {
7451 case CAM_FILTER_ARRANGEMENT_RGGB:
7452 case CAM_FILTER_ARRANGEMENT_GRBG:
7453 fwk_testPatternData[1] = testPatternData->gr;
7454 fwk_testPatternData[2] = testPatternData->gb;
7455 break;
7456 case CAM_FILTER_ARRANGEMENT_GBRG:
7457 case CAM_FILTER_ARRANGEMENT_BGGR:
7458 fwk_testPatternData[2] = testPatternData->gr;
7459 fwk_testPatternData[1] = testPatternData->gb;
7460 break;
7461 default:
7462 LOGE("color arrangement %d is not supported",
7463 gCamCapability[mCameraId]->color_arrangement);
7464 break;
7465 }
7466 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7467 }
7468
7469 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7470 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7471 }
7472
7473 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7474 String8 str((const char *)gps_methods);
7475 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7476 }
7477
7478 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7479 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7480 }
7481
7482 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7483 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7484 }
7485
7486 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7487 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7488 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7489 }
7490
7491 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7492 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7493 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7494 }
7495
7496 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7497 int32_t fwk_thumb_size[2];
7498 fwk_thumb_size[0] = thumb_size->width;
7499 fwk_thumb_size[1] = thumb_size->height;
7500 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7501 }
7502
7503 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7504 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7505 privateData,
7506 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7507 }
7508
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007509 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007510 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007511 meteringMode, 1);
7512 }
7513
Thierry Strudel54dc9782017-02-15 12:12:10 -08007514 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7515 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7516 LOGD("hdr_scene_data: %d %f\n",
7517 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7518 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7519 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7520 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7521 &isHdr, 1);
7522 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7523 &isHdrConfidence, 1);
7524 }
7525
7526
7527
Thierry Strudel3d639192016-09-09 11:52:26 -07007528 if (metadata->is_tuning_params_valid) {
7529 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7530 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7531 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
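 // Note: the blob is packed as six uint32_t header fields (data version and
 // the sensor/VFE/CPP/CAC/mod3 payload sizes) followed by the sensor, VFE,
 // CPP and CAC tuning payloads, each clamped to its TUNING_*_DATA_MAX limit;
 // a consumer of QCAMERA3_TUNING_META_DATA_BLOB would need to parse it in
 // the same order.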
7532
7533
7534 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7535 sizeof(uint32_t));
7536 data += sizeof(uint32_t);
7537
7538 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7539 sizeof(uint32_t));
7540 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7541 data += sizeof(uint32_t);
7542
7543 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7544 sizeof(uint32_t));
7545 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7546 data += sizeof(uint32_t);
7547
7548 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7549 sizeof(uint32_t));
7550 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7551 data += sizeof(uint32_t);
7552
7553 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7554 sizeof(uint32_t));
7555 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7556 data += sizeof(uint32_t);
7557
7558 metadata->tuning_params.tuning_mod3_data_size = 0;
7559 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7560 sizeof(uint32_t));
7561 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7562 data += sizeof(uint32_t);
7563
7564 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7565 TUNING_SENSOR_DATA_MAX);
7566 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7567 count);
7568 data += count;
7569
7570 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7571 TUNING_VFE_DATA_MAX);
7572 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7573 count);
7574 data += count;
7575
7576 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7577 TUNING_CPP_DATA_MAX);
7578 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7579 count);
7580 data += count;
7581
7582 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7583 TUNING_CAC_DATA_MAX);
7584 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7585 count);
7586 data += count;
7587
7588 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7589 (int32_t *)(void *)tuning_meta_data_blob,
7590 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7591 }
7592
7593 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7594 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7595 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7596 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7597 NEUTRAL_COL_POINTS);
7598 }
7599
7600 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7601 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7602 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7603 }
7604
7605 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7606 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7607 // Adjust crop region from sensor output coordinate system to active
7608 // array coordinate system.
7609 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7610 hAeRegions->rect.width, hAeRegions->rect.height);
7611
7612 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7613 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7614 REGIONS_TUPLE_COUNT);
7615 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7616 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7617 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7618 hAeRegions->rect.height);
7619 }
7620
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007621 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7622 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7623 if (NAME_NOT_FOUND != val) {
7624 uint8_t fwkAfMode = (uint8_t)val;
7625 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7626 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7627 } else {
7628 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7629 val);
7630 }
7631 }
7632
Thierry Strudel3d639192016-09-09 11:52:26 -07007633 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7634 uint8_t fwk_afState = (uint8_t) *afState;
7635 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007636 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007637 }
7638
7639 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7640 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7641 }
7642
7643 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7644 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7645 }
7646
7647 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7648 uint8_t fwk_lensState = *lensState;
7649 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7650 }
7651
Thierry Strudel3d639192016-09-09 11:52:26 -07007652
7653 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007654 uint32_t ab_mode = *hal_ab_mode;
7655 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7656 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7657 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7658 }
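 // Note: the HAL-specific AUTO_50HZ/AUTO_60HZ variants have no framework
 // counterpart, so they are collapsed to plain AUTO before the lookup in
 // ANTIBANDING_MODES_MAP (assuming the framework only defines
 // OFF/50HZ/60HZ/AUTO).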
Thierry Strudel3d639192016-09-09 11:52:26 -07007659 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007660 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007661 if (NAME_NOT_FOUND != val) {
7662 uint8_t fwk_ab_mode = (uint8_t)val;
7663 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7664 }
7665 }
7666
7667 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7668 int val = lookupFwkName(SCENE_MODES_MAP,
7669 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7670 if (NAME_NOT_FOUND != val) {
7671 uint8_t fwkBestshotMode = (uint8_t)val;
7672 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7673 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7674 } else {
7675 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7676 }
7677 }
7678
7679 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7680 uint8_t fwk_mode = (uint8_t) *mode;
7681 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7682 }
7683
7684 /* Constant metadata values to be updated */
7685 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7686 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7687
7688 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7689 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7690
7691 int32_t hotPixelMap[2];
7692 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
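 // Note: with the hot pixel map mode fixed to OFF, an empty
 // ANDROID_STATISTICS_HOT_PIXEL_MAP entry (count 0) is still published so
 // that the tag is present in the result.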
7693
7694 // CDS
7695 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7696 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7697 }
7698
Thierry Strudel04e026f2016-10-10 11:27:36 -07007699 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7700 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007701 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007702 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7703 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7704 } else {
7705 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7706 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007707
7708 if(fwk_hdr != curr_hdr_state) {
7709 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7710 if(fwk_hdr)
7711 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7712 else
7713 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7714 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007715 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7716 }
7717
Thierry Strudel54dc9782017-02-15 12:12:10 -08007718 //binning correction
7719 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7720 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7721 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7722 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7723 }
7724
Thierry Strudel04e026f2016-10-10 11:27:36 -07007725 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007726 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007727 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7728 int8_t is_ir_on = 0;
7729
7730 is_ir_on = (fwk_ir > 0) ? 1 : 0;
7731 if(is_ir_on != curr_ir_state) {
7732 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7733 if(is_ir_on)
7734 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7735 else
7736 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7737 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007738 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007739 }
7740
Thierry Strudel269c81a2016-10-12 12:13:59 -07007741 // AEC SPEED
7742 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7743 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7744 }
7745
7746 // AWB SPEED
7747 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7748 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7749 }
7750
Thierry Strudel3d639192016-09-09 11:52:26 -07007751 // TNR
7752 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7753 uint8_t tnr_enable = tnr->denoise_enable;
7754 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007755 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7756 int8_t is_tnr_on = 0;
7757
7758 is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7759 if(is_tnr_on != curr_tnr_state) {
7760 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7761 if(is_tnr_on)
7762 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7763 else
7764 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7765 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007766
7767 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7768 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7769 }
7770
7771 // Reprocess crop data
7772 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7773 uint8_t cnt = crop_data->num_of_streams;
7774 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7775 // mm-qcamera-daemon only posts crop_data for streams
7776 // not linked to pproc. So the absence of valid crop metadata
7777 // is not necessarily an error case.
7778 LOGD("No valid crop metadata entries");
7779 } else {
7780 uint32_t reproc_stream_id;
7781 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7782 LOGD("No reprocessible stream found, ignore crop data");
7783 } else {
7784 int rc = NO_ERROR;
7785 Vector<int32_t> roi_map;
7786 int32_t *crop = new int32_t[cnt*4];
7787 if (NULL == crop) {
7788 rc = NO_MEMORY;
7789 }
7790 if (NO_ERROR == rc) {
7791 int32_t streams_found = 0;
7792 for (size_t i = 0; i < cnt; i++) {
7793 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7794 if (pprocDone) {
7795 // HAL already does internal reprocessing,
7796 // either via reprocessing before JPEG encoding,
7797 // or offline postprocessing for pproc bypass case.
7798 crop[0] = 0;
7799 crop[1] = 0;
7800 crop[2] = mInputStreamInfo.dim.width;
7801 crop[3] = mInputStreamInfo.dim.height;
7802 } else {
7803 crop[0] = crop_data->crop_info[i].crop.left;
7804 crop[1] = crop_data->crop_info[i].crop.top;
7805 crop[2] = crop_data->crop_info[i].crop.width;
7806 crop[3] = crop_data->crop_info[i].crop.height;
7807 }
7808 roi_map.add(crop_data->crop_info[i].roi_map.left);
7809 roi_map.add(crop_data->crop_info[i].roi_map.top);
7810 roi_map.add(crop_data->crop_info[i].roi_map.width);
7811 roi_map.add(crop_data->crop_info[i].roi_map.height);
7812 streams_found++;
7813 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7814 crop[0], crop[1], crop[2], crop[3]);
7815 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7816 crop_data->crop_info[i].roi_map.left,
7817 crop_data->crop_info[i].roi_map.top,
7818 crop_data->crop_info[i].roi_map.width,
7819 crop_data->crop_info[i].roi_map.height);
7820 break;
7821
7822 }
7823 }
7824 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7825 &streams_found, 1);
7826 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7827 crop, (size_t)(streams_found * 4));
7828 if (roi_map.array()) {
7829 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7830 roi_map.array(), roi_map.size());
7831 }
7832 }
7833 if (crop) {
7834 delete [] crop;
7835 }
7836 }
7837 }
7838 }
7839
7840 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7841 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7842 // so hardcode the CAC result to OFF mode.
7843 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7844 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7845 } else {
7846 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7847 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7848 *cacMode);
7849 if (NAME_NOT_FOUND != val) {
7850 uint8_t resultCacMode = (uint8_t)val;
7851 // Check whether the CAC result from the callback matches the framework-set CAC mode.
7852 // If not, report the CAC mode that came in the corresponding request.
7853 if (fwk_cacMode != resultCacMode) {
7854 resultCacMode = fwk_cacMode;
7855 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007856 //Check if CAC is disabled by property
7857 if (m_cacModeDisabled) {
7858 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7859 }
7860
Thierry Strudel3d639192016-09-09 11:52:26 -07007861 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7862 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7863 } else {
7864 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7865 }
7866 }
7867 }
7868
7869 // Post blob of cam_cds_data through vendor tag.
7870 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7871 uint8_t cnt = cdsInfo->num_of_streams;
7872 cam_cds_data_t cdsDataOverride;
7873 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7874 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7875 cdsDataOverride.num_of_streams = 1;
7876 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7877 uint32_t reproc_stream_id;
7878 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7879 LOGD("No reprocessible stream found, ignore cds data");
7880 } else {
7881 for (size_t i = 0; i < cnt; i++) {
7882 if (cdsInfo->cds_info[i].stream_id ==
7883 reproc_stream_id) {
7884 cdsDataOverride.cds_info[0].cds_enable =
7885 cdsInfo->cds_info[i].cds_enable;
7886 break;
7887 }
7888 }
7889 }
7890 } else {
7891 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7892 }
7893 camMetadata.update(QCAMERA3_CDS_INFO,
7894 (uint8_t *)&cdsDataOverride,
7895 sizeof(cam_cds_data_t));
7896 }
7897
7898 // Ldaf calibration data
7899 if (!mLdafCalibExist) {
7900 IF_META_AVAILABLE(uint32_t, ldafCalib,
7901 CAM_INTF_META_LDAF_EXIF, metadata) {
7902 mLdafCalibExist = true;
7903 mLdafCalib[0] = ldafCalib[0];
7904 mLdafCalib[1] = ldafCalib[1];
7905 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7906 ldafCalib[0], ldafCalib[1]);
7907 }
7908 }
7909
Thierry Strudel54dc9782017-02-15 12:12:10 -08007910 // EXIF debug data through vendor tag
7911 /*
7912     * The Mobicat mask can take 3 values:
7913     * 1 refers to Mobicat data,
7914     * 2 refers to Stats Debug and Exif Debug data,
7915     * 3 refers to Mobicat and Stats Debug data.
7916     * We want to make sure that we send Exif debug data
7917     * only when the Mobicat mask is 2.
7918 */
7919 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7920 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7921 (uint8_t *)(void *)mExifParams.debug_params,
7922 sizeof(mm_jpeg_debug_exif_params_t));
7923 }
7924
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007925 // Reprocess and DDM debug data through vendor tag
7926 cam_reprocess_info_t repro_info;
7927 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007928 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7929 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007930 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007931 }
7932 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7933 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007934 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007935 }
7936 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7937 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007938 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007939 }
7940 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7941 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007942 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007943 }
7944 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7945 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007946 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007947 }
7948 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007949 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007950 }
7951 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7952 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007953 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007954 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007955 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7956 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7957 }
7958 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7959 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7960 }
7961 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7962 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007963
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007964 // INSTANT AEC MODE
7965 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7966 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7967 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7968 }
7969
Shuzhen Wange763e802016-03-31 10:24:29 -07007970 // AF scene change
7971 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7972 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7973 }
7974
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07007975 // Enable ZSL
7976 if (enableZsl != nullptr) {
7977 uint8_t value = *enableZsl ?
7978 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
7979 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
7980 }
7981
Thierry Strudel3d639192016-09-09 11:52:26 -07007982 resultMetadata = camMetadata.release();
7983 return resultMetadata;
7984}
7985
7986/*===========================================================================
7987 * FUNCTION : saveExifParams
7988 *
7989 * DESCRIPTION: Save the per-module EXIF debug parameters from the metadata
7990 *              callback into mExifParams
7991 * PARAMETERS :
7992 * @metadata : metadata information from callback
7993 *
7994 * RETURN : none
7995 *
7996 *==========================================================================*/
7997void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7998{
7999 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8000 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8001 if (mExifParams.debug_params) {
8002 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8003 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8004 }
8005 }
8006 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8007 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8008 if (mExifParams.debug_params) {
8009 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8010 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8011 }
8012 }
8013 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8014 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8015 if (mExifParams.debug_params) {
8016 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8017 mExifParams.debug_params->af_debug_params_valid = TRUE;
8018 }
8019 }
8020 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8021 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8022 if (mExifParams.debug_params) {
8023 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8024 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8025 }
8026 }
8027 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8028 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8029 if (mExifParams.debug_params) {
8030 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8031 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8032 }
8033 }
8034 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8035 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8036 if (mExifParams.debug_params) {
8037 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8038 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8039 }
8040 }
8041 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8042 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8043 if (mExifParams.debug_params) {
8044 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8045 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8046 }
8047 }
8048 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8049 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8050 if (mExifParams.debug_params) {
8051 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8052 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8053 }
8054 }
8055}
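/* A minimal sketch of the IF_META_AVAILABLE pattern used above and throughout
 * this file: the body runs only when the given tag is present in the metadata
 * buffer, with a typed pointer to the payload in scope. The tag and payload
 * type below are illustrative; the macro itself comes from the mm-camera
 * interface headers.
 *
 *     IF_META_AVAILABLE(uint32_t, aeState, CAM_INTF_META_AEC_STATE, metadata) {
 *         LOGD("AE state present: %u", *aeState);
 *     }
 */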
8056
8057/*===========================================================================
8058 * FUNCTION : get3AExifParams
8059 *
8060 * DESCRIPTION: Return the cached 3A EXIF debug parameters collected from
8061 *              metadata callbacks
8062 * PARAMETERS : none
8063 *
8064 *
8065 * RETURN : mm_jpeg_exif_params_t
8066 *
8067 *==========================================================================*/
8068mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8069{
8070 return mExifParams;
8071}
8072
8073/*===========================================================================
8074 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8075 *
8076 * DESCRIPTION: Translate urgent (partial result) metadata from the backend
8077 *              callback into framework result metadata
8078 * PARAMETERS :
8079 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008080 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8081 * urgent metadata in a batch. Always true for
8082 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008083 *
8084 * RETURN : camera_metadata_t*
8085 * metadata in a format specified by fwk
8086 *==========================================================================*/
8087camera_metadata_t*
8088QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008089 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008090{
8091 CameraMetadata camMetadata;
8092 camera_metadata_t *resultMetadata;
8093
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008094 if (!lastUrgentMetadataInBatch) {
8095 /* In batch mode, use empty metadata if this is not the last in batch
8096 */
8097 resultMetadata = allocate_camera_metadata(0, 0);
8098 return resultMetadata;
8099 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008100
8101 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8102 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8103 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8104 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8105 }
8106
8107 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8108 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8109 &aecTrigger->trigger, 1);
8110 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8111 &aecTrigger->trigger_id, 1);
8112 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8113 aecTrigger->trigger);
8114 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8115 aecTrigger->trigger_id);
8116 }
8117
8118 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8119 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8120 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8121 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8122 }
8123
Thierry Strudel3d639192016-09-09 11:52:26 -07008124 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8125 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8126 &af_trigger->trigger, 1);
8127 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8128 af_trigger->trigger);
8129 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8130 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8131 af_trigger->trigger_id);
8132 }
8133
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008134 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8135 /*af regions*/
8136 int32_t afRegions[REGIONS_TUPLE_COUNT];
8137 // Adjust crop region from sensor output coordinate system to active
8138 // array coordinate system.
8139 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8140 hAfRegions->rect.width, hAfRegions->rect.height);
8141
8142 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8143 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8144 REGIONS_TUPLE_COUNT);
8145 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8146 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8147 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8148 hAfRegions->rect.height);
8149 }
8150
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008151 // AF region confidence
8152 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8153 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8154 }
8155
Thierry Strudel3d639192016-09-09 11:52:26 -07008156 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8157 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8158 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8159 if (NAME_NOT_FOUND != val) {
8160 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8161 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8162 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8163 } else {
8164 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8165 }
8166 }
8167
8168 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8169 uint32_t aeMode = CAM_AE_MODE_MAX;
8170 int32_t flashMode = CAM_FLASH_MODE_MAX;
8171 int32_t redeye = -1;
8172 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8173 aeMode = *pAeMode;
8174 }
8175 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8176 flashMode = *pFlashMode;
8177 }
8178 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8179 redeye = *pRedeye;
8180 }
8181
8182 if (1 == redeye) {
8183 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8184 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8185 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8186 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8187 flashMode);
8188 if (NAME_NOT_FOUND != val) {
8189 fwk_aeMode = (uint8_t)val;
8190 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8191 } else {
8192 LOGE("Unsupported flash mode %d", flashMode);
8193 }
8194 } else if (aeMode == CAM_AE_MODE_ON) {
8195 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8196 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8197 } else if (aeMode == CAM_AE_MODE_OFF) {
8198 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8199 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008200 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8201 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8202 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008203 } else {
8204 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8205 "flashMode:%d, aeMode:%u!!!",
8206 redeye, flashMode, aeMode);
8207 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008208 if (mInstantAEC) {
8209         // Increment frame index count until a bound is reached for instant AEC.
8210 mInstantAecFrameIdxCount++;
8211 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8212 CAM_INTF_META_AEC_INFO, metadata) {
8213 LOGH("ae_params->settled = %d",ae_params->settled);
8214 // If AEC settled, or if number of frames reached bound value,
8215 // should reset instant AEC.
8216 if (ae_params->settled ||
8217 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8218 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8219 mInstantAEC = false;
8220 mResetInstantAEC = true;
8221 mInstantAecFrameIdxCount = 0;
8222 }
8223 }
8224 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008225 resultMetadata = camMetadata.release();
8226 return resultMetadata;
8227}
8228
8229/*===========================================================================
8230 * FUNCTION : dumpMetadataToFile
8231 *
8232 * DESCRIPTION: Dumps tuning metadata to file system
8233 *
8234 * PARAMETERS :
8235 * @meta : tuning metadata
8236 * @dumpFrameCount : current dump frame count
8237 * @enabled : Enable mask
8238 *
8239 *==========================================================================*/
8240void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8241 uint32_t &dumpFrameCount,
8242 bool enabled,
8243 const char *type,
8244 uint32_t frameNumber)
8245{
8246 //Some sanity checks
8247 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8248 LOGE("Tuning sensor data size bigger than expected %d: %d",
8249 meta.tuning_sensor_data_size,
8250 TUNING_SENSOR_DATA_MAX);
8251 return;
8252 }
8253
8254 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8255 LOGE("Tuning VFE data size bigger than expected %d: %d",
8256 meta.tuning_vfe_data_size,
8257 TUNING_VFE_DATA_MAX);
8258 return;
8259 }
8260
8261 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8262 LOGE("Tuning CPP data size bigger than expected %d: %d",
8263 meta.tuning_cpp_data_size,
8264 TUNING_CPP_DATA_MAX);
8265 return;
8266 }
8267
8268 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8269 LOGE("Tuning CAC data size bigger than expected %d: %d",
8270 meta.tuning_cac_data_size,
8271 TUNING_CAC_DATA_MAX);
8272 return;
8273 }
8274 //
8275
8276 if(enabled){
8277 char timeBuf[FILENAME_MAX];
8278 char buf[FILENAME_MAX];
8279 memset(buf, 0, sizeof(buf));
8280 memset(timeBuf, 0, sizeof(timeBuf));
8281 time_t current_time;
8282 struct tm * timeinfo;
8283 time (&current_time);
8284 timeinfo = localtime (&current_time);
8285 if (timeinfo != NULL) {
8286 strftime (timeBuf, sizeof(timeBuf),
8287 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8288 }
8289 String8 filePath(timeBuf);
8290 snprintf(buf,
8291 sizeof(buf),
8292 "%dm_%s_%d.bin",
8293 dumpFrameCount,
8294 type,
8295 frameNumber);
8296 filePath.append(buf);
8297 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8298 if (file_fd >= 0) {
8299 ssize_t written_len = 0;
8300 meta.tuning_data_version = TUNING_DATA_VERSION;
8301 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8302 written_len += write(file_fd, data, sizeof(uint32_t));
8303 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8304 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8305 written_len += write(file_fd, data, sizeof(uint32_t));
8306 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8307 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8308 written_len += write(file_fd, data, sizeof(uint32_t));
8309 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8310 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8311 written_len += write(file_fd, data, sizeof(uint32_t));
8312 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8313 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8314 written_len += write(file_fd, data, sizeof(uint32_t));
8315 meta.tuning_mod3_data_size = 0;
8316 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8317 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8318 written_len += write(file_fd, data, sizeof(uint32_t));
8319 size_t total_size = meta.tuning_sensor_data_size;
8320 data = (void *)((uint8_t *)&meta.data);
8321 written_len += write(file_fd, data, total_size);
8322 total_size = meta.tuning_vfe_data_size;
8323 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8324 written_len += write(file_fd, data, total_size);
8325 total_size = meta.tuning_cpp_data_size;
8326 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8327 written_len += write(file_fd, data, total_size);
8328 total_size = meta.tuning_cac_data_size;
8329 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8330 written_len += write(file_fd, data, total_size);
8331 close(file_fd);
8332         } else {
8333             LOGE("Failed to open file for metadata dumping");
8334 }
8335 }
8336}
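/* Resulting dump file layout, as implied by the write sequence above (the data
 * section offsets inside meta.data are fixed by the TUNING_*_DATA_OFFSET
 * constants):
 *
 *     uint32_t tuning_data_version
 *     uint32_t tuning_sensor_data_size
 *     uint32_t tuning_vfe_data_size
 *     uint32_t tuning_cpp_data_size
 *     uint32_t tuning_cac_data_size
 *     uint32_t tuning_mod3_data_size   // always written as 0
 *     uint8_t  sensor_data[tuning_sensor_data_size]
 *     uint8_t  vfe_data[tuning_vfe_data_size]
 *     uint8_t  cpp_data[tuning_cpp_data_size]
 *     uint8_t  cac_data[tuning_cac_data_size]
 */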
8337
8338/*===========================================================================
8339 * FUNCTION : cleanAndSortStreamInfo
8340 *
8341 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8342 * and sort them such that raw stream is at the end of the list
8343 * This is a workaround for camera daemon constraint.
8344 *
8345 * PARAMETERS : None
8346 *
8347 *==========================================================================*/
8348void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8349{
8350 List<stream_info_t *> newStreamInfo;
8351
8352 /*clean up invalid streams*/
8353 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8354 it != mStreamInfo.end();) {
8355 if(((*it)->status) == INVALID){
8356 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8357 delete channel;
8358 free(*it);
8359 it = mStreamInfo.erase(it);
8360 } else {
8361 it++;
8362 }
8363 }
8364
8365 // Move preview/video/callback/snapshot streams into newList
8366 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8367 it != mStreamInfo.end();) {
8368 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8369 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8370 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8371 newStreamInfo.push_back(*it);
8372 it = mStreamInfo.erase(it);
8373 } else
8374 it++;
8375 }
8376 // Move raw streams into newList
8377 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8378 it != mStreamInfo.end();) {
8379 newStreamInfo.push_back(*it);
8380 it = mStreamInfo.erase(it);
8381 }
8382
8383 mStreamInfo = newStreamInfo;
8384}
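/* Illustrative example (hypothetical stream set): if mStreamInfo held
 *     [ RAW16, IMPLEMENTATION_DEFINED (preview), BLOB (snapshot) ]
 * with all entries valid, the method above reorders it to
 *     [ IMPLEMENTATION_DEFINED (preview), BLOB (snapshot), RAW16 ]
 * after deleting any entries whose status is INVALID.
 */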
8385
8386/*===========================================================================
8387 * FUNCTION : extractJpegMetadata
8388 *
8389 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8390 * JPEG metadata is cached in HAL, and return as part of capture
8391 * result when metadata is returned from camera daemon.
8392 *
8393 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8394 * @request: capture request
8395 *
8396 *==========================================================================*/
8397void QCamera3HardwareInterface::extractJpegMetadata(
8398 CameraMetadata& jpegMetadata,
8399 const camera3_capture_request_t *request)
8400{
8401 CameraMetadata frame_settings;
8402 frame_settings = request->settings;
8403
8404 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8405 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8406 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8407 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8408
8409 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8410 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8411 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8412 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8413
8414 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8415 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8416 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8417 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8418
8419 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8420 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8421 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8422 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8423
8424 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8425 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8426 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8427 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8428
8429 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8430 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8431 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8432 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8433
8434 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8435 int32_t thumbnail_size[2];
8436 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8437 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8438 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8439 int32_t orientation =
8440 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008441 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008442 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8443 int32_t temp;
8444 temp = thumbnail_size[0];
8445 thumbnail_size[0] = thumbnail_size[1];
8446 thumbnail_size[1] = temp;
8447 }
8448 }
8449 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8450 thumbnail_size,
8451 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8452 }
8453
8454}
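/* Example with hypothetical request settings: ANDROID_JPEG_THUMBNAIL_SIZE =
 * {320, 240} and ANDROID_JPEG_ORIENTATION = 90, with no EXIF-based rotation
 * needed, results in a cached thumbnail size of {240, 320}, since width and
 * height are swapped for 90/270 degree rotations.
 */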
8455
8456/*===========================================================================
8457 * FUNCTION : convertToRegions
8458 *
8459 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8460 *
8461 * PARAMETERS :
8462 * @rect : cam_rect_t struct to convert
8463 * @region : int32_t destination array
8464 * @weight : if we are converting from cam_area_t, weight is valid
8465 * else weight = -1
8466 *
8467 *==========================================================================*/
8468void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8469 int32_t *region, int weight)
8470{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008471 region[FACE_LEFT] = rect.left;
8472 region[FACE_TOP] = rect.top;
8473 region[FACE_RIGHT] = rect.left + rect.width;
8474 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008475 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008476 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008477 }
8478}
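/* Worked example (hypothetical values): rect = {left=100, top=200, width=300,
 * height=400} with weight = 1 yields region[] = {100, 200, 400, 600, 1}, i.e.
 * the (left, top, right, bottom, weight) tuple layout used by the framework
 * region tags.
 */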
8479
8480/*===========================================================================
8481 * FUNCTION : convertFromRegions
8482 *
8483 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8484 *
8485 * PARAMETERS :
8486 *   @roi            : cam_area_t struct to be filled
8487 *   @frame_settings : capture request settings containing the region tag
8488 *   @tag            : metadata tag holding [x_min, y_min, x_max, y_max, weight]
8490 *
8491 *==========================================================================*/
8492void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008493 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008494{
Thierry Strudel3d639192016-09-09 11:52:26 -07008495 int32_t x_min = frame_settings.find(tag).data.i32[0];
8496 int32_t y_min = frame_settings.find(tag).data.i32[1];
8497 int32_t x_max = frame_settings.find(tag).data.i32[2];
8498 int32_t y_max = frame_settings.find(tag).data.i32[3];
8499 roi.weight = frame_settings.find(tag).data.i32[4];
8500 roi.rect.left = x_min;
8501 roi.rect.top = y_min;
8502 roi.rect.width = x_max - x_min;
8503 roi.rect.height = y_max - y_min;
8504}
8505
8506/*===========================================================================
8507 * FUNCTION : resetIfNeededROI
8508 *
8509 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8510 * crop region
8511 *
8512 * PARAMETERS :
8513 * @roi : cam_area_t struct to resize
8514 * @scalerCropRegion : cam_crop_region_t region to compare against
8515 *
8516 *
8517 *==========================================================================*/
8518bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8519 const cam_crop_region_t* scalerCropRegion)
8520{
8521 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8522 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8523 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8524 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8525
8526     /* According to the spec, weight = 0 indicates that the ROI should be disabled.
8527      * Without this check, the validation below (whether the ROI lies inside the
8528      * scaler crop region) would fail, the ROI would not be reset, and the
8529      * algorithm would continue to use a stale ROI window.
8530      */
8531 if (roi->weight == 0) {
8532 return true;
8533 }
8534
8535 if ((roi_x_max < scalerCropRegion->left) ||
8536             // right edge of roi window is left of scaler crop's left edge
8537             (roi_y_max < scalerCropRegion->top) ||
8538             // bottom edge of roi window is above scaler crop's top edge
8539             (roi->rect.left > crop_x_max) ||
8540             // left edge of roi window is beyond (right of) scaler crop's right edge
8541             (roi->rect.top > crop_y_max)){
8542             // top edge of roi window is below scaler crop's bottom edge
8543 return false;
8544 }
8545 if (roi->rect.left < scalerCropRegion->left) {
8546 roi->rect.left = scalerCropRegion->left;
8547 }
8548 if (roi->rect.top < scalerCropRegion->top) {
8549 roi->rect.top = scalerCropRegion->top;
8550 }
8551 if (roi_x_max > crop_x_max) {
8552 roi_x_max = crop_x_max;
8553 }
8554 if (roi_y_max > crop_y_max) {
8555 roi_y_max = crop_y_max;
8556 }
8557 roi->rect.width = roi_x_max - roi->rect.left;
8558 roi->rect.height = roi_y_max - roi->rect.top;
8559 return true;
8560}
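/* Worked example (hypothetical values): with a scaler crop region of
 * {left=0, top=0, width=2000, height=1500} and an ROI of
 * {left=1800, top=100, width=400, height=300, weight=1}, the ROI overlaps the
 * crop region, so it is clamped to {left=1800, top=100, width=200, height=300}
 * and the function returns true. An ROI with weight == 0 returns true
 * untouched, and an ROI entirely outside the crop region returns false.
 */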
8561
8562/*===========================================================================
8563 * FUNCTION : convertLandmarks
8564 *
8565 * DESCRIPTION: helper method to extract the landmarks from face detection info
8566 *
8567 * PARAMETERS :
8568 * @landmark_data : input landmark data to be converted
8569 * @landmarks : int32_t destination array
8570 *
8571 *
8572 *==========================================================================*/
8573void QCamera3HardwareInterface::convertLandmarks(
8574 cam_face_landmarks_info_t landmark_data,
8575 int32_t *landmarks)
8576{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008577 if (landmark_data.is_left_eye_valid) {
8578 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8579 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8580 } else {
8581 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8582 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8583 }
8584
8585 if (landmark_data.is_right_eye_valid) {
8586 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8587 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8588 } else {
8589 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8590 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8591 }
8592
8593 if (landmark_data.is_mouth_valid) {
8594 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8595 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8596 } else {
8597 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8598 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8599 }
8600}
8601
8602/*===========================================================================
8603 * FUNCTION : setInvalidLandmarks
8604 *
8605 * DESCRIPTION: helper method to set invalid landmarks
8606 *
8607 * PARAMETERS :
8608 * @landmarks : int32_t destination array
8609 *
8610 *
8611 *==========================================================================*/
8612void QCamera3HardwareInterface::setInvalidLandmarks(
8613 int32_t *landmarks)
8614{
8615 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8616 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8617 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8618 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8619 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8620 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008621}
8622
8623#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008624
8625/*===========================================================================
8626 * FUNCTION : getCapabilities
8627 *
8628 * DESCRIPTION: query camera capability from back-end
8629 *
8630 * PARAMETERS :
8631 * @ops : mm-interface ops structure
8632 * @cam_handle : camera handle for which we need capability
8633 *
8634 * RETURN : ptr type of capability structure
8635 * capability for success
8636 * NULL for failure
8637 *==========================================================================*/
8638cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8639 uint32_t cam_handle)
8640{
8641 int rc = NO_ERROR;
8642 QCamera3HeapMemory *capabilityHeap = NULL;
8643 cam_capability_t *cap_ptr = NULL;
8644
8645 if (ops == NULL) {
8646 LOGE("Invalid arguments");
8647 return NULL;
8648 }
8649
8650 capabilityHeap = new QCamera3HeapMemory(1);
8651 if (capabilityHeap == NULL) {
8652 LOGE("creation of capabilityHeap failed");
8653 return NULL;
8654 }
8655
8656 /* Allocate memory for capability buffer */
8657 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8658 if(rc != OK) {
8659         LOGE("No memory for capability");
8660 goto allocate_failed;
8661 }
8662
8663 /* Map memory for capability buffer */
8664 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8665
8666 rc = ops->map_buf(cam_handle,
8667 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8668 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8669 if(rc < 0) {
8670 LOGE("failed to map capability buffer");
8671 rc = FAILED_TRANSACTION;
8672 goto map_failed;
8673 }
8674
8675 /* Query Capability */
8676 rc = ops->query_capability(cam_handle);
8677 if(rc < 0) {
8678 LOGE("failed to query capability");
8679 rc = FAILED_TRANSACTION;
8680 goto query_failed;
8681 }
8682
8683 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8684 if (cap_ptr == NULL) {
8685 LOGE("out of memory");
8686 rc = NO_MEMORY;
8687 goto query_failed;
8688 }
8689
8690 memset(cap_ptr, 0, sizeof(cam_capability_t));
8691 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8692
8693 int index;
8694 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8695 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8696 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8697 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8698 }
8699
8700query_failed:
8701 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8702map_failed:
8703 capabilityHeap->deallocate();
8704allocate_failed:
8705 delete capabilityHeap;
8706
8707 if (rc != NO_ERROR) {
8708 return NULL;
8709 } else {
8710 return cap_ptr;
8711 }
8712}
8713
Thierry Strudel3d639192016-09-09 11:52:26 -07008714/*===========================================================================
8715 * FUNCTION : initCapabilities
8716 *
8717 * DESCRIPTION: initialize camera capabilities in static data struct
8718 *
8719 * PARAMETERS :
8720 * @cameraId : camera Id
8721 *
8722 * RETURN : int32_t type of status
8723 * NO_ERROR -- success
8724 * none-zero failure code
8725 *==========================================================================*/
8726int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8727{
8728 int rc = 0;
8729 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008730 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008731
8732 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8733 if (rc) {
8734 LOGE("camera_open failed. rc = %d", rc);
8735 goto open_failed;
8736 }
8737 if (!cameraHandle) {
8738 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8739 goto open_failed;
8740 }
8741
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008742 handle = get_main_camera_handle(cameraHandle->camera_handle);
8743 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8744 if (gCamCapability[cameraId] == NULL) {
8745 rc = FAILED_TRANSACTION;
8746 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008747 }
8748
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008749 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008750 if (is_dual_camera_by_idx(cameraId)) {
8751 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8752 gCamCapability[cameraId]->aux_cam_cap =
8753 getCapabilities(cameraHandle->ops, handle);
8754 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8755 rc = FAILED_TRANSACTION;
8756 free(gCamCapability[cameraId]);
8757 goto failed_op;
8758 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008759
8760 // Copy the main camera capability to main_cam_cap struct
8761 gCamCapability[cameraId]->main_cam_cap =
8762 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8763 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8764 LOGE("out of memory");
8765 rc = NO_MEMORY;
8766 goto failed_op;
8767 }
8768 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8769 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008770 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008771failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008772 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8773 cameraHandle = NULL;
8774open_failed:
8775 return rc;
8776}
8777
8778/*==========================================================================
8779 * FUNCTION   : get3AVersion
8780 *
8781 * DESCRIPTION: get the Q3A S/W version
8782 *
8783 * PARAMETERS :
8784 * @sw_version: Reference of Q3A structure which will hold version info upon
8785 * return
8786 *
8787 * RETURN : None
8788 *
8789 *==========================================================================*/
8790void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8791{
8792 if(gCamCapability[mCameraId])
8793 sw_version = gCamCapability[mCameraId]->q3a_version;
8794 else
8795 LOGE("Capability structure NULL!");
8796}
8797
8798
8799/*===========================================================================
8800 * FUNCTION : initParameters
8801 *
8802 * DESCRIPTION: initialize camera parameters
8803 *
8804 * PARAMETERS :
8805 *
8806 * RETURN : int32_t type of status
8807 * NO_ERROR -- success
8808 * none-zero failure code
8809 *==========================================================================*/
8810int QCamera3HardwareInterface::initParameters()
8811{
8812 int rc = 0;
8813
8814 //Allocate Set Param Buffer
8815 mParamHeap = new QCamera3HeapMemory(1);
8816 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8817 if(rc != OK) {
8818 rc = NO_MEMORY;
8819 LOGE("Failed to allocate SETPARM Heap memory");
8820 delete mParamHeap;
8821 mParamHeap = NULL;
8822 return rc;
8823 }
8824
8825 //Map memory for parameters buffer
8826 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8827 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8828 mParamHeap->getFd(0),
8829 sizeof(metadata_buffer_t),
8830 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8831 if(rc < 0) {
8832 LOGE("failed to map SETPARM buffer");
8833 rc = FAILED_TRANSACTION;
8834 mParamHeap->deallocate();
8835 delete mParamHeap;
8836 mParamHeap = NULL;
8837 return rc;
8838 }
8839
8840 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8841
8842 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8843 return rc;
8844}
8845
8846/*===========================================================================
8847 * FUNCTION : deinitParameters
8848 *
8849 * DESCRIPTION: de-initialize camera parameters
8850 *
8851 * PARAMETERS :
8852 *
8853 * RETURN : NONE
8854 *==========================================================================*/
8855void QCamera3HardwareInterface::deinitParameters()
8856{
8857 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8858 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8859
8860 mParamHeap->deallocate();
8861 delete mParamHeap;
8862 mParamHeap = NULL;
8863
8864 mParameters = NULL;
8865
8866 free(mPrevParameters);
8867 mPrevParameters = NULL;
8868}
8869
8870/*===========================================================================
8871 * FUNCTION : calcMaxJpegSize
8872 *
8873 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8874 *
8875 * PARAMETERS :
8876 *
8877 * RETURN : max_jpeg_size
8878 *==========================================================================*/
8879size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8880{
8881 size_t max_jpeg_size = 0;
8882 size_t temp_width, temp_height;
8883 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8884 MAX_SIZES_CNT);
8885 for (size_t i = 0; i < count; i++) {
8886 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8887 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8888 if (temp_width * temp_height > max_jpeg_size ) {
8889 max_jpeg_size = temp_width * temp_height;
8890 }
8891 }
8892 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8893 return max_jpeg_size;
8894}
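/* Worked example (hypothetical sensor): for a largest picture size of
 * 4000x3000, max_jpeg_size = 4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t)
 * = 18000000 + sizeof(camera3_jpeg_blob_t) bytes, an upper bound on the buffer
 * needed for a worst-case JPEG.
 */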
8895
8896/*===========================================================================
8897 * FUNCTION : getMaxRawSize
8898 *
8899 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8900 *
8901 * PARAMETERS :
8902 *
8903 * RETURN : Largest supported Raw Dimension
8904 *==========================================================================*/
8905cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8906{
8907 int max_width = 0;
8908 cam_dimension_t maxRawSize;
8909
8910 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8911 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8912 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8913 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8914 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8915 }
8916 }
8917 return maxRawSize;
8918}
8919
8920
8921/*===========================================================================
8922 * FUNCTION : calcMaxJpegDim
8923 *
8924 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8925 *
8926 * PARAMETERS :
8927 *
8928 * RETURN : max_jpeg_dim
8929 *==========================================================================*/
8930cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8931{
8932 cam_dimension_t max_jpeg_dim;
8933 cam_dimension_t curr_jpeg_dim;
8934 max_jpeg_dim.width = 0;
8935 max_jpeg_dim.height = 0;
8936 curr_jpeg_dim.width = 0;
8937 curr_jpeg_dim.height = 0;
8938 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8939 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8940 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8941 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8942 max_jpeg_dim.width * max_jpeg_dim.height ) {
8943 max_jpeg_dim.width = curr_jpeg_dim.width;
8944 max_jpeg_dim.height = curr_jpeg_dim.height;
8945 }
8946 }
8947 return max_jpeg_dim;
8948}
8949
8950/*===========================================================================
8951 * FUNCTION : addStreamConfig
8952 *
8953 * DESCRIPTION: adds the stream configuration to the array
8954 *
8955 * PARAMETERS :
8956 * @available_stream_configs : pointer to stream configuration array
8957 * @scalar_format : scalar format
8958 * @dim : configuration dimension
8959 * @config_type : input or output configuration type
8960 *
8961 * RETURN : NONE
8962 *==========================================================================*/
8963void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8964 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8965{
8966 available_stream_configs.add(scalar_format);
8967 available_stream_configs.add(dim.width);
8968 available_stream_configs.add(dim.height);
8969 available_stream_configs.add(config_type);
8970}
8971
8972/*===========================================================================
8973 * FUNCTION   : supportBurstCapture
8974 *
8975 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8976 *
8977 * PARAMETERS :
8978 * @cameraId : camera Id
8979 *
8980 * RETURN : true if camera supports BURST_CAPTURE
8981 * false otherwise
8982 *==========================================================================*/
8983bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8984{
8985 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8986 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8987 const int32_t highResWidth = 3264;
8988 const int32_t highResHeight = 2448;
8989
8990 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8991 // Maximum resolution images cannot be captured at >= 10fps
8992 // -> not supporting BURST_CAPTURE
8993 return false;
8994 }
8995
8996 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8997 // Maximum resolution images can be captured at >= 20fps
8998 // --> supporting BURST_CAPTURE
8999 return true;
9000 }
9001
9002 // Find the smallest highRes resolution, or largest resolution if there is none
9003 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9004 MAX_SIZES_CNT);
9005 size_t highRes = 0;
9006 while ((highRes + 1 < totalCnt) &&
9007 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9008 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9009 highResWidth * highResHeight)) {
9010 highRes++;
9011 }
9012 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9013 return true;
9014 } else {
9015 return false;
9016 }
9017}
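/* Example with hypothetical timings: a sensor whose full-resolution minimum
 * frame duration is 40 ms (25 fps) is within highResDurationBound (50 ms), so
 * BURST_CAPTURE is supported immediately. One with 80 ms (12.5 fps) at full
 * resolution falls between the two bounds, so the code above finds the
 * smallest picture size at or above 3264x2448 and requires that it sustain at
 * least 20 fps.
 */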
9018
9019/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009020 * FUNCTION : getPDStatIndex
9021 *
9022 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9023 *
9024 * PARAMETERS :
9025 * @caps : camera capabilities
9026 *
9027 * RETURN : int32_t type
9028 * non-negative - on success
9029 * -1 - on failure
9030 *==========================================================================*/
9031int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9032 if (nullptr == caps) {
9033 return -1;
9034 }
9035
9036 uint32_t metaRawCount = caps->meta_raw_channel_count;
9037 int32_t ret = -1;
9038 for (size_t i = 0; i < metaRawCount; i++) {
9039 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9040 ret = i;
9041 break;
9042 }
9043 }
9044
9045 return ret;
9046}
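/* Example (hypothetical capability): with meta_raw_channel_count = 2 and
 * CAM_FORMAT_SUBTYPE_PDAF_STATS stored at sub_fmt[1], the function returns 1;
 * if no meta raw channel carries the PDAF stats subtype, it returns -1.
 */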
9047
9048/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009049 * FUNCTION : initStaticMetadata
9050 *
9051 * DESCRIPTION: initialize the static metadata
9052 *
9053 * PARAMETERS :
9054 * @cameraId : camera Id
9055 *
9056 * RETURN : int32_t type of status
9057 * 0 -- success
9058 * non-zero failure code
9059 *==========================================================================*/
9060int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9061{
9062 int rc = 0;
9063 CameraMetadata staticInfo;
9064 size_t count = 0;
9065 bool limitedDevice = false;
9066 char prop[PROPERTY_VALUE_MAX];
9067 bool supportBurst = false;
9068
9069 supportBurst = supportBurstCapture(cameraId);
9070
9071    /* If the sensor is a YUV sensor (no raw support), or if per-frame control is
9072     * not guaranteed, or if the min fps of the max resolution is less than 20 fps,
9073     * it is advertised as a limited device */
9074 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9075 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9076 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9077 !supportBurst;
9078
9079 uint8_t supportedHwLvl = limitedDevice ?
9080 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009081#ifndef USE_HAL_3_3
9082 // LEVEL_3 - This device will support level 3.
9083 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9084#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009085 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009086#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009087
9088 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9089 &supportedHwLvl, 1);
9090
9091 bool facingBack = false;
9092 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9093 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9094 facingBack = true;
9095 }
9096 /*HAL 3 only*/
9097 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9098 &gCamCapability[cameraId]->min_focus_distance, 1);
9099
9100 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9101 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9102
9103 /*should be using focal lengths but sensor doesn't provide that info now*/
9104 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9105 &gCamCapability[cameraId]->focal_length,
9106 1);
9107
9108 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9109 gCamCapability[cameraId]->apertures,
9110 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9111
9112 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9113 gCamCapability[cameraId]->filter_densities,
9114 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9115
9116
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009117 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9118 size_t mode_count =
9119 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9120 for (size_t i = 0; i < mode_count; i++) {
9121 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9122 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009123 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009124 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009125
9126 int32_t lens_shading_map_size[] = {
9127 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9128 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9129 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9130 lens_shading_map_size,
9131 sizeof(lens_shading_map_size)/sizeof(int32_t));
9132
9133 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9134 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9135
9136 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9137 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9138
9139 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9140 &gCamCapability[cameraId]->max_frame_duration, 1);
9141
9142 camera_metadata_rational baseGainFactor = {
9143 gCamCapability[cameraId]->base_gain_factor.numerator,
9144 gCamCapability[cameraId]->base_gain_factor.denominator};
9145 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9146 &baseGainFactor, 1);
9147
9148 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9149 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9150
9151 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9152 gCamCapability[cameraId]->pixel_array_size.height};
9153 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9154 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9155
9156 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9157 gCamCapability[cameraId]->active_array_size.top,
9158 gCamCapability[cameraId]->active_array_size.width,
9159 gCamCapability[cameraId]->active_array_size.height};
9160 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9161 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9162
9163 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9164 &gCamCapability[cameraId]->white_level, 1);
9165
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009166 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9167 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9168 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009169 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009170 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009171
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009172#ifndef USE_HAL_3_3
9173 bool hasBlackRegions = false;
9174 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9175 LOGW("black_region_count: %d is bounded to %d",
9176 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9177 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9178 }
9179 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9180 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9181 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9182 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9183 }
9184 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9185 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9186 hasBlackRegions = true;
9187 }
9188#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009189 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9190 &gCamCapability[cameraId]->flash_charge_duration, 1);
9191
9192 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9193 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9194
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009195 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9196 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9197 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009198 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9199 &timestampSource, 1);
9200
Thierry Strudel54dc9782017-02-15 12:12:10 -08009201 //update histogram vendor data
9202 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009203 &gCamCapability[cameraId]->histogram_size, 1);
9204
Thierry Strudel54dc9782017-02-15 12:12:10 -08009205 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009206 &gCamCapability[cameraId]->max_histogram_count, 1);
9207
Shuzhen Wang14415f52016-11-16 18:26:18 -08009208 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9209    //so that the app can request fewer bins than the maximum supported.
9210 std::vector<int32_t> histBins;
9211 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9212 histBins.push_back(maxHistBins);
9213 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9214 (maxHistBins & 0x1) == 0) {
9215 histBins.push_back(maxHistBins >> 1);
9216 maxHistBins >>= 1;
9217 }
9218 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9219 histBins.data(), histBins.size());
9220
Thierry Strudel3d639192016-09-09 11:52:26 -07009221 int32_t sharpness_map_size[] = {
9222 gCamCapability[cameraId]->sharpness_map_size.width,
9223 gCamCapability[cameraId]->sharpness_map_size.height};
9224
9225 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9226 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9227
9228 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9229 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9230
Emilian Peev0f3c3162017-03-15 12:57:46 +00009231 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9232 if (0 <= indexPD) {
9233 // Advertise PD stats data as part of the Depth capabilities
9234 int32_t depthWidth =
9235 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9236 int32_t depthHeight =
9237 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9238 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9239 assert(0 < depthSamplesCount);
9240 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9241 &depthSamplesCount, 1);
9242
9243 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9244 depthHeight,
9245 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9246 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9247 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9248 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9249 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9250
9251 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9252 depthHeight, 33333333,
9253 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9254 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9255 depthMinDuration,
9256 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9257
9258 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9259 depthHeight, 0,
9260 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9261 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9262 depthStallDuration,
9263 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9264
9265 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9266 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9267 }
9268
Thierry Strudel3d639192016-09-09 11:52:26 -07009269 int32_t scalar_formats[] = {
9270 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9271 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9272 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9273 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9274 HAL_PIXEL_FORMAT_RAW10,
9275 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009276 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9277 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9278 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009279
9280 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9281 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9282 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9283 count, MAX_SIZES_CNT, available_processed_sizes);
9284 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9285 available_processed_sizes, count * 2);
9286
9287 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9288 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9289 makeTable(gCamCapability[cameraId]->raw_dim,
9290 count, MAX_SIZES_CNT, available_raw_sizes);
9291 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9292 available_raw_sizes, count * 2);
9293
9294 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9295 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9296 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9297 count, MAX_SIZES_CNT, available_fps_ranges);
9298 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9299 available_fps_ranges, count * 2);
9300
9301 camera_metadata_rational exposureCompensationStep = {
9302 gCamCapability[cameraId]->exp_compensation_step.numerator,
9303 gCamCapability[cameraId]->exp_compensation_step.denominator};
9304 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9305 &exposureCompensationStep, 1);
9306
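// Video stabilization: OFF is always advertised; ON (EIS) is added only for the back camera
// when the persist.camera.eis.enable property is set and the sensor supports EIS 2.0 or 3.0.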
9307 Vector<uint8_t> availableVstabModes;
9308 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9309 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009310 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009311 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009312 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009313 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009314 count = IS_TYPE_MAX;
9315 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9316 for (size_t i = 0; i < count; i++) {
9317 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9318 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9319 eisSupported = true;
9320 break;
9321 }
9322 }
9323 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009324 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9325 }
9326 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9327 availableVstabModes.array(), availableVstabModes.size());
9328
9329 /*HAL 1 and HAL 3 common*/
9330 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9331 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9332 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009333 // Cap the max zoom to the max preferred value
9334 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009335 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9336 &maxZoom, 1);
9337
9338 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9339 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9340
9341 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9342 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9343 max3aRegions[2] = 0; /* AF not supported */
9344 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9345 max3aRegions, 3);
9346
9347 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9348 memset(prop, 0, sizeof(prop));
9349 property_get("persist.camera.facedetect", prop, "1");
9350 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9351 LOGD("Support face detection mode: %d",
9352 supportedFaceDetectMode);
9353
9354 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009355 /* supported mode should be OFF if the max number of faces is 0 */
9356 if (maxFaces <= 0) {
9357 supportedFaceDetectMode = 0;
9358 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009359 Vector<uint8_t> availableFaceDetectModes;
9360 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9361 if (supportedFaceDetectMode == 1) {
9362 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9363 } else if (supportedFaceDetectMode == 2) {
9364 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9365 } else if (supportedFaceDetectMode == 3) {
9366 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9367 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9368 } else {
9369 maxFaces = 0;
9370 }
9371 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9372 availableFaceDetectModes.array(),
9373 availableFaceDetectModes.size());
9374 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9375 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009376 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9377 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9378 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009379
9380 int32_t exposureCompensationRange[] = {
9381 gCamCapability[cameraId]->exposure_compensation_min,
9382 gCamCapability[cameraId]->exposure_compensation_max};
9383 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9384 exposureCompensationRange,
9385 sizeof(exposureCompensationRange)/sizeof(int32_t));
9386
9387 uint8_t lensFacing = (facingBack) ?
9388 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9389 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9390
9391 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9392 available_thumbnail_sizes,
9393 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9394
9395 /* all picture sizes will be combined into this tag */
9396 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9397 /*android.scaler.availableStreamConfigurations*/
9398 Vector<int32_t> available_stream_configs;
9399 cam_dimension_t active_array_dim;
9400 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9401 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009402
9403 /*Advertise the list of supported input dimensions based on the property below.
9404 By default, only sizes of 5MP and larger will be advertised as input sizes.
9405 Note that the setprop resolution format should be WxH.
9406 e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9407 To list all supported sizes, the setprop needs to be set to "0x0" */
9408 cam_dimension_t minInputSize = {2592,1944}; //5MP
9409 memset(prop, 0, sizeof(prop));
9410 property_get("persist.camera.input.minsize", prop, "2592x1944");
9411 if (strlen(prop) > 0) {
9412 char *saveptr = NULL;
9413 char *token = strtok_r(prop, "x", &saveptr);
9414 if (token != NULL) {
9415 minInputSize.width = atoi(token);
9416 }
9417 token = strtok_r(NULL, "x", &saveptr);
9418 if (token != NULL) {
9419 minInputSize.height = atoi(token);
9420 }
9421 }
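// e.g. "1280x720" parses to minInputSize = {1280, 720}; "0x0" makes every picture size
// eligible to be advertised as an input stream.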
9422
Thierry Strudel3d639192016-09-09 11:52:26 -07009423 /* Add input/output stream configurations for each scalar format */
9424 for (size_t j = 0; j < scalar_formats_count; j++) {
9425 switch (scalar_formats[j]) {
9426 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9427 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9428 case HAL_PIXEL_FORMAT_RAW10:
9429 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9430 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9431 addStreamConfig(available_stream_configs, scalar_formats[j],
9432 gCamCapability[cameraId]->raw_dim[i],
9433 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9434 }
9435 break;
9436 case HAL_PIXEL_FORMAT_BLOB:
9437 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9438 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9439 addStreamConfig(available_stream_configs, scalar_formats[j],
9440 gCamCapability[cameraId]->picture_sizes_tbl[i],
9441 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9442 }
9443 break;
9444 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9445 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9446 default:
9447 cam_dimension_t largest_picture_size;
9448 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9449 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9450 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9451 addStreamConfig(available_stream_configs, scalar_formats[j],
9452 gCamCapability[cameraId]->picture_sizes_tbl[i],
9453 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009454 /* For the two formats below we also support input streams for reprocessing; advertise those as well */
9455 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9456 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9457 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9458 >= minInputSize.width) || (gCamCapability[cameraId]->
9459 picture_sizes_tbl[i].height >= minInputSize.height)) {
9460 addStreamConfig(available_stream_configs, scalar_formats[j],
9461 gCamCapability[cameraId]->picture_sizes_tbl[i],
9462 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9463 }
9464 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009465 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009466
Thierry Strudel3d639192016-09-09 11:52:26 -07009467 break;
9468 }
9469 }
9470
9471 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9472 available_stream_configs.array(), available_stream_configs.size());
9473 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9474 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9475
9476 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9477 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9478
9479 /* android.scaler.availableMinFrameDurations */
9480 Vector<int64_t> available_min_durations;
9481 for (size_t j = 0; j < scalar_formats_count; j++) {
9482 switch (scalar_formats[j]) {
9483 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9484 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9485 case HAL_PIXEL_FORMAT_RAW10:
9486 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9487 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9488 available_min_durations.add(scalar_formats[j]);
9489 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9490 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9491 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9492 }
9493 break;
9494 default:
9495 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9496 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9497 available_min_durations.add(scalar_formats[j]);
9498 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9499 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9500 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9501 }
9502 break;
9503 }
9504 }
9505 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9506 available_min_durations.array(), available_min_durations.size());
9507
9508 Vector<int32_t> available_hfr_configs;
9509 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9510 int32_t fps = 0;
9511 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9512 case CAM_HFR_MODE_60FPS:
9513 fps = 60;
9514 break;
9515 case CAM_HFR_MODE_90FPS:
9516 fps = 90;
9517 break;
9518 case CAM_HFR_MODE_120FPS:
9519 fps = 120;
9520 break;
9521 case CAM_HFR_MODE_150FPS:
9522 fps = 150;
9523 break;
9524 case CAM_HFR_MODE_180FPS:
9525 fps = 180;
9526 break;
9527 case CAM_HFR_MODE_210FPS:
9528 fps = 210;
9529 break;
9530 case CAM_HFR_MODE_240FPS:
9531 fps = 240;
9532 break;
9533 case CAM_HFR_MODE_480FPS:
9534 fps = 480;
9535 break;
9536 case CAM_HFR_MODE_OFF:
9537 case CAM_HFR_MODE_MAX:
9538 default:
9539 break;
9540 }
9541
9542 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9543 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9544 /* For each HFR frame rate, need to advertise one variable fps range
9545 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9546 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9547 * set by the app. When video recording is started, [120, 120] is
9548 * set. This way sensor configuration does not change when recording
9549 * is started */
9550
9551 /* (width, height, fps_min, fps_max, batch_size_max) */
9552 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9553 j < MAX_SIZES_CNT; j++) {
9554 available_hfr_configs.add(
9555 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9556 available_hfr_configs.add(
9557 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9558 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9559 available_hfr_configs.add(fps);
9560 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9561
9562 /* (width, height, fps_min, fps_max, batch_size_max) */
9563 available_hfr_configs.add(
9564 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9565 available_hfr_configs.add(
9566 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9567 available_hfr_configs.add(fps);
9568 available_hfr_configs.add(fps);
9569 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9570 }
9571 }
9572 }
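// Example: a 240 fps HFR entry at 1920x1080 adds (1920, 1080, 30, 240, 8) and
// (1920, 1080, 240, 240, 8), assuming PREVIEW_FPS_FOR_HFR is 30.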
9573 //Advertise HFR capability only if the property is set
9574 memset(prop, 0, sizeof(prop));
9575 property_get("persist.camera.hal3hfr.enable", prop, "1");
9576 uint8_t hfrEnable = (uint8_t)atoi(prop);
9577
9578 if(hfrEnable && available_hfr_configs.array()) {
9579 staticInfo.update(
9580 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9581 available_hfr_configs.array(), available_hfr_configs.size());
9582 }
9583
9584 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9585 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9586 &max_jpeg_size, 1);
9587
9588 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9589 size_t size = 0;
9590 count = CAM_EFFECT_MODE_MAX;
9591 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9592 for (size_t i = 0; i < count; i++) {
9593 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9594 gCamCapability[cameraId]->supported_effects[i]);
9595 if (NAME_NOT_FOUND != val) {
9596 avail_effects[size] = (uint8_t)val;
9597 size++;
9598 }
9599 }
9600 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9601 avail_effects,
9602 size);
9603
9604 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9605 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9606 size_t supported_scene_modes_cnt = 0;
9607 count = CAM_SCENE_MODE_MAX;
9608 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9609 for (size_t i = 0; i < count; i++) {
9610 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9611 CAM_SCENE_MODE_OFF) {
9612 int val = lookupFwkName(SCENE_MODES_MAP,
9613 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9614 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009615
Thierry Strudel3d639192016-09-09 11:52:26 -07009616 if (NAME_NOT_FOUND != val) {
9617 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9618 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9619 supported_scene_modes_cnt++;
9620 }
9621 }
9622 }
9623 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9624 avail_scene_modes,
9625 supported_scene_modes_cnt);
9626
9627 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9628 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9629 supported_scene_modes_cnt,
9630 CAM_SCENE_MODE_MAX,
9631 scene_mode_overrides,
9632 supported_indexes,
9633 cameraId);
9634
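// If the backend reports no scene modes, fall back to advertising DISABLED so the list is
// never empty.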
9635 if (supported_scene_modes_cnt == 0) {
9636 supported_scene_modes_cnt = 1;
9637 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9638 }
9639
9640 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9641 scene_mode_overrides, supported_scene_modes_cnt * 3);
9642
9643 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9644 ANDROID_CONTROL_MODE_AUTO,
9645 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9646 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9647 available_control_modes,
9648 3);
9649
9650 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9651 size = 0;
9652 count = CAM_ANTIBANDING_MODE_MAX;
9653 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9654 for (size_t i = 0; i < count; i++) {
9655 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9656 gCamCapability[cameraId]->supported_antibandings[i]);
9657 if (NAME_NOT_FOUND != val) {
9658 avail_antibanding_modes[size] = (uint8_t)val;
9659 size++;
9660 }
9661
9662 }
9663 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9664 avail_antibanding_modes,
9665 size);
9666
9667 uint8_t avail_abberation_modes[] = {
9668 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9669 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9670 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9671 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9672 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9673 if (0 == count) {
9674 // If no aberration correction modes are available for a device, advertise only the OFF mode
9675 size = 1;
9676 } else {
9677 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9678 // So, advertise all 3 modes if at least one mode is supported, as per the
9679 // new M requirement.
9680 size = 3;
9681 }
9682 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9683 avail_abberation_modes,
9684 size);
9685
9686 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9687 size = 0;
9688 count = CAM_FOCUS_MODE_MAX;
9689 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9690 for (size_t i = 0; i < count; i++) {
9691 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9692 gCamCapability[cameraId]->supported_focus_modes[i]);
9693 if (NAME_NOT_FOUND != val) {
9694 avail_af_modes[size] = (uint8_t)val;
9695 size++;
9696 }
9697 }
9698 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9699 avail_af_modes,
9700 size);
9701
9702 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9703 size = 0;
9704 count = CAM_WB_MODE_MAX;
9705 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9706 for (size_t i = 0; i < count; i++) {
9707 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9708 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9709 gCamCapability[cameraId]->supported_white_balances[i]);
9710 if (NAME_NOT_FOUND != val) {
9711 avail_awb_modes[size] = (uint8_t)val;
9712 size++;
9713 }
9714 }
9715 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9716 avail_awb_modes,
9717 size);
9718
9719 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9720 count = CAM_FLASH_FIRING_LEVEL_MAX;
9721 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9722 count);
9723 for (size_t i = 0; i < count; i++) {
9724 available_flash_levels[i] =
9725 gCamCapability[cameraId]->supported_firing_levels[i];
9726 }
9727 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9728 available_flash_levels, count);
9729
9730 uint8_t flashAvailable;
9731 if (gCamCapability[cameraId]->flash_available)
9732 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9733 else
9734 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9735 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9736 &flashAvailable, 1);
9737
9738 Vector<uint8_t> avail_ae_modes;
9739 count = CAM_AE_MODE_MAX;
9740 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9741 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009742 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9743 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9744 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9745 }
9746 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009747 }
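// When a flash unit is present, additionally advertise the auto-flash and always-flash AE modes.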
9748 if (flashAvailable) {
9749 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9750 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9751 }
9752 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9753 avail_ae_modes.array(),
9754 avail_ae_modes.size());
9755
9756 int32_t sensitivity_range[2];
9757 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9758 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9759 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9760 sensitivity_range,
9761 sizeof(sensitivity_range) / sizeof(int32_t));
9762
9763 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9764 &gCamCapability[cameraId]->max_analog_sensitivity,
9765 1);
9766
9767 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9768 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9769 &sensor_orientation,
9770 1);
9771
9772 int32_t max_output_streams[] = {
9773 MAX_STALLING_STREAMS,
9774 MAX_PROCESSED_STREAMS,
9775 MAX_RAW_STREAMS};
9776 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9777 max_output_streams,
9778 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9779
9780 uint8_t avail_leds = 0;
9781 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9782 &avail_leds, 0);
9783
9784 uint8_t focus_dist_calibrated;
9785 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9786 gCamCapability[cameraId]->focus_dist_calibrated);
9787 if (NAME_NOT_FOUND != val) {
9788 focus_dist_calibrated = (uint8_t)val;
9789 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9790 &focus_dist_calibrated, 1);
9791 }
9792
9793 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9794 size = 0;
9795 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9796 MAX_TEST_PATTERN_CNT);
9797 for (size_t i = 0; i < count; i++) {
9798 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9799 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9800 if (NAME_NOT_FOUND != testpatternMode) {
9801 avail_testpattern_modes[size] = testpatternMode;
9802 size++;
9803 }
9804 }
9805 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9806 avail_testpattern_modes,
9807 size);
9808
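// Maximum pipeline depth reported to the framework: the in-flight request budget plus the
// empty-pipeline and frame-skip delays.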
9809 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9810 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9811 &max_pipeline_depth,
9812 1);
9813
9814 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9815 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9816 &partial_result_count,
9817 1);
9818
9819 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9820 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9821
9822 Vector<uint8_t> available_capabilities;
9823 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9824 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9825 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9826 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9827 if (supportBurst) {
9828 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9829 }
9830 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9831 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9832 if (hfrEnable && available_hfr_configs.array()) {
9833 available_capabilities.add(
9834 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9835 }
9836
9837 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9838 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9839 }
9840 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9841 available_capabilities.array(),
9842 available_capabilities.size());
9843
9844 //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9845 //Assumption is that all Bayer cameras support MANUAL_SENSOR.
9846 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9847 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9848
9849 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9850 &aeLockAvailable, 1);
9851
9852 //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9853 //BURST_CAPTURE. Assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9854 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9855 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9856
9857 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9858 &awbLockAvailable, 1);
9859
9860 int32_t max_input_streams = 1;
9861 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9862 &max_input_streams,
9863 1);
9864
9865 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9866 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9867 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9868 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9869 HAL_PIXEL_FORMAT_YCbCr_420_888};
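// Read as: IMPLEMENTATION_DEFINED input reprocesses to BLOB or YCbCr_420_888, and
// YCbCr_420_888 input reprocesses to BLOB or YCbCr_420_888.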
9870 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9871 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9872
9873 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9874 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9875 &max_latency,
9876 1);
9877
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009878#ifndef USE_HAL_3_3
9879 int32_t isp_sensitivity_range[2];
9880 isp_sensitivity_range[0] =
9881 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9882 isp_sensitivity_range[1] =
9883 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9884 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9885 isp_sensitivity_range,
9886 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9887#endif
9888
Thierry Strudel3d639192016-09-09 11:52:26 -07009889 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9890 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9891 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9892 available_hot_pixel_modes,
9893 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9894
9895 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9896 ANDROID_SHADING_MODE_FAST,
9897 ANDROID_SHADING_MODE_HIGH_QUALITY};
9898 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9899 available_shading_modes,
9900 3);
9901
9902 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9903 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9904 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9905 available_lens_shading_map_modes,
9906 2);
9907
9908 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9909 ANDROID_EDGE_MODE_FAST,
9910 ANDROID_EDGE_MODE_HIGH_QUALITY,
9911 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9912 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9913 available_edge_modes,
9914 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9915
9916 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9917 ANDROID_NOISE_REDUCTION_MODE_FAST,
9918 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9919 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9920 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9921 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9922 available_noise_red_modes,
9923 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9924
9925 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9926 ANDROID_TONEMAP_MODE_FAST,
9927 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9928 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9929 available_tonemap_modes,
9930 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9931
9932 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9933 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9934 available_hot_pixel_map_modes,
9935 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9936
9937 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9938 gCamCapability[cameraId]->reference_illuminant1);
9939 if (NAME_NOT_FOUND != val) {
9940 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9941 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9942 }
9943
9944 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9945 gCamCapability[cameraId]->reference_illuminant2);
9946 if (NAME_NOT_FOUND != val) {
9947 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9948 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9949 }
9950
9951 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9952 (void *)gCamCapability[cameraId]->forward_matrix1,
9953 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9954
9955 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9956 (void *)gCamCapability[cameraId]->forward_matrix2,
9957 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9958
9959 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9960 (void *)gCamCapability[cameraId]->color_transform1,
9961 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9962
9963 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9964 (void *)gCamCapability[cameraId]->color_transform2,
9965 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9966
9967 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9968 (void *)gCamCapability[cameraId]->calibration_transform1,
9969 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9970
9971 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9972 (void *)gCamCapability[cameraId]->calibration_transform2,
9973 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9974
9975 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9976 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9977 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9978 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9979 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9980 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9981 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9982 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9983 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9984 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9985 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9986 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9987 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9988 ANDROID_JPEG_GPS_COORDINATES,
9989 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9990 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9991 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9992 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9993 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9994 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9995 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9996 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9997 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9998 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009999#ifndef USE_HAL_3_3
10000 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10001#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010002 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010003 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010004 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10005 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010006 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010007 /* DevCamDebug metadata request_keys_basic */
10008 DEVCAMDEBUG_META_ENABLE,
10009 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010010 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010011 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010012 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010013 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Samuel Ha68ba5172016-12-15 18:41:12 -080010014 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010015
10016 size_t request_keys_cnt =
10017 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10018 Vector<int32_t> available_request_keys;
10019 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10020 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10021 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10022 }
10023
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010024 if (gExposeEnableZslKey) {
Chien-Yu Chened0a4c92017-05-01 18:25:03 +000010025 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010026 }
10027
Thierry Strudel3d639192016-09-09 11:52:26 -070010028 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10029 available_request_keys.array(), available_request_keys.size());
10030
10031 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10032 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10033 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10034 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10035 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10036 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10037 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10038 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10039 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10040 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10041 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10042 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10043 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10044 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10045 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10046 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10047 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010048 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010049 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10050 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10051 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010052 ANDROID_STATISTICS_FACE_SCORES,
10053#ifndef USE_HAL_3_3
10054 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10055#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010056 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010057 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010058 // DevCamDebug metadata result_keys_basic
10059 DEVCAMDEBUG_META_ENABLE,
10060 // DevCamDebug metadata result_keys AF
10061 DEVCAMDEBUG_AF_LENS_POSITION,
10062 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10063 DEVCAMDEBUG_AF_TOF_DISTANCE,
10064 DEVCAMDEBUG_AF_LUMA,
10065 DEVCAMDEBUG_AF_HAF_STATE,
10066 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10067 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10068 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10069 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10070 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10071 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10072 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10073 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10074 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10075 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10076 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10077 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10078 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10079 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10080 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10081 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10082 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10083 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10084 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10085 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10086 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10087 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10088 // DevCamDebug metadata result_keys AEC
10089 DEVCAMDEBUG_AEC_TARGET_LUMA,
10090 DEVCAMDEBUG_AEC_COMP_LUMA,
10091 DEVCAMDEBUG_AEC_AVG_LUMA,
10092 DEVCAMDEBUG_AEC_CUR_LUMA,
10093 DEVCAMDEBUG_AEC_LINECOUNT,
10094 DEVCAMDEBUG_AEC_REAL_GAIN,
10095 DEVCAMDEBUG_AEC_EXP_INDEX,
10096 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010097 // DevCamDebug metadata result_keys zzHDR
10098 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10099 DEVCAMDEBUG_AEC_L_LINECOUNT,
10100 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10101 DEVCAMDEBUG_AEC_S_LINECOUNT,
10102 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10103 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10104 // DevCamDebug metadata result_keys ADRC
10105 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10106 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10107 DEVCAMDEBUG_AEC_GTM_RATIO,
10108 DEVCAMDEBUG_AEC_LTM_RATIO,
10109 DEVCAMDEBUG_AEC_LA_RATIO,
10110 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010111 // DevCamDebug metadata result_keys AWB
10112 DEVCAMDEBUG_AWB_R_GAIN,
10113 DEVCAMDEBUG_AWB_G_GAIN,
10114 DEVCAMDEBUG_AWB_B_GAIN,
10115 DEVCAMDEBUG_AWB_CCT,
10116 DEVCAMDEBUG_AWB_DECISION,
10117 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010118 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10119 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10120 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010121 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010122 };
10123
Thierry Strudel3d639192016-09-09 11:52:26 -070010124 size_t result_keys_cnt =
10125 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10126
10127 Vector<int32_t> available_result_keys;
10128 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10129 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10130 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10131 }
10132 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10133 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10134 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10135 }
10136 if (supportedFaceDetectMode == 1) {
10137 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10138 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10139 } else if ((supportedFaceDetectMode == 2) ||
10140 (supportedFaceDetectMode == 3)) {
10141 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10142 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10143 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010144#ifndef USE_HAL_3_3
10145 if (hasBlackRegions) {
10146 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10147 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10148 }
10149#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010150
10151 if (gExposeEnableZslKey) {
10152 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10153 }
10154
Thierry Strudel3d639192016-09-09 11:52:26 -070010155 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10156 available_result_keys.array(), available_result_keys.size());
10157
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010158 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010159 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10160 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10161 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10162 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10163 ANDROID_SCALER_CROPPING_TYPE,
10164 ANDROID_SYNC_MAX_LATENCY,
10165 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10166 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10167 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10168 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10169 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10170 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10171 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10172 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10173 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10174 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10175 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10176 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10177 ANDROID_LENS_FACING,
10178 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10179 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10180 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10181 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10182 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10183 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10184 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10185 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10186 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10187 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10188 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10189 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10190 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10191 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10192 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10193 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10194 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10195 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10196 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10197 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010198 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010199 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10200 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10201 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10202 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10203 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10204 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10205 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10206 ANDROID_CONTROL_AVAILABLE_MODES,
10207 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10208 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10209 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10210 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010211 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10212#ifndef USE_HAL_3_3
10213 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10214 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10215#endif
10216 };
10217
10218 Vector<int32_t> available_characteristics_keys;
10219 available_characteristics_keys.appendArray(characteristics_keys_basic,
10220 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10221#ifndef USE_HAL_3_3
10222 if (hasBlackRegions) {
10223 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10224 }
10225#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010226
10227 if (0 <= indexPD) {
10228 int32_t depthKeys[] = {
10229 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10230 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10231 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10232 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10233 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10234 };
10235 available_characteristics_keys.appendArray(depthKeys,
10236 sizeof(depthKeys) / sizeof(depthKeys[0]));
10237 }
10238
Thierry Strudel3d639192016-09-09 11:52:26 -070010239 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010240 available_characteristics_keys.array(),
10241 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010242
10243 /* Available stall durations depend on the HW + SW and will differ between devices */
10244 /* have to add for raw after implementation */
10245 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10246 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10247
10248 Vector<int64_t> available_stall_durations;
10249 for (uint32_t j = 0; j < stall_formats_count; j++) {
10250 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10251 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10252 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10253 available_stall_durations.add(stall_formats[j]);
10254 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10255 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10256 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10257 }
10258 } else {
10259 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10260 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10261 available_stall_durations.add(stall_formats[j]);
10262 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10263 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10264 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10265 }
10266 }
10267 }
10268 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10269 available_stall_durations.array(),
10270 available_stall_durations.size());
10271
10272 //QCAMERA3_OPAQUE_RAW
10273 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10274 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10275 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10276 case LEGACY_RAW:
10277 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10278 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10279 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10280 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10281 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10282 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10283 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10284 break;
10285 case MIPI_RAW:
10286 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10287 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10288 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10289 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10290 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10291 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10292 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10293 break;
10294 default:
10295 LOGE("unknown opaque_raw_format %d",
10296 gCamCapability[cameraId]->opaque_raw_fmt);
10297 break;
10298 }
10299 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
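// For every supported RAW dimension, publish a (width, height, stride) triple computed from
// the opaque RAW format selected above.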
10300
10301 Vector<int32_t> strides;
10302 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10303 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10304 cam_stream_buf_plane_info_t buf_planes;
10305 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10306 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10307 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10308 &gCamCapability[cameraId]->padding_info, &buf_planes);
10309 strides.add(buf_planes.plane_info.mp[0].stride);
10310 }
10311 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10312 strides.size());
10313
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010314 //TBD: remove the following line once backend advertises zzHDR in feature mask
10315 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010316 //Video HDR default
10317 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10318 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010319 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010320 int32_t vhdr_mode[] = {
10321 QCAMERA3_VIDEO_HDR_MODE_OFF,
10322 QCAMERA3_VIDEO_HDR_MODE_ON};
10323
10324 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10325 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10326 vhdr_mode, vhdr_mode_count);
10327 }
10328
Thierry Strudel3d639192016-09-09 11:52:26 -070010329 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10330 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10331 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10332
10333 uint8_t isMonoOnly =
10334 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10335 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10336 &isMonoOnly, 1);
10337
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010338#ifndef USE_HAL_3_3
10339 Vector<int32_t> opaque_size;
10340 for (size_t j = 0; j < scalar_formats_count; j++) {
10341 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10342 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10343 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10344 cam_stream_buf_plane_info_t buf_planes;
10345
10346 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10347 &gCamCapability[cameraId]->padding_info, &buf_planes);
10348
10349 if (rc == 0) {
10350 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10351 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10352 opaque_size.add(buf_planes.plane_info.frame_len);
10353 } else {
10354 LOGE("raw frame calculation failed!");
10355 }
10356 }
10357 }
10358 }
10359
10360 if ((opaque_size.size() > 0) &&
10361 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10362 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10363 else
10364 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using a rough estimation (2 bytes/pixel)");
10365#endif
10366
Thierry Strudel04e026f2016-10-10 11:27:36 -070010367 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10368 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10369 size = 0;
10370 count = CAM_IR_MODE_MAX;
10371 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10372 for (size_t i = 0; i < count; i++) {
10373 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10374 gCamCapability[cameraId]->supported_ir_modes[i]);
10375 if (NAME_NOT_FOUND != val) {
10376 avail_ir_modes[size] = (int32_t)val;
10377 size++;
10378 }
10379 }
10380 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10381 avail_ir_modes, size);
10382 }
10383
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010384 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10385 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10386 size = 0;
10387 count = CAM_AEC_CONVERGENCE_MAX;
10388 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10389 for (size_t i = 0; i < count; i++) {
10390 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10391 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10392 if (NAME_NOT_FOUND != val) {
10393 available_instant_aec_modes[size] = (int32_t)val;
10394 size++;
10395 }
10396 }
10397 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10398 available_instant_aec_modes, size);
10399 }
10400
Thierry Strudel54dc9782017-02-15 12:12:10 -080010401 int32_t sharpness_range[] = {
10402 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10403 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10404 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10405
10406 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10407 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10408 size = 0;
10409 count = CAM_BINNING_CORRECTION_MODE_MAX;
10410 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10411 for (size_t i = 0; i < count; i++) {
10412 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10413 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10414 gCamCapability[cameraId]->supported_binning_modes[i]);
10415 if (NAME_NOT_FOUND != val) {
10416 avail_binning_modes[size] = (int32_t)val;
10417 size++;
10418 }
10419 }
10420 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10421 avail_binning_modes, size);
10422 }
10423
10424 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10425 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10426 size = 0;
10427 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10428 for (size_t i = 0; i < count; i++) {
10429 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10430 gCamCapability[cameraId]->supported_aec_modes[i]);
10431 if (NAME_NOT_FOUND != val)
10432 available_aec_modes[size++] = val;
10433 }
10434 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10435 available_aec_modes, size);
10436 }
10437
10438 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10439 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10440 size = 0;
10441 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10442 for (size_t i = 0; i < count; i++) {
10443 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10444 gCamCapability[cameraId]->supported_iso_modes[i]);
10445 if (NAME_NOT_FOUND != val)
10446 available_iso_modes[size++] = val;
10447 }
10448 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10449 available_iso_modes, size);
10450 }
10451
10452 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010453 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010454 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10455 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10456 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10457
10458 int32_t available_saturation_range[4];
10459 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10460 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10461 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10462 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10463 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10464 available_saturation_range, 4);
10465
10466 uint8_t is_hdr_values[2];
10467 is_hdr_values[0] = 0;
10468 is_hdr_values[1] = 1;
10469 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10470 is_hdr_values, 2);
10471
10472 float is_hdr_confidence_range[2];
10473 is_hdr_confidence_range[0] = 0.0;
10474 is_hdr_confidence_range[1] = 1.0;
10475 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10476 is_hdr_confidence_range, 2);
10477
Emilian Peev0a972ef2017-03-16 10:25:53 +000010478 size_t eepromLength = strnlen(
10479 reinterpret_cast<const char *>(
10480 gCamCapability[cameraId]->eeprom_version_info),
10481 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10482 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010483 char easelInfo[] = ",E:N";
10484 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10485 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10486 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010487 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10488 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010489 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010490 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10491 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10492 }
10493
Thierry Strudel3d639192016-09-09 11:52:26 -070010494 gStaticMetadata[cameraId] = staticInfo.release();
10495 return rc;
10496}
10497
10498/*===========================================================================
10499 * FUNCTION : makeTable
10500 *
 * DESCRIPTION: flatten a table of dimensions into an interleaved
 *              width/height int32 array
 *
 * PARAMETERS :
 *   @dimTable  : input array of cam_dimension_t entries
 *   @size      : number of valid entries in dimTable
 *   @max_size  : maximum number of entries to copy
 *   @sizeTable : output array of interleaved width/height values
 *
10506 *==========================================================================*/
10507void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10508 size_t max_size, int32_t *sizeTable)
10509{
10510 size_t j = 0;
10511 if (size > max_size) {
10512 size = max_size;
10513 }
10514 for (size_t i = 0; i < size; i++) {
10515 sizeTable[j] = dimTable[i].width;
10516 sizeTable[j+1] = dimTable[i].height;
10517 j+=2;
10518 }
10519}
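// Illustrative example (hypothetical dimensions, not taken from capability data):
// a dimTable of {4032x3024, 1920x1080} is flattened into
// sizeTable = {4032, 3024, 1920, 1080}, the interleaved width/height layout
// used by the available-size static metadata tags.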
10520
10521/*===========================================================================
10522 * FUNCTION : makeFPSTable
10523 *
 * DESCRIPTION: flatten a table of fps ranges into an interleaved
 *              min/max int32 array
 *
 * PARAMETERS :
 *   @fpsTable       : input array of cam_fps_range_t entries
 *   @size           : number of valid entries in fpsTable
 *   @max_size       : maximum number of entries to copy
 *   @fpsRangesTable : output array of interleaved min/max fps values
10528 *==========================================================================*/
10529void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10530 size_t max_size, int32_t *fpsRangesTable)
10531{
10532 size_t j = 0;
10533 if (size > max_size) {
10534 size = max_size;
10535 }
10536 for (size_t i = 0; i < size; i++) {
10537 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10538 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10539 j+=2;
10540 }
10541}
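// Illustrative example (hypothetical ranges): an fpsTable of {[15,30], [30,30]}
// becomes fpsRangesTable = {15, 30, 30, 30}, the (min, max) pair layout used by
// ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES.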
10542
10543/*===========================================================================
10544 * FUNCTION : makeOverridesList
10545 *
 * DESCRIPTION: make a list of scene mode overrides (AE, AWB and AF modes) for
 *              the scene modes supported by the framework
 *
 * PARAMETERS :
 *   @overridesTable    : overrides reported by the backend for all scene modes
 *   @size              : number of entries in overridesTable
 *   @max_size          : maximum number of entries to process
 *   @overridesList     : output list of (aeMode, awbMode, afMode) triplets
 *   @supported_indexes : backend indexes of the framework-supported scene modes
 *   @camera_id         : camera id
 *
10551 *==========================================================================*/
10552void QCamera3HardwareInterface::makeOverridesList(
10553 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10554 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10555{
    /* The daemon provides overrides for all scene modes; only the overrides
       for the scene modes supported by the framework are sent to it. */
10559 size_t j = 0;
10560 if (size > max_size) {
10561 size = max_size;
10562 }
10563 size_t focus_count = CAM_FOCUS_MODE_MAX;
10564 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10565 focus_count);
10566 for (size_t i = 0; i < size; i++) {
10567 bool supt = false;
10568 size_t index = supported_indexes[i];
10569 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10570 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10571 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10572 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10573 overridesTable[index].awb_mode);
10574 if (NAME_NOT_FOUND != val) {
10575 overridesList[j+1] = (uint8_t)val;
10576 }
10577 uint8_t focus_override = overridesTable[index].af_mode;
10578 for (size_t k = 0; k < focus_count; k++) {
10579 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10580 supt = true;
10581 break;
10582 }
10583 }
10584 if (supt) {
10585 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10586 focus_override);
10587 if (NAME_NOT_FOUND != val) {
10588 overridesList[j+2] = (uint8_t)val;
10589 }
10590 } else {
10591 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10592 }
10593 j+=3;
10594 }
10595}
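// Illustrative layout note (assumption from the metadata definition):
// ANDROID_CONTROL_SCENE_MODE_OVERRIDES expects one (aeMode, awbMode, afMode)
// triplet per available scene mode, so for a flash-capable camera a single
// entry could be {ON_AUTO_FLASH, AUTO, CONTINUOUS_PICTURE}.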
10596
10597/*===========================================================================
10598 * FUNCTION : filterJpegSizes
10599 *
 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that
 *              are no smaller than the active array size divided by the
 *              maximum downscale factor
 *
 * PARAMETERS :
 *   @jpegSizes         : output array of JPEG sizes (interleaved width/height)
 *   @processedSizes    : input array of processed sizes (interleaved width/height)
 *   @processedSizesCnt : number of int32 entries in processedSizes
 *   @maxCount          : maximum number of int32 entries to process
 *   @active_array_size : sensor active array dimensions
 *   @downscale_factor  : maximum downscale factor
 *
10605 * RETURN : length of jpegSizes array
10606 *==========================================================================*/
10607
10608size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10609 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10610 uint8_t downscale_factor)
10611{
10612 if (0 == downscale_factor) {
10613 downscale_factor = 1;
10614 }
10615
10616 int32_t min_width = active_array_size.width / downscale_factor;
10617 int32_t min_height = active_array_size.height / downscale_factor;
10618 size_t jpegSizesCnt = 0;
10619 if (processedSizesCnt > maxCount) {
10620 processedSizesCnt = maxCount;
10621 }
10622 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10623 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10624 jpegSizes[jpegSizesCnt] = processedSizes[i];
10625 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10626 jpegSizesCnt += 2;
10627 }
10628 }
10629 return jpegSizesCnt;
10630}
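// Illustrative example (hypothetical values): with a 4032x3024 active array and
// downscale_factor = 4, only processed sizes of at least 1008x756 are kept, so
// {4032x3024, 1920x1080, 640x480} filters down to {4032x3024, 1920x1080}.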
10631
10632/*===========================================================================
10633 * FUNCTION : computeNoiseModelEntryS
10634 *
10635 * DESCRIPTION: function to map a given sensitivity to the S noise
10636 * model parameters in the DNG noise model.
10637 *
10638 * PARAMETERS : sens : the sensor sensitivity
10639 *
 * RETURN     : S (sensor amplification) noise
10641 *
10642 *==========================================================================*/
10643double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10644 double s = gCamCapability[mCameraId]->gradient_S * sens +
10645 gCamCapability[mCameraId]->offset_S;
10646 return ((s < 0.0) ? 0.0 : s);
10647}
10648
10649/*===========================================================================
10650 * FUNCTION : computeNoiseModelEntryO
10651 *
10652 * DESCRIPTION: function to map a given sensitivity to the O noise
10653 * model parameters in the DNG noise model.
10654 *
10655 * PARAMETERS : sens : the sensor sensitivity
10656 *
 * RETURN     : O (sensor readout) noise
10658 *
10659 *==========================================================================*/
10660double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10661 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10662 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10663 1.0 : (1.0 * sens / max_analog_sens);
10664 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10665 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10666 return ((o < 0.0) ? 0.0 : o);
10667}
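/* Illustrative note (assumption, not stated in this file): in the DNG/Android
 * NoiseProfile model the per-pixel noise at a normalized signal level x is
 * modeled as sqrt(S * x + O), so the two helpers above are typically used
 * together to report one (S, O) pair for the current sensitivity, e.g.:
 *
 *   double noiseProfile[2];
 *   noiseProfile[0] = computeNoiseModelEntryS(100);  // S at ISO 100
 *   noiseProfile[1] = computeNoiseModelEntryO(100);  // O at ISO 100
 */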
10668
10669/*===========================================================================
10670 * FUNCTION : getSensorSensitivity
10671 *
10672 * DESCRIPTION: convert iso_mode to an integer value
10673 *
10674 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10675 *
 * RETURN     : sensitivity supported by sensor
10677 *
10678 *==========================================================================*/
10679int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10680{
10681 int32_t sensitivity;
10682
10683 switch (iso_mode) {
10684 case CAM_ISO_MODE_100:
10685 sensitivity = 100;
10686 break;
10687 case CAM_ISO_MODE_200:
10688 sensitivity = 200;
10689 break;
10690 case CAM_ISO_MODE_400:
10691 sensitivity = 400;
10692 break;
10693 case CAM_ISO_MODE_800:
10694 sensitivity = 800;
10695 break;
10696 case CAM_ISO_MODE_1600:
10697 sensitivity = 1600;
10698 break;
10699 default:
10700 sensitivity = -1;
10701 break;
10702 }
10703 return sensitivity;
10704}
10705
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010706int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010707 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010708 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10709 // to connect to Easel.
10710 bool doNotpowerOnEasel =
10711 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10712
10713 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010714 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10715 return OK;
10716 }
10717
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010718 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010719 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010720 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010721 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010722 return res;
10723 }
10724
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010725 EaselManagerClientOpened = true;
10726
10727 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010728 if (res != OK) {
10729 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10730 }
10731
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010732 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010733 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010734
10735 // Expose enableZsl key only when HDR+ mode is enabled.
10736 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010737 }
10738
10739 return OK;
10740}
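/* Illustrative usage (assumption, not part of the original source): the Easel /
 * HDR+ behavior above is driven by system properties, which can be toggled from
 * a shell during bring-up, for example:
 *
 *   adb shell setprop persist.camera.hdrplus.enable 1     // run HDR+ instead of bypass-only
 *   adb shell setprop persist.camera.hdrplus.profiling 1  // enable HDR+ profiling
 *   adb shell setprop camera.hdrplus.donotpoweroneasel 1  // leave Easel unpowered for HDR+ tests
 *
 * The new values may only take effect the next time initHdrPlusClientLocked()
 * runs, e.g. after the camera provider process restarts.
 */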
10741
Thierry Strudel3d639192016-09-09 11:52:26 -070010742/*===========================================================================
10743 * FUNCTION : getCamInfo
10744 *
10745 * DESCRIPTION: query camera capabilities
10746 *
10747 * PARAMETERS :
10748 * @cameraId : camera Id
10749 * @info : camera info struct to be filled in with camera capabilities
10750 *
10751 * RETURN : int type of status
10752 * NO_ERROR -- success
10753 * none-zero failure code
10754 *==========================================================================*/
10755int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10756 struct camera_info *info)
10757{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010758 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010759 int rc = 0;
10760
10761 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010762
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010763 {
10764 Mutex::Autolock l(gHdrPlusClientLock);
10765 rc = initHdrPlusClientLocked();
10766 if (rc != OK) {
10767 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10768 pthread_mutex_unlock(&gCamLock);
10769 return rc;
10770 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010771 }
10772
Thierry Strudel3d639192016-09-09 11:52:26 -070010773 if (NULL == gCamCapability[cameraId]) {
10774 rc = initCapabilities(cameraId);
10775 if (rc < 0) {
10776 pthread_mutex_unlock(&gCamLock);
10777 return rc;
10778 }
10779 }
10780
10781 if (NULL == gStaticMetadata[cameraId]) {
10782 rc = initStaticMetadata(cameraId);
10783 if (rc < 0) {
10784 pthread_mutex_unlock(&gCamLock);
10785 return rc;
10786 }
10787 }
10788
10789 switch(gCamCapability[cameraId]->position) {
10790 case CAM_POSITION_BACK:
10791 case CAM_POSITION_BACK_AUX:
10792 info->facing = CAMERA_FACING_BACK;
10793 break;
10794
10795 case CAM_POSITION_FRONT:
10796 case CAM_POSITION_FRONT_AUX:
10797 info->facing = CAMERA_FACING_FRONT;
10798 break;
10799
10800 default:
10801 LOGE("Unknown position type %d for camera id:%d",
10802 gCamCapability[cameraId]->position, cameraId);
10803 rc = -1;
10804 break;
10805 }
10806
10807
10808 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010809#ifndef USE_HAL_3_3
10810 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10811#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010812 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010813#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010814 info->static_camera_characteristics = gStaticMetadata[cameraId];
10815
10816 //For now assume both cameras can operate independently.
10817 info->conflicting_devices = NULL;
10818 info->conflicting_devices_length = 0;
10819
10820 //resource cost is 100 * MIN(1.0, m/M),
10821 //where m is throughput requirement with maximum stream configuration
10822 //and M is CPP maximum throughput.
10823 float max_fps = 0.0;
10824 for (uint32_t i = 0;
10825 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10826 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10827 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10828 }
10829 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10830 gCamCapability[cameraId]->active_array_size.width *
10831 gCamCapability[cameraId]->active_array_size.height * max_fps /
10832 gCamCapability[cameraId]->max_pixel_bandwidth;
10833 info->resource_cost = 100 * MIN(1.0, ratio);
10834 LOGI("camera %d resource cost is %d", cameraId,
10835 info->resource_cost);
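    // Illustrative example (hypothetical numbers, not from real capability data):
    // with a 4032x3024 active array, max_fps = 30 and a CPP bandwidth of
    // 1.2 GP/s, ratio = 3 * 4032 * 3024 * 30 / 1.2e9 ~= 0.91, so resource_cost ~= 91.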
10836
10837 pthread_mutex_unlock(&gCamLock);
10838 return rc;
10839}
10840
10841/*===========================================================================
10842 * FUNCTION : translateCapabilityToMetadata
10843 *
10844 * DESCRIPTION: translate the capability into camera_metadata_t
10845 *
10846 * PARAMETERS : type of the request
10847 *
10848 *
10849 * RETURN : success: camera_metadata_t*
10850 * failure: NULL
10851 *
10852 *==========================================================================*/
10853camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10854{
10855 if (mDefaultMetadata[type] != NULL) {
10856 return mDefaultMetadata[type];
10857 }
10858 //first time we are handling this request
10859 //fill up the metadata structure using the wrapper class
10860 CameraMetadata settings;
10861 //translate from cam_capability_t to camera_metadata_tag_t
10862 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10863 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10864 int32_t defaultRequestID = 0;
10865 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10866
10867 /* OIS disable */
10868 char ois_prop[PROPERTY_VALUE_MAX];
10869 memset(ois_prop, 0, sizeof(ois_prop));
10870 property_get("persist.camera.ois.disable", ois_prop, "0");
10871 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10872
10873 /* Force video to use OIS */
10874 char videoOisProp[PROPERTY_VALUE_MAX];
10875 memset(videoOisProp, 0, sizeof(videoOisProp));
10876 property_get("persist.camera.ois.video", videoOisProp, "1");
10877 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010878
10879 // Hybrid AE enable/disable
10880 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10881 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10882 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10883 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10884
Thierry Strudel3d639192016-09-09 11:52:26 -070010885 uint8_t controlIntent = 0;
10886 uint8_t focusMode;
10887 uint8_t vsMode;
10888 uint8_t optStabMode;
10889 uint8_t cacMode;
10890 uint8_t edge_mode;
10891 uint8_t noise_red_mode;
10892 uint8_t tonemap_mode;
10893 bool highQualityModeEntryAvailable = FALSE;
10894 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010895 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010896 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10897 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010898 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010899 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010900 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010901
Thierry Strudel3d639192016-09-09 11:52:26 -070010902 switch (type) {
10903 case CAMERA3_TEMPLATE_PREVIEW:
10904 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10905 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10906 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10907 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10908 edge_mode = ANDROID_EDGE_MODE_FAST;
10909 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10910 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10911 break;
10912 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10913 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10914 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10915 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10916 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10917 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10918 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10919 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10920 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10921 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10922 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10923 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10924 highQualityModeEntryAvailable = TRUE;
10925 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10926 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10927 fastModeEntryAvailable = TRUE;
10928 }
10929 }
10930 if (highQualityModeEntryAvailable) {
10931 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10932 } else if (fastModeEntryAvailable) {
10933 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10934 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010935 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10936 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10937 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010938 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010939 break;
10940 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10941 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10942 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10943 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010944 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10945 edge_mode = ANDROID_EDGE_MODE_FAST;
10946 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10947 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10948 if (forceVideoOis)
10949 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10950 break;
10951 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10952 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10953 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10954 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010955 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10956 edge_mode = ANDROID_EDGE_MODE_FAST;
10957 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10958 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10959 if (forceVideoOis)
10960 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10961 break;
10962 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10963 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10964 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10965 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10966 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10967 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10968 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10969 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10970 break;
10971 case CAMERA3_TEMPLATE_MANUAL:
10972 edge_mode = ANDROID_EDGE_MODE_FAST;
10973 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10974 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10975 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10976 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10977 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10978 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10979 break;
10980 default:
10981 edge_mode = ANDROID_EDGE_MODE_FAST;
10982 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10983 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10984 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10985 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10986 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10987 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10988 break;
10989 }
    // Set CAC to OFF if the underlying device doesn't support it
10991 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10992 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10993 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010994 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10995 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10996 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10997 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10998 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10999 }
11000 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011001 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011002 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011003
11004 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11005 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11006 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11007 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11008 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11009 || ois_disable)
11010 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11011 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011012 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011013
11014 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11015 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11016
11017 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11018 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11019
11020 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11021 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11022
11023 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11024 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11025
11026 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11027 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11028
11029 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11030 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11031
11032 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11033 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11034
11035 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11036 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11037
11038 /*flash*/
11039 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11040 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11041
11042 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11043 settings.update(ANDROID_FLASH_FIRING_POWER,
11044 &flashFiringLevel, 1);
11045
11046 /* lens */
11047 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11048 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11049
11050 if (gCamCapability[mCameraId]->filter_densities_count) {
11051 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11052 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11053 gCamCapability[mCameraId]->filter_densities_count);
11054 }
11055
11056 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11057 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11058
Thierry Strudel3d639192016-09-09 11:52:26 -070011059 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11060 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11061
11062 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11063 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11064
11065 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11066 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11067
11068 /* face detection (default to OFF) */
11069 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11070 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11071
Thierry Strudel54dc9782017-02-15 12:12:10 -080011072 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11073 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011074
11075 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11076 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11077
11078 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11079 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11080
Thierry Strudel3d639192016-09-09 11:52:26 -070011081
11082 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11083 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11084
    /* Exposure time (update the min exposure time) */
11086 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11087 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11088
11089 /* frame duration */
11090 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11091 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11092
11093 /* sensitivity */
11094 static const int32_t default_sensitivity = 100;
11095 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011096#ifndef USE_HAL_3_3
11097 static const int32_t default_isp_sensitivity =
11098 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11099 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11100#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011101
11102 /*edge mode*/
11103 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11104
11105 /*noise reduction mode*/
11106 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11107
11108 /*color correction mode*/
11109 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11110 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11111
    /* tonemap mode */
11113 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11114
11115 int32_t scaler_crop_region[4];
11116 scaler_crop_region[0] = 0;
11117 scaler_crop_region[1] = 0;
11118 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11119 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11120 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11121
11122 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11123 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11124
11125 /*focus distance*/
11126 float focus_distance = 0.0;
11127 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11128
11129 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011130 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011131 float max_range = 0.0;
11132 float max_fixed_fps = 0.0;
11133 int32_t fps_range[2] = {0, 0};
11134 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11135 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011136 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11137 TEMPLATE_MAX_PREVIEW_FPS) {
11138 continue;
11139 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011140 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11141 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11142 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11143 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11144 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11145 if (range > max_range) {
11146 fps_range[0] =
11147 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11148 fps_range[1] =
11149 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11150 max_range = range;
11151 }
11152 } else {
11153 if (range < 0.01 && max_fixed_fps <
11154 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11155 fps_range[0] =
11156 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11157 fps_range[1] =
11158 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11159 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11160 }
11161 }
11162 }
11163 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
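    // Illustrative example (hypothetical fps table): given ranges {[15,30], [30,30], [60,60]}
    // and a 30 fps template cap, [60,60] is skipped; preview/still/ZSL templates pick
    // [15,30] (widest range) while video templates pick [30,30] (highest fixed rate).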
11164
11165 /*precapture trigger*/
11166 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11167 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11168
11169 /*af trigger*/
11170 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11171 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11172
11173 /* ae & af regions */
11174 int32_t active_region[] = {
11175 gCamCapability[mCameraId]->active_array_size.left,
11176 gCamCapability[mCameraId]->active_array_size.top,
11177 gCamCapability[mCameraId]->active_array_size.left +
11178 gCamCapability[mCameraId]->active_array_size.width,
11179 gCamCapability[mCameraId]->active_array_size.top +
11180 gCamCapability[mCameraId]->active_array_size.height,
11181 0};
11182 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11183 sizeof(active_region) / sizeof(active_region[0]));
11184 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11185 sizeof(active_region) / sizeof(active_region[0]));
11186
11187 /* black level lock */
11188 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11189 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11190
Thierry Strudel3d639192016-09-09 11:52:26 -070011191 //special defaults for manual template
11192 if (type == CAMERA3_TEMPLATE_MANUAL) {
11193 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11194 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11195
11196 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11197 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11198
11199 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11200 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11201
11202 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11203 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11204
11205 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11206 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11207
11208 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11209 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11210 }
11211
11212
    /* TNR
     * This is where we decide for which templates TNR is enabled by default.
     * TNR is enabled if either the preview or the video stream requires it.
     * This is not to be confused with per-stream linking; that decision is
     * still made per session and is handled as part of stream configuration.
     */
11219 uint8_t tnr_enable = 0;
11220
11221 if (m_bTnrPreview || m_bTnrVideo) {
11222
11223 switch (type) {
11224 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11225 tnr_enable = 1;
11226 break;
11227
11228 default:
11229 tnr_enable = 0;
11230 break;
11231 }
11232
11233 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11234 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11235 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11236
11237 LOGD("TNR:%d with process plate %d for template:%d",
11238 tnr_enable, tnr_process_type, type);
11239 }
11240
11241 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011242 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011243 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11244
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011245 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011246 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11247
Shuzhen Wang920ea402017-05-03 08:49:39 -070011248 uint8_t related_camera_id = mCameraId;
11249 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011250
11251 /* CDS default */
11252 char prop[PROPERTY_VALUE_MAX];
11253 memset(prop, 0, sizeof(prop));
11254 property_get("persist.camera.CDS", prop, "Auto");
11255 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11256 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11257 if (CAM_CDS_MODE_MAX == cds_mode) {
11258 cds_mode = CAM_CDS_MODE_AUTO;
11259 }
11260
    /* Disabling CDS in templates which have TNR enabled */
11262 if (tnr_enable)
11263 cds_mode = CAM_CDS_MODE_OFF;
11264
11265 int32_t mode = cds_mode;
11266 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011267
Thierry Strudel269c81a2016-10-12 12:13:59 -070011268 /* Manual Convergence AEC Speed is disabled by default*/
11269 float default_aec_speed = 0;
11270 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11271
11272 /* Manual Convergence AWB Speed is disabled by default*/
11273 float default_awb_speed = 0;
11274 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11275
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011276 // Set instant AEC to normal convergence by default
11277 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11278 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11279
Shuzhen Wang19463d72016-03-08 11:09:52 -080011280 /* hybrid ae */
11281 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11282
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011283 if (gExposeEnableZslKey) {
11284 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11285 }
11286
Thierry Strudel3d639192016-09-09 11:52:26 -070011287 mDefaultMetadata[type] = settings.release();
11288
11289 return mDefaultMetadata[type];
11290}
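/* Illustrative note (assumption based on the camera3 HAL flow, not stated in this
 * file): the templates built above are what the framework receives through
 * camera3_device_ops::construct_default_request_settings(), so an app request
 * created with CameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD)
 * ultimately starts from the CAMERA3_TEMPLATE_VIDEO_RECORD defaults assembled here. */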
11291
11292/*===========================================================================
11293 * FUNCTION : setFrameParameters
11294 *
11295 * DESCRIPTION: set parameters per frame as requested in the metadata from
11296 * framework
11297 *
11298 * PARAMETERS :
11299 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011300 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011301 * @blob_request: Whether this request is a blob request or not
11302 *
11303 * RETURN : success: NO_ERROR
11304 * failure:
11305 *==========================================================================*/
11306int QCamera3HardwareInterface::setFrameParameters(
11307 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011308 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011309 int blob_request,
11310 uint32_t snapshotStreamId)
11311{
11312 /*translate from camera_metadata_t type to parm_type_t*/
11313 int rc = 0;
11314 int32_t hal_version = CAM_HAL_V3;
11315
11316 clear_metadata_buffer(mParameters);
11317 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11318 LOGE("Failed to set hal version in the parameters");
11319 return BAD_VALUE;
11320 }
11321
11322 /*we need to update the frame number in the parameters*/
11323 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11324 request->frame_number)) {
11325 LOGE("Failed to set the frame number in the parameters");
11326 return BAD_VALUE;
11327 }
11328
11329 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011330 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011331 LOGE("Failed to set stream type mask in the parameters");
11332 return BAD_VALUE;
11333 }
11334
11335 if (mUpdateDebugLevel) {
11336 uint32_t dummyDebugLevel = 0;
        /* The value of dummyDebugLevel is irrelevant. When
         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL is set, the debug property is re-read */
11339 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11340 dummyDebugLevel)) {
11341 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11342 return BAD_VALUE;
11343 }
11344 mUpdateDebugLevel = false;
11345 }
11346
11347 if(request->settings != NULL){
11348 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11349 if (blob_request)
11350 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11351 }
11352
11353 return rc;
11354}
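/* Illustrative call sequence (assumption based on the HAL3 request flow, not shown
 * in this excerpt): processCaptureRequest() builds the stream ID list for the
 * request and then calls
 *
 *   rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
 *
 * before handing mParameters to the backend for that frame. */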
11355
11356/*===========================================================================
11357 * FUNCTION : setReprocParameters
11358 *
11359 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11360 * return it.
11361 *
11362 * PARAMETERS :
11363 * @request : request that needs to be serviced
11364 *
11365 * RETURN : success: NO_ERROR
11366 * failure:
11367 *==========================================================================*/
11368int32_t QCamera3HardwareInterface::setReprocParameters(
11369 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11370 uint32_t snapshotStreamId)
11371{
11372 /*translate from camera_metadata_t type to parm_type_t*/
11373 int rc = 0;
11374
11375 if (NULL == request->settings){
11376 LOGE("Reprocess settings cannot be NULL");
11377 return BAD_VALUE;
11378 }
11379
11380 if (NULL == reprocParam) {
11381 LOGE("Invalid reprocessing metadata buffer");
11382 return BAD_VALUE;
11383 }
11384 clear_metadata_buffer(reprocParam);
11385
11386 /*we need to update the frame number in the parameters*/
11387 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11388 request->frame_number)) {
11389 LOGE("Failed to set the frame number in the parameters");
11390 return BAD_VALUE;
11391 }
11392
11393 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11394 if (rc < 0) {
11395 LOGE("Failed to translate reproc request");
11396 return rc;
11397 }
11398
11399 CameraMetadata frame_settings;
11400 frame_settings = request->settings;
11401 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11402 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11403 int32_t *crop_count =
11404 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11405 int32_t *crop_data =
11406 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11407 int32_t *roi_map =
11408 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11409 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11410 cam_crop_data_t crop_meta;
11411 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11412 crop_meta.num_of_streams = 1;
11413 crop_meta.crop_info[0].crop.left = crop_data[0];
11414 crop_meta.crop_info[0].crop.top = crop_data[1];
11415 crop_meta.crop_info[0].crop.width = crop_data[2];
11416 crop_meta.crop_info[0].crop.height = crop_data[3];
11417
11418 crop_meta.crop_info[0].roi_map.left =
11419 roi_map[0];
11420 crop_meta.crop_info[0].roi_map.top =
11421 roi_map[1];
11422 crop_meta.crop_info[0].roi_map.width =
11423 roi_map[2];
11424 crop_meta.crop_info[0].roi_map.height =
11425 roi_map[3];
11426
11427 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11428 rc = BAD_VALUE;
11429 }
11430 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11431 request->input_buffer->stream,
11432 crop_meta.crop_info[0].crop.left,
11433 crop_meta.crop_info[0].crop.top,
11434 crop_meta.crop_info[0].crop.width,
11435 crop_meta.crop_info[0].crop.height);
11436 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11437 request->input_buffer->stream,
11438 crop_meta.crop_info[0].roi_map.left,
11439 crop_meta.crop_info[0].roi_map.top,
11440 crop_meta.crop_info[0].roi_map.width,
11441 crop_meta.crop_info[0].roi_map.height);
11442 } else {
11443 LOGE("Invalid reprocess crop count %d!", *crop_count);
11444 }
11445 } else {
11446 LOGE("No crop data from matching output stream");
11447 }
11448
    /* These settings are not needed for regular requests, so handle them specially
       for reprocess requests; the information is needed for EXIF tags */
11451 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11452 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11453 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11454 if (NAME_NOT_FOUND != val) {
11455 uint32_t flashMode = (uint32_t)val;
11456 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11457 rc = BAD_VALUE;
11458 }
11459 } else {
11460 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11461 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11462 }
11463 } else {
11464 LOGH("No flash mode in reprocess settings");
11465 }
11466
11467 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11468 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11469 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11470 rc = BAD_VALUE;
11471 }
11472 } else {
11473 LOGH("No flash state in reprocess settings");
11474 }
11475
11476 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11477 uint8_t *reprocessFlags =
11478 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11479 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11480 *reprocessFlags)) {
11481 rc = BAD_VALUE;
11482 }
11483 }
11484
Thierry Strudel54dc9782017-02-15 12:12:10 -080011485 // Add exif debug data to internal metadata
11486 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11487 mm_jpeg_debug_exif_params_t *debug_params =
11488 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11489 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11490 // AE
11491 if (debug_params->ae_debug_params_valid == TRUE) {
11492 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11493 debug_params->ae_debug_params);
11494 }
11495 // AWB
11496 if (debug_params->awb_debug_params_valid == TRUE) {
11497 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11498 debug_params->awb_debug_params);
11499 }
11500 // AF
11501 if (debug_params->af_debug_params_valid == TRUE) {
11502 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11503 debug_params->af_debug_params);
11504 }
11505 // ASD
11506 if (debug_params->asd_debug_params_valid == TRUE) {
11507 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11508 debug_params->asd_debug_params);
11509 }
11510 // Stats
11511 if (debug_params->stats_debug_params_valid == TRUE) {
11512 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11513 debug_params->stats_debug_params);
11514 }
11515 // BE Stats
11516 if (debug_params->bestats_debug_params_valid == TRUE) {
11517 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11518 debug_params->bestats_debug_params);
11519 }
11520 // BHIST
11521 if (debug_params->bhist_debug_params_valid == TRUE) {
11522 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11523 debug_params->bhist_debug_params);
11524 }
11525 // 3A Tuning
11526 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11527 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11528 debug_params->q3a_tuning_debug_params);
11529 }
11530 }
11531
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011532 // Add metadata which reprocess needs
11533 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11534 cam_reprocess_info_t *repro_info =
11535 (cam_reprocess_info_t *)frame_settings.find
11536 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011537 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011538 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011539 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011540 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011541 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011542 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011543 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011544 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011545 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011546 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011547 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011548 repro_info->pipeline_flip);
11549 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11550 repro_info->af_roi);
11551 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11552 repro_info->dyn_mask);
        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings, then
           CAM_INTF_PARM_ROTATION metadata has already been added in
           translateToHalMetadata and HAL needs to keep this new rotation
           metadata. Otherwise, the old rotation info saved in the vendor tag
           is used. */
11558 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11559 CAM_INTF_PARM_ROTATION, reprocParam) {
11560 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11561 } else {
11562 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011563 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011564 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011565 }
11566
    /* Add additional JPEG cropping information. The app adds
       QCAMERA3_JPEG_ENCODE_CROP_RECT to request cropping and uses the ROI for
       downscale/upscale during HW JPEG encoding; roi.width and roi.height are
       the final JPEG size. For now, HAL only checks this for reprocess requests */
11571 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11572 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11573 uint8_t *enable =
11574 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11575 if (*enable == TRUE) {
11576 int32_t *crop_data =
11577 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11578 cam_stream_crop_info_t crop_meta;
11579 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11580 crop_meta.stream_id = 0;
11581 crop_meta.crop.left = crop_data[0];
11582 crop_meta.crop.top = crop_data[1];
11583 crop_meta.crop.width = crop_data[2];
11584 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011585 // The JPEG crop roi should match cpp output size
11586 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11587 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11588 crop_meta.roi_map.left = 0;
11589 crop_meta.roi_map.top = 0;
11590 crop_meta.roi_map.width = cpp_crop->crop.width;
11591 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011592 }
11593 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11594 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011595 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011596 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011597 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11598 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011599 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011600 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11601
11602 // Add JPEG scale information
11603 cam_dimension_t scale_dim;
11604 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11605 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11606 int32_t *roi =
11607 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11608 scale_dim.width = roi[2];
11609 scale_dim.height = roi[3];
11610 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11611 scale_dim);
11612 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11613 scale_dim.width, scale_dim.height, mCameraId);
11614 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011615 }
11616 }
11617
11618 return rc;
11619}
11620
11621/*===========================================================================
11622 * FUNCTION : saveRequestSettings
11623 *
11624 * DESCRIPTION: Add any settings that might have changed to the request settings
11625 * and save the settings to be applied on the frame
11626 *
11627 * PARAMETERS :
11628 * @jpegMetadata : the extracted and/or modified jpeg metadata
11629 * @request : request with initial settings
11630 *
11631 * RETURN :
11632 * camera_metadata_t* : pointer to the saved request settings
11633 *==========================================================================*/
11634camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11635 const CameraMetadata &jpegMetadata,
11636 camera3_capture_request_t *request)
11637{
11638 camera_metadata_t *resultMetadata;
11639 CameraMetadata camMetadata;
11640 camMetadata = request->settings;
11641
11642 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11643 int32_t thumbnail_size[2];
11644 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11645 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11646 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11647 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11648 }
11649
11650 if (request->input_buffer != NULL) {
11651 uint8_t reprocessFlags = 1;
11652 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11653 (uint8_t*)&reprocessFlags,
11654 sizeof(reprocessFlags));
11655 }
11656
11657 resultMetadata = camMetadata.release();
11658 return resultMetadata;
11659}
11660
11661/*===========================================================================
11662 * FUNCTION : setHalFpsRange
11663 *
11664 * DESCRIPTION: set FPS range parameter
11665 *
11666 *
11667 * PARAMETERS :
11668 * @settings : Metadata from framework
11669 * @hal_metadata: Metadata buffer
11670 *
11671 *
11672 * RETURN : success: NO_ERROR
11673 * failure:
11674 *==========================================================================*/
11675int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11676 metadata_buffer_t *hal_metadata)
11677{
11678 int32_t rc = NO_ERROR;
11679 cam_fps_range_t fps_range;
11680 fps_range.min_fps = (float)
11681 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11682 fps_range.max_fps = (float)
11683 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11684 fps_range.video_min_fps = fps_range.min_fps;
11685 fps_range.video_max_fps = fps_range.max_fps;
11686
11687 LOGD("aeTargetFpsRange fps: [%f %f]",
11688 fps_range.min_fps, fps_range.max_fps);
11689 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11690 * follows:
11691 * ---------------------------------------------------------------|
11692 * Video stream is absent in configure_streams |
11693 * (Camcorder preview before the first video record |
11694 * ---------------------------------------------------------------|
11695 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11696 * | | | vid_min/max_fps|
11697 * ---------------------------------------------------------------|
11698 * NO | [ 30, 240] | 240 | [240, 240] |
11699 * |-------------|-------------|----------------|
11700 * | [240, 240] | 240 | [240, 240] |
11701 * ---------------------------------------------------------------|
11702 * Video stream is present in configure_streams |
11703 * ---------------------------------------------------------------|
11704 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11705 * | | | vid_min/max_fps|
11706 * ---------------------------------------------------------------|
11707 * NO | [ 30, 240] | 240 | [240, 240] |
11708 * (camcorder prev |-------------|-------------|----------------|
11709 * after video rec | [240, 240] | 240 | [240, 240] |
11710 * is stopped) | | | |
11711 * ---------------------------------------------------------------|
11712 * YES | [ 30, 240] | 240 | [240, 240] |
11713 * |-------------|-------------|----------------|
11714 * | [240, 240] | 240 | [240, 240] |
11715 * ---------------------------------------------------------------|
11716 * When Video stream is absent in configure_streams,
11717 * preview fps = sensor_fps / batchsize
11718 * Eg: for 240fps at batchSize 4, preview = 60fps
11719 * for 120fps at batchSize 4, preview = 30fps
11720 *
11721 * When video stream is present in configure_streams, preview fps is as per
11722 * the ratio of preview buffers to video buffers requested in process
11723 * capture request
11724 */
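    /* Worked example (illustrative only; the concrete values of
     * PREVIEW_FPS_FOR_HFR and MAX_HFR_BATCH_SIZE are defined in QCamera3HWI.h):
     * in CONSTRAINED_HIGH_SPEED_MODE with aeTargetFpsRange = [30, 240],
     * mHFRVideoFps becomes 240 and mBatchSize = 240 / PREVIEW_FPS_FOR_HFR,
     * clamped to MAX_HFR_BATCH_SIZE below. A resulting batch size of 4 gives a
     * 240 / 4 = 60fps preview when no video stream is configured, matching the
     * example in the table above. */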
11725 mBatchSize = 0;
11726 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11727 fps_range.min_fps = fps_range.video_max_fps;
11728 fps_range.video_min_fps = fps_range.video_max_fps;
11729 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11730 fps_range.max_fps);
11731 if (NAME_NOT_FOUND != val) {
11732 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11733 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11734 return BAD_VALUE;
11735 }
11736
11737 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11738 /* If batchmode is currently in progress and the fps changes,
11739 * set the flag to restart the sensor */
11740 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11741 (mHFRVideoFps != fps_range.max_fps)) {
11742 mNeedSensorRestart = true;
11743 }
11744 mHFRVideoFps = fps_range.max_fps;
11745 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11746 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11747 mBatchSize = MAX_HFR_BATCH_SIZE;
11748 }
11749 }
11750 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11751
11752 }
11753 } else {
11754        /* HFR mode is a session parameter in the backend/ISP. It should be
11755         * reset when not in HFR mode */
11756 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11757 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11758 return BAD_VALUE;
11759 }
11760 }
11761 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11762 return BAD_VALUE;
11763 }
11764 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11765 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11766 return rc;
11767}
11768
11769/*===========================================================================
11770 * FUNCTION : translateToHalMetadata
11771 *
11772 * DESCRIPTION: translate settings from the framework camera_metadata_t into HAL parm_type_t entries
11773 *
11774 *
11775 * PARAMETERS :
11776 * @request : request sent from framework
11777 *
11778 *
11779 * RETURN : success: NO_ERROR
11780 *              failure: BAD_VALUE
11781 *==========================================================================*/
11782int QCamera3HardwareInterface::translateToHalMetadata
11783 (const camera3_capture_request_t *request,
11784 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011785 uint32_t snapshotStreamId) {
11786 if (request == nullptr || hal_metadata == nullptr) {
11787 return BAD_VALUE;
11788 }
11789
11790 int64_t minFrameDuration = getMinFrameDuration(request);
11791
11792 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11793 minFrameDuration);
11794}
11795
11796int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11797 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11798 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11799
Thierry Strudel3d639192016-09-09 11:52:26 -070011800 int rc = 0;
11801 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011802 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011803
11804 /* Do not change the order of the following list unless you know what you are
11805 * doing.
11806 * The order is laid out in such a way that parameters in the front of the table
11807 * may be used to override the parameters later in the table. Examples are:
11808 * 1. META_MODE should precede AEC/AWB/AF MODE
11809     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11810 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11811     * 4. Any mode should precede its corresponding settings
11812 */
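    /* Illustrative note on the ordering (an addition, not part of the original
     * comment): ANDROID_CONTROL_AE_MODE is added to the batch before the manual
     * ANDROID_SENSOR_EXPOSURE_TIME / ANDROID_SENSOR_SENSITIVITY entries further
     * down, so a request with AE mode OFF already carries the mode by the time
     * its manual exposure values are translated. */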
11813 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11814 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11815 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11816 rc = BAD_VALUE;
11817 }
11818 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11819 if (rc != NO_ERROR) {
11820 LOGE("extractSceneMode failed");
11821 }
11822 }
11823
11824 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11825 uint8_t fwk_aeMode =
11826 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11827 uint8_t aeMode;
11828 int32_t redeye;
11829
11830 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11831 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011832 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11833 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011834 } else {
11835 aeMode = CAM_AE_MODE_ON;
11836 }
11837 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11838 redeye = 1;
11839 } else {
11840 redeye = 0;
11841 }
11842
11843 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11844 fwk_aeMode);
11845 if (NAME_NOT_FOUND != val) {
11846 int32_t flashMode = (int32_t)val;
11847 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11848 }
11849
11850 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11851 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11852 rc = BAD_VALUE;
11853 }
11854 }
11855
11856 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11857 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11858 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11859 fwk_whiteLevel);
11860 if (NAME_NOT_FOUND != val) {
11861 uint8_t whiteLevel = (uint8_t)val;
11862 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11863 rc = BAD_VALUE;
11864 }
11865 }
11866 }
11867
11868 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11869 uint8_t fwk_cacMode =
11870 frame_settings.find(
11871 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11872 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11873 fwk_cacMode);
11874 if (NAME_NOT_FOUND != val) {
11875 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11876 bool entryAvailable = FALSE;
11877            // Check whether the framework-set CAC mode is supported by the device
11878 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11879 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11880 entryAvailable = TRUE;
11881 break;
11882 }
11883 }
11884 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11885            // If the entry is not found, fall back to a device-supported mode instead of the framework mode, i.e.:
11886            // Only HW ISP CAC + no SW CAC : advertise all 3 modes, with HIGH doing the same as FAST in the ISP
11887            // No HW ISP CAC + only SW CAC : advertise all 3 modes, with FAST doing the same as OFF
11888 if (entryAvailable == FALSE) {
11889 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11890 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11891 } else {
11892 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11893                    // HIGH is not supported, so set FAST; the spec says the underlying
11894                    // device implementation may be the same for both modes.
11895 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11896 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11897                    // FAST is not supported, so neither HIGH nor FAST can be set; choose OFF
11898                    // to avoid the fps drop that high quality would cause
11899 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11900 } else {
11901 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11902 }
11903 }
11904 }
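            /* Example of the fallback above (derived from the checks, not an
             * exhaustive table): if the device advertises only OFF and FAST and
             * the framework asks for HIGH_QUALITY, cacMode degrades to FAST; if
             * FAST itself is not advertised, the request degrades to OFF. */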
11905 LOGD("Final cacMode is %d", cacMode);
11906 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11907 rc = BAD_VALUE;
11908 }
11909 } else {
11910 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11911 }
11912 }
11913
Thierry Strudel2896d122017-02-23 19:18:03 -080011914 char af_value[PROPERTY_VALUE_MAX];
11915 property_get("persist.camera.af.infinity", af_value, "0");
11916
Jason Lee84ae9972017-02-24 13:24:24 -080011917 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011918 if (atoi(af_value) == 0) {
11919 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011920 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011921 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11922 fwk_focusMode);
11923 if (NAME_NOT_FOUND != val) {
11924 uint8_t focusMode = (uint8_t)val;
11925 LOGD("set focus mode %d", focusMode);
11926 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11927 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11928 rc = BAD_VALUE;
11929 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011930 }
11931 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011932 } else {
11933 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11934 LOGE("Focus forced to infinity %d", focusMode);
11935 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11936 rc = BAD_VALUE;
11937 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011938 }
11939
Jason Lee84ae9972017-02-24 13:24:24 -080011940 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11941 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011942 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11943 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11944 focalDistance)) {
11945 rc = BAD_VALUE;
11946 }
11947 }
11948
11949 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11950 uint8_t fwk_antibandingMode =
11951 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11952 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11953 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11954 if (NAME_NOT_FOUND != val) {
11955 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011956 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11957 if (m60HzZone) {
11958 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11959 } else {
11960 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11961 }
11962 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011963 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11964 hal_antibandingMode)) {
11965 rc = BAD_VALUE;
11966 }
11967 }
11968 }
11969
11970 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11971 int32_t expCompensation = frame_settings.find(
11972 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11973 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11974 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11975 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11976 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011977 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011978 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11979 expCompensation)) {
11980 rc = BAD_VALUE;
11981 }
11982 }
11983
11984 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11985 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11986 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11987 rc = BAD_VALUE;
11988 }
11989 }
11990 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11991 rc = setHalFpsRange(frame_settings, hal_metadata);
11992 if (rc != NO_ERROR) {
11993 LOGE("setHalFpsRange failed");
11994 }
11995 }
11996
11997 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11998 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11999 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12000 rc = BAD_VALUE;
12001 }
12002 }
12003
12004 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12005 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12006 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12007 fwk_effectMode);
12008 if (NAME_NOT_FOUND != val) {
12009 uint8_t effectMode = (uint8_t)val;
12010 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12011 rc = BAD_VALUE;
12012 }
12013 }
12014 }
12015
12016 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12017 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12018 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12019 colorCorrectMode)) {
12020 rc = BAD_VALUE;
12021 }
12022 }
12023
12024 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12025 cam_color_correct_gains_t colorCorrectGains;
12026 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12027 colorCorrectGains.gains[i] =
12028 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12029 }
12030 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12031 colorCorrectGains)) {
12032 rc = BAD_VALUE;
12033 }
12034 }
12035
12036 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12037 cam_color_correct_matrix_t colorCorrectTransform;
12038 cam_rational_type_t transform_elem;
12039 size_t num = 0;
12040 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12041 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12042 transform_elem.numerator =
12043 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12044 transform_elem.denominator =
12045 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12046 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12047 num++;
12048 }
12049 }
12050 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12051 colorCorrectTransform)) {
12052 rc = BAD_VALUE;
12053 }
12054 }
12055
12056 cam_trigger_t aecTrigger;
12057 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12058 aecTrigger.trigger_id = -1;
12059 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12060 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12061 aecTrigger.trigger =
12062 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12063 aecTrigger.trigger_id =
12064 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12065 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12066 aecTrigger)) {
12067 rc = BAD_VALUE;
12068 }
12069 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12070 aecTrigger.trigger, aecTrigger.trigger_id);
12071 }
12072
12073 /*af_trigger must come with a trigger id*/
12074 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12075 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12076 cam_trigger_t af_trigger;
12077 af_trigger.trigger =
12078 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12079 af_trigger.trigger_id =
12080 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12081 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12082 rc = BAD_VALUE;
12083 }
12084 LOGD("AfTrigger: %d AfTriggerID: %d",
12085 af_trigger.trigger, af_trigger.trigger_id);
12086 }
12087
12088 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12089 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12090 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12091 rc = BAD_VALUE;
12092 }
12093 }
12094 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12095 cam_edge_application_t edge_application;
12096 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012097
Thierry Strudel3d639192016-09-09 11:52:26 -070012098 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12099 edge_application.sharpness = 0;
12100 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012101 edge_application.sharpness =
12102 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12103 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12104 int32_t sharpness =
12105 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12106 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12107 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12108 LOGD("Setting edge mode sharpness %d", sharpness);
12109 edge_application.sharpness = sharpness;
12110 }
12111 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012112 }
12113 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12114 rc = BAD_VALUE;
12115 }
12116 }
12117
12118 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12119 int32_t respectFlashMode = 1;
12120 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12121 uint8_t fwk_aeMode =
12122 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012123 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12124 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12125 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012126 respectFlashMode = 0;
12127 LOGH("AE Mode controls flash, ignore android.flash.mode");
12128 }
12129 }
12130 if (respectFlashMode) {
12131 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12132 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12133 LOGH("flash mode after mapping %d", val);
12134 // To check: CAM_INTF_META_FLASH_MODE usage
12135 if (NAME_NOT_FOUND != val) {
12136 uint8_t flashMode = (uint8_t)val;
12137 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12138 rc = BAD_VALUE;
12139 }
12140 }
12141 }
12142 }
12143
12144 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12145 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12146 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12147 rc = BAD_VALUE;
12148 }
12149 }
12150
12151 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12152 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12153 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12154 flashFiringTime)) {
12155 rc = BAD_VALUE;
12156 }
12157 }
12158
12159 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12160 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12161 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12162 hotPixelMode)) {
12163 rc = BAD_VALUE;
12164 }
12165 }
12166
12167 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12168 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12169 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12170 lensAperture)) {
12171 rc = BAD_VALUE;
12172 }
12173 }
12174
12175 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12176 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12177 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12178 filterDensity)) {
12179 rc = BAD_VALUE;
12180 }
12181 }
12182
12183 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12184 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12185 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12186 focalLength)) {
12187 rc = BAD_VALUE;
12188 }
12189 }
12190
12191 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12192 uint8_t optStabMode =
12193 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12194 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12195 optStabMode)) {
12196 rc = BAD_VALUE;
12197 }
12198 }
12199
12200 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12201 uint8_t videoStabMode =
12202 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12203 LOGD("videoStabMode from APP = %d", videoStabMode);
12204 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12205 videoStabMode)) {
12206 rc = BAD_VALUE;
12207 }
12208 }
12209
12210
12211 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12212 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12213 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12214 noiseRedMode)) {
12215 rc = BAD_VALUE;
12216 }
12217 }
12218
12219 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12220 float reprocessEffectiveExposureFactor =
12221 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12222 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12223 reprocessEffectiveExposureFactor)) {
12224 rc = BAD_VALUE;
12225 }
12226 }
12227
12228 cam_crop_region_t scalerCropRegion;
12229 bool scalerCropSet = false;
12230 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12231 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12232 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12233 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12234 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12235
12236 // Map coordinate system from active array to sensor output.
12237 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12238 scalerCropRegion.width, scalerCropRegion.height);
12239
12240 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12241 scalerCropRegion)) {
12242 rc = BAD_VALUE;
12243 }
12244 scalerCropSet = true;
12245 }
12246
12247 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12248 int64_t sensorExpTime =
12249 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12250 LOGD("setting sensorExpTime %lld", sensorExpTime);
12251 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12252 sensorExpTime)) {
12253 rc = BAD_VALUE;
12254 }
12255 }
12256
12257 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12258 int64_t sensorFrameDuration =
12259 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012260 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12261 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12262 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12263 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12264 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12265 sensorFrameDuration)) {
12266 rc = BAD_VALUE;
12267 }
12268 }
12269
12270 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12271 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12272 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12273 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12274 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12275 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12276 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12277 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12278 sensorSensitivity)) {
12279 rc = BAD_VALUE;
12280 }
12281 }
12282
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012283#ifndef USE_HAL_3_3
12284 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12285 int32_t ispSensitivity =
12286 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12287 if (ispSensitivity <
12288 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12289 ispSensitivity =
12290 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12291 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12292 }
12293 if (ispSensitivity >
12294 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12295 ispSensitivity =
12296 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12297 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12298 }
12299 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12300 ispSensitivity)) {
12301 rc = BAD_VALUE;
12302 }
12303 }
12304#endif
12305
Thierry Strudel3d639192016-09-09 11:52:26 -070012306 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12307 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12308 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12309 rc = BAD_VALUE;
12310 }
12311 }
12312
12313 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12314 uint8_t fwk_facedetectMode =
12315 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12316
12317 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12318 fwk_facedetectMode);
12319
12320 if (NAME_NOT_FOUND != val) {
12321 uint8_t facedetectMode = (uint8_t)val;
12322 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12323 facedetectMode)) {
12324 rc = BAD_VALUE;
12325 }
12326 }
12327 }
12328
Thierry Strudel54dc9782017-02-15 12:12:10 -080012329 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012330 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012331 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012332 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12333 histogramMode)) {
12334 rc = BAD_VALUE;
12335 }
12336 }
12337
12338 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12339 uint8_t sharpnessMapMode =
12340 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12341 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12342 sharpnessMapMode)) {
12343 rc = BAD_VALUE;
12344 }
12345 }
12346
12347 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12348 uint8_t tonemapMode =
12349 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12350 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12351 rc = BAD_VALUE;
12352 }
12353 }
12354 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12355 /*All tonemap channels will have the same number of points*/
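    /* Layout sketch (illustrative, assuming a simple two-point linear curve):
     * each channel arrives as interleaved (Pin, Pout) pairs, e.g.
     * data.f = {0.0, 0.0, 1.0, 1.0} with count = 4, which the code below turns
     * into tonemap_points_cnt = count / 2 = 2 points for each of G, B and R. */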
12356 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12357 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12358 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12359 cam_rgb_tonemap_curves tonemapCurves;
12360 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12361 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12362 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12363 tonemapCurves.tonemap_points_cnt,
12364 CAM_MAX_TONEMAP_CURVE_SIZE);
12365 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12366 }
12367
12368 /* ch0 = G*/
12369 size_t point = 0;
12370 cam_tonemap_curve_t tonemapCurveGreen;
12371 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12372 for (size_t j = 0; j < 2; j++) {
12373 tonemapCurveGreen.tonemap_points[i][j] =
12374 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12375 point++;
12376 }
12377 }
12378 tonemapCurves.curves[0] = tonemapCurveGreen;
12379
12380 /* ch 1 = B */
12381 point = 0;
12382 cam_tonemap_curve_t tonemapCurveBlue;
12383 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12384 for (size_t j = 0; j < 2; j++) {
12385 tonemapCurveBlue.tonemap_points[i][j] =
12386 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12387 point++;
12388 }
12389 }
12390 tonemapCurves.curves[1] = tonemapCurveBlue;
12391
12392 /* ch 2 = R */
12393 point = 0;
12394 cam_tonemap_curve_t tonemapCurveRed;
12395 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12396 for (size_t j = 0; j < 2; j++) {
12397 tonemapCurveRed.tonemap_points[i][j] =
12398 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12399 point++;
12400 }
12401 }
12402 tonemapCurves.curves[2] = tonemapCurveRed;
12403
12404 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12405 tonemapCurves)) {
12406 rc = BAD_VALUE;
12407 }
12408 }
12409
12410 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12411 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12412 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12413 captureIntent)) {
12414 rc = BAD_VALUE;
12415 }
12416 }
12417
12418 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12419 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12420 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12421 blackLevelLock)) {
12422 rc = BAD_VALUE;
12423 }
12424 }
12425
12426 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12427 uint8_t lensShadingMapMode =
12428 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12429 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12430 lensShadingMapMode)) {
12431 rc = BAD_VALUE;
12432 }
12433 }
12434
12435 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12436 cam_area_t roi;
12437 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012438 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012439
12440 // Map coordinate system from active array to sensor output.
12441 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12442 roi.rect.height);
12443
12444 if (scalerCropSet) {
12445 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12446 }
12447 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12448 rc = BAD_VALUE;
12449 }
12450 }
12451
12452 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12453 cam_area_t roi;
12454 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012455 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012456
12457 // Map coordinate system from active array to sensor output.
12458 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12459 roi.rect.height);
12460
12461 if (scalerCropSet) {
12462 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12463 }
12464 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12465 rc = BAD_VALUE;
12466 }
12467 }
12468
12469 // CDS for non-HFR non-video mode
12470 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12471 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12472 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12473 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12474 LOGE("Invalid CDS mode %d!", *fwk_cds);
12475 } else {
12476 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12477 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12478 rc = BAD_VALUE;
12479 }
12480 }
12481 }
12482
Thierry Strudel04e026f2016-10-10 11:27:36 -070012483 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012484 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012485 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012486 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12487 }
12488 if (m_bVideoHdrEnabled)
12489 vhdr = CAM_VIDEO_HDR_MODE_ON;
12490
Thierry Strudel54dc9782017-02-15 12:12:10 -080012491 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12492
12493 if(vhdr != curr_hdr_state)
12494 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12495
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012496 rc = setVideoHdrMode(mParameters, vhdr);
12497 if (rc != NO_ERROR) {
12498        LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012499 }
12500
12501 //IR
12502 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12503 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12504 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012505 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12506 uint8_t isIRon = 0;
12507
12508        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012509 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12510 LOGE("Invalid IR mode %d!", fwk_ir);
12511 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012512 if(isIRon != curr_ir_state )
12513 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12514
Thierry Strudel04e026f2016-10-10 11:27:36 -070012515 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12516 CAM_INTF_META_IR_MODE, fwk_ir)) {
12517 rc = BAD_VALUE;
12518 }
12519 }
12520 }
12521
Thierry Strudel54dc9782017-02-15 12:12:10 -080012522 //Binning Correction Mode
12523 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12524 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12525 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12526 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12527 || (0 > fwk_binning_correction)) {
12528 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12529 } else {
12530 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12531 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12532 rc = BAD_VALUE;
12533 }
12534 }
12535 }
12536
Thierry Strudel269c81a2016-10-12 12:13:59 -070012537 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12538 float aec_speed;
12539 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12540 LOGD("AEC Speed :%f", aec_speed);
12541 if ( aec_speed < 0 ) {
12542            LOGE("Invalid AEC convergence speed %f!", aec_speed);
12543 } else {
12544 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12545 aec_speed)) {
12546 rc = BAD_VALUE;
12547 }
12548 }
12549 }
12550
12551 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12552 float awb_speed;
12553 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12554 LOGD("AWB Speed :%f", awb_speed);
12555 if ( awb_speed < 0 ) {
12556            LOGE("Invalid AWB convergence speed %f!", awb_speed);
12557 } else {
12558 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12559 awb_speed)) {
12560 rc = BAD_VALUE;
12561 }
12562 }
12563 }
12564
Thierry Strudel3d639192016-09-09 11:52:26 -070012565 // TNR
12566 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12567 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12568 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012569 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012570 cam_denoise_param_t tnr;
12571 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12572 tnr.process_plates =
12573 (cam_denoise_process_type_t)frame_settings.find(
12574 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12575 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012576
12577 if(b_TnrRequested != curr_tnr_state)
12578 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12579
Thierry Strudel3d639192016-09-09 11:52:26 -070012580 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12581 rc = BAD_VALUE;
12582 }
12583 }
12584
Thierry Strudel54dc9782017-02-15 12:12:10 -080012585 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012586 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012587 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012588 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12589 *exposure_metering_mode)) {
12590 rc = BAD_VALUE;
12591 }
12592 }
12593
Thierry Strudel3d639192016-09-09 11:52:26 -070012594 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12595 int32_t fwk_testPatternMode =
12596 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12597 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12598 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12599
12600 if (NAME_NOT_FOUND != testPatternMode) {
12601 cam_test_pattern_data_t testPatternData;
12602 memset(&testPatternData, 0, sizeof(testPatternData));
12603 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12604 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12605 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12606 int32_t *fwk_testPatternData =
12607 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12608 testPatternData.r = fwk_testPatternData[0];
12609 testPatternData.b = fwk_testPatternData[3];
12610 switch (gCamCapability[mCameraId]->color_arrangement) {
12611 case CAM_FILTER_ARRANGEMENT_RGGB:
12612 case CAM_FILTER_ARRANGEMENT_GRBG:
12613 testPatternData.gr = fwk_testPatternData[1];
12614 testPatternData.gb = fwk_testPatternData[2];
12615 break;
12616 case CAM_FILTER_ARRANGEMENT_GBRG:
12617 case CAM_FILTER_ARRANGEMENT_BGGR:
12618 testPatternData.gr = fwk_testPatternData[2];
12619 testPatternData.gb = fwk_testPatternData[1];
12620 break;
12621 default:
12622 LOGE("color arrangement %d is not supported",
12623 gCamCapability[mCameraId]->color_arrangement);
12624 break;
12625 }
12626 }
12627 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12628 testPatternData)) {
12629 rc = BAD_VALUE;
12630 }
12631 } else {
12632 LOGE("Invalid framework sensor test pattern mode %d",
12633 fwk_testPatternMode);
12634 }
12635 }
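    /* Channel-mapping example for the test pattern block above (illustrative):
     * the framework supplies four per-channel values with R first and B last;
     * the switch assigns the two middle (green) samples to gr/gb according to
     * the sensor's CFA order, swapping them for GBRG/BGGR sensors. */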
12636
12637 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12638 size_t count = 0;
12639 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12640 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12641 gps_coords.data.d, gps_coords.count, count);
12642 if (gps_coords.count != count) {
12643 rc = BAD_VALUE;
12644 }
12645 }
12646
12647 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12648 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12649 size_t count = 0;
12650 const char *gps_methods_src = (const char *)
12651 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12652 memset(gps_methods, '\0', sizeof(gps_methods));
12653 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12654 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12655 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12656 if (GPS_PROCESSING_METHOD_SIZE != count) {
12657 rc = BAD_VALUE;
12658 }
12659 }
12660
12661 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12662 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12663 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12664 gps_timestamp)) {
12665 rc = BAD_VALUE;
12666 }
12667 }
12668
12669 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12670 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12671 cam_rotation_info_t rotation_info;
12672 if (orientation == 0) {
12673 rotation_info.rotation = ROTATE_0;
12674 } else if (orientation == 90) {
12675 rotation_info.rotation = ROTATE_90;
12676 } else if (orientation == 180) {
12677 rotation_info.rotation = ROTATE_180;
12678 } else if (orientation == 270) {
12679 rotation_info.rotation = ROTATE_270;
12680 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012681 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012682 rotation_info.streamId = snapshotStreamId;
12683 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12684 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12685 rc = BAD_VALUE;
12686 }
12687 }
12688
12689 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12690 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12691 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12692 rc = BAD_VALUE;
12693 }
12694 }
12695
12696 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12697 uint32_t thumb_quality = (uint32_t)
12698 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12699 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12700 thumb_quality)) {
12701 rc = BAD_VALUE;
12702 }
12703 }
12704
12705 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12706 cam_dimension_t dim;
12707 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12708 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12709 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12710 rc = BAD_VALUE;
12711 }
12712 }
12713
12714 // Internal metadata
12715 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12716 size_t count = 0;
12717 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12718 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12719 privatedata.data.i32, privatedata.count, count);
12720 if (privatedata.count != count) {
12721 rc = BAD_VALUE;
12722 }
12723 }
12724
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012725 // ISO/Exposure Priority
12726 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12727 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12728 cam_priority_mode_t mode =
12729 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12730 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12731 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12732 use_iso_exp_pty.previewOnly = FALSE;
12733 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12734 use_iso_exp_pty.value = *ptr;
12735
12736 if(CAM_ISO_PRIORITY == mode) {
12737 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12738 use_iso_exp_pty)) {
12739 rc = BAD_VALUE;
12740 }
12741 }
12742 else {
12743 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12744 use_iso_exp_pty)) {
12745 rc = BAD_VALUE;
12746 }
12747 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012748
12749 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12750 rc = BAD_VALUE;
12751 }
12752 }
12753 } else {
12754 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12755 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012756 }
12757 }
12758
12759 // Saturation
12760 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12761 int32_t* use_saturation =
12762 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12763 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12764 rc = BAD_VALUE;
12765 }
12766 }
12767
Thierry Strudel3d639192016-09-09 11:52:26 -070012768 // EV step
12769 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12770 gCamCapability[mCameraId]->exp_compensation_step)) {
12771 rc = BAD_VALUE;
12772 }
12773
12774 // CDS info
12775 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12776 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12777 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12778
12779 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12780 CAM_INTF_META_CDS_DATA, *cdsData)) {
12781 rc = BAD_VALUE;
12782 }
12783 }
12784
Shuzhen Wang19463d72016-03-08 11:09:52 -080012785 // Hybrid AE
12786 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12787 uint8_t *hybrid_ae = (uint8_t *)
12788 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12789
12790 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12791 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12792 rc = BAD_VALUE;
12793 }
12794 }
12795
Shuzhen Wang14415f52016-11-16 18:26:18 -080012796 // Histogram
12797 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12798 uint8_t histogramMode =
12799 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12800 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12801 histogramMode)) {
12802 rc = BAD_VALUE;
12803 }
12804 }
12805
12806 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12807 int32_t histogramBins =
12808 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12809 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12810 histogramBins)) {
12811 rc = BAD_VALUE;
12812 }
12813 }
12814
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012815 // Tracking AF
12816 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12817 uint8_t trackingAfTrigger =
12818 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12819 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12820 trackingAfTrigger)) {
12821 rc = BAD_VALUE;
12822 }
12823 }
12824
Thierry Strudel3d639192016-09-09 11:52:26 -070012825 return rc;
12826}
12827
12828/*===========================================================================
12829 * FUNCTION : captureResultCb
12830 *
12831 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12832 *
12833 * PARAMETERS :
12834 * @frame : frame information from mm-camera-interface
12835 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12836 * @userdata: userdata
12837 *
12838 * RETURN : NONE
12839 *==========================================================================*/
12840void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12841 camera3_stream_buffer_t *buffer,
12842 uint32_t frame_number, bool isInputBuffer, void *userdata)
12843{
12844 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12845 if (hw == NULL) {
12846 LOGE("Invalid hw %p", hw);
12847 return;
12848 }
12849
12850 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12851 return;
12852}
12853
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012854/*===========================================================================
12855 * FUNCTION : setBufferErrorStatus
12856 *
12857 * DESCRIPTION: Callback handler for channels to report any buffer errors
12858 *
12859 * PARAMETERS :
12860 * @ch : Channel on which buffer error is reported from
12861 * @frame_number : frame number on which buffer error is reported on
12862 * @buffer_status : buffer error status
12863 * @userdata: userdata
12864 *
12865 * RETURN : NONE
12866 *==========================================================================*/
12867void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12868 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12869{
12870 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12871 if (hw == NULL) {
12872 LOGE("Invalid hw %p", hw);
12873 return;
12874 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012875
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012876 hw->setBufferErrorStatus(ch, frame_number, err);
12877 return;
12878}
12879
12880void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12881 uint32_t frameNumber, camera3_buffer_status_t err)
12882{
12883 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12884 pthread_mutex_lock(&mMutex);
12885
12886 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12887 if (req.frame_number != frameNumber)
12888 continue;
12889 for (auto& k : req.mPendingBufferList) {
12890 if(k.stream->priv == ch) {
12891 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12892 }
12893 }
12894 }
12895
12896 pthread_mutex_unlock(&mMutex);
12897 return;
12898}
Thierry Strudel3d639192016-09-09 11:52:26 -070012899/*===========================================================================
12900 * FUNCTION : initialize
12901 *
12902 * DESCRIPTION: Pass framework callback pointers to HAL
12903 *
12904 * PARAMETERS :
12905 *
12906 *
12907 * RETURN : Success : 0
12908 * Failure: -ENODEV
12909 *==========================================================================*/
12910
12911int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12912 const camera3_callback_ops_t *callback_ops)
12913{
12914 LOGD("E");
12915 QCamera3HardwareInterface *hw =
12916 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12917 if (!hw) {
12918 LOGE("NULL camera device");
12919 return -ENODEV;
12920 }
12921
12922 int rc = hw->initialize(callback_ops);
12923 LOGD("X");
12924 return rc;
12925}
12926
12927/*===========================================================================
12928 * FUNCTION : configure_streams
12929 *
12930 * DESCRIPTION: Configure the set of output streams requested by the framework
12931 *
12932 * PARAMETERS :
12933 *
12934 *
12935 * RETURN : Success: 0
12936 * Failure: -EINVAL (if stream configuration is invalid)
12937 * -ENODEV (fatal error)
12938 *==========================================================================*/
12939
12940int QCamera3HardwareInterface::configure_streams(
12941 const struct camera3_device *device,
12942 camera3_stream_configuration_t *stream_list)
12943{
12944 LOGD("E");
12945 QCamera3HardwareInterface *hw =
12946 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12947 if (!hw) {
12948 LOGE("NULL camera device");
12949 return -ENODEV;
12950 }
12951 int rc = hw->configureStreams(stream_list);
12952 LOGD("X");
12953 return rc;
12954}
12955
12956/*===========================================================================
12957 * FUNCTION : construct_default_request_settings
12958 *
12959 * DESCRIPTION: Configure a settings buffer to meet the required use case
12960 *
12961 * PARAMETERS :
12962 *
12963 *
12964 * RETURN : Success: Return valid metadata
12965 * Failure: Return NULL
12966 *==========================================================================*/
12967const camera_metadata_t* QCamera3HardwareInterface::
12968 construct_default_request_settings(const struct camera3_device *device,
12969 int type)
12970{
12971
12972 LOGD("E");
12973 camera_metadata_t* fwk_metadata = NULL;
12974 QCamera3HardwareInterface *hw =
12975 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12976 if (!hw) {
12977 LOGE("NULL camera device");
12978 return NULL;
12979 }
12980
12981 fwk_metadata = hw->translateCapabilityToMetadata(type);
12982
12983 LOGD("X");
12984 return fwk_metadata;
12985}
12986
12987/*===========================================================================
12988 * FUNCTION : process_capture_request
12989 *
12990 * DESCRIPTION: Submit a capture request from the framework for processing
12991 *
12992 * PARAMETERS :
12993 *
12994 *
12995 * RETURN :
12996 *==========================================================================*/
12997int QCamera3HardwareInterface::process_capture_request(
12998 const struct camera3_device *device,
12999 camera3_capture_request_t *request)
13000{
13001 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013002 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013003 QCamera3HardwareInterface *hw =
13004 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13005 if (!hw) {
13006 LOGE("NULL camera device");
13007 return -EINVAL;
13008 }
13009
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013010 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013011 LOGD("X");
13012 return rc;
13013}
13014
13015/*===========================================================================
13016 * FUNCTION : dump
13017 *
13018 * DESCRIPTION: Dump HAL state to the given fd and refresh the debug log level
13019 *
13020 * PARAMETERS :
13021 *
13022 *
13023 * RETURN :
13024 *==========================================================================*/
13025
13026void QCamera3HardwareInterface::dump(
13027 const struct camera3_device *device, int fd)
13028{
13029 /* Log level property is read when "adb shell dumpsys media.camera" is
13030 called so that the log level can be controlled without restarting
13031 the media server */
13032 getLogLevel();
13033
13034 LOGD("E");
13035 QCamera3HardwareInterface *hw =
13036 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13037 if (!hw) {
13038 LOGE("NULL camera device");
13039 return;
13040 }
13041
13042 hw->dump(fd);
13043 LOGD("X");
13044 return;
13045}
13046
13047/*===========================================================================
13048 * FUNCTION : flush
13049 *
13050 * DESCRIPTION: Flush all in-flight captures and return pending buffers
13051 *
13052 * PARAMETERS :
13053 *
13054 *
13055 * RETURN :
13056 *==========================================================================*/
13057
13058int QCamera3HardwareInterface::flush(
13059 const struct camera3_device *device)
13060{
13061 int rc;
13062 LOGD("E");
13063 QCamera3HardwareInterface *hw =
13064 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13065 if (!hw) {
13066 LOGE("NULL camera device");
13067 return -EINVAL;
13068 }
13069
13070 pthread_mutex_lock(&hw->mMutex);
13071 // Validate current state
13072 switch (hw->mState) {
13073 case STARTED:
13074 /* valid state */
13075 break;
13076
13077 case ERROR:
13078 pthread_mutex_unlock(&hw->mMutex);
13079 hw->handleCameraDeviceError();
13080 return -ENODEV;
13081
13082 default:
13083 LOGI("Flush returned during state %d", hw->mState);
13084 pthread_mutex_unlock(&hw->mMutex);
13085 return 0;
13086 }
13087 pthread_mutex_unlock(&hw->mMutex);
13088
13089 rc = hw->flush(true /* restart channels */ );
13090 LOGD("X");
13091 return rc;
13092}
13093
13094/*===========================================================================
13095 * FUNCTION : close_camera_device
13096 *
13097 * DESCRIPTION: Close the camera device and release the HAL instance
13098 *
13099 * PARAMETERS :
13100 *
13101 *
13102 * RETURN :
13103 *==========================================================================*/
13104int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13105{
13106 int ret = NO_ERROR;
13107 QCamera3HardwareInterface *hw =
13108 reinterpret_cast<QCamera3HardwareInterface *>(
13109 reinterpret_cast<camera3_device_t *>(device)->priv);
13110 if (!hw) {
13111 LOGE("NULL camera device");
13112 return BAD_VALUE;
13113 }
13114
13115 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13116 delete hw;
13117 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013118 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013119 return ret;
13120}
13121
13122/*===========================================================================
13123 * FUNCTION : getWaveletDenoiseProcessPlate
13124 *
13125 * DESCRIPTION: query wavelet denoise process plate
13126 *
13127 * PARAMETERS : None
13128 *
13129 * RETURN : WNR process plate value
13130 *==========================================================================*/
13131cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13132{
13133 char prop[PROPERTY_VALUE_MAX];
13134 memset(prop, 0, sizeof(prop));
13135 property_get("persist.denoise.process.plates", prop, "0");
13136 int processPlate = atoi(prop);
13137 switch(processPlate) {
13138 case 0:
13139 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13140 case 1:
13141 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13142 case 2:
13143 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13144 case 3:
13145 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13146 default:
13147 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13148 }
13149}
13150
13151
13152/*===========================================================================
13153 * FUNCTION : getTemporalDenoiseProcessPlate
13154 *
13155 * DESCRIPTION: query temporal denoise process plate
13156 *
13157 * PARAMETERS : None
13158 *
13159 * RETURN : TNR prcocess plate value
13160 *==========================================================================*/
13161cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13162{
13163 char prop[PROPERTY_VALUE_MAX];
13164 memset(prop, 0, sizeof(prop));
13165 property_get("persist.tnr.process.plates", prop, "0");
13166 int processPlate = atoi(prop);
13167 switch(processPlate) {
13168 case 0:
13169 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13170 case 1:
13171 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13172 case 2:
13173 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13174 case 3:
13175 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13176 default:
13177 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13178 }
13179}
13180
13181
13182/*===========================================================================
13183 * FUNCTION : extractSceneMode
13184 *
13185 * DESCRIPTION: Extract scene mode from frameworks set metadata
13186 *
13187 * PARAMETERS :
13188 * @frame_settings: CameraMetadata reference
13189 * @metaMode: ANDROID_CONTORL_MODE
13190 * @hal_metadata: hal metadata structure
13191 *
13192 * RETURN : None
13193 *==========================================================================*/
13194int32_t QCamera3HardwareInterface::extractSceneMode(
13195 const CameraMetadata &frame_settings, uint8_t metaMode,
13196 metadata_buffer_t *hal_metadata)
13197{
13198 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013199 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13200
13201 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13202 LOGD("Ignoring control mode OFF_KEEP_STATE");
13203 return NO_ERROR;
13204 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013205
13206 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13207 camera_metadata_ro_entry entry =
13208 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13209 if (0 == entry.count)
13210 return rc;
13211
13212 uint8_t fwk_sceneMode = entry.data.u8[0];
13213
13214 int val = lookupHalName(SCENE_MODES_MAP,
13215 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13216 fwk_sceneMode);
13217 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013218 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013219 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013220 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013221 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013222
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013223 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13224 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13225 }
13226
13227 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13228 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013229 cam_hdr_param_t hdr_params;
13230 hdr_params.hdr_enable = 1;
13231 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13232 hdr_params.hdr_need_1x = false;
13233 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13234 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13235 rc = BAD_VALUE;
13236 }
13237 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013238
Thierry Strudel3d639192016-09-09 11:52:26 -070013239 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13240 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13241 rc = BAD_VALUE;
13242 }
13243 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013244
13245 if (mForceHdrSnapshot) {
13246 cam_hdr_param_t hdr_params;
13247 hdr_params.hdr_enable = 1;
13248 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13249 hdr_params.hdr_need_1x = false;
13250 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13251 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13252 rc = BAD_VALUE;
13253 }
13254 }
13255
Thierry Strudel3d639192016-09-09 11:52:26 -070013256 return rc;
13257}
13258
13259/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013260 * FUNCTION : setVideoHdrMode
13261 *
13262 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13263 *
13264 * PARAMETERS :
13265 * @hal_metadata: hal metadata structure
13266 * @metaMode: QCAMERA3_VIDEO_HDR_MODE
13267 *
13268 * RETURN : None
13269 *==========================================================================*/
13270int32_t QCamera3HardwareInterface::setVideoHdrMode(
13271 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13272{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013273 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13274 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13275 }
13276
13277 LOGE("Invalid Video HDR mode %d!", vhdr);
13278 return BAD_VALUE;
13279}
13280
13281/*===========================================================================
13282 * FUNCTION : setSensorHDR
13283 *
13284 * DESCRIPTION: Enable/disable sensor HDR.
13285 *
13286 * PARAMETERS :
13287 * @hal_metadata: hal metadata structure
13288 * @enable: boolean whether to enable/disable sensor HDR
13289 *
13290 * RETURN : None
13291 *==========================================================================*/
13292int32_t QCamera3HardwareInterface::setSensorHDR(
13293 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13294{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013295 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013296 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13297
13298 if (enable) {
13299 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13300 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13301 #ifdef _LE_CAMERA_
13302 //Default to staggered HDR for IOT
13303 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13304 #else
13305 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13306 #endif
13307 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13308 }
13309
13310 bool isSupported = false;
13311 switch (sensor_hdr) {
13312 case CAM_SENSOR_HDR_IN_SENSOR:
13313 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13314 CAM_QCOM_FEATURE_SENSOR_HDR) {
13315 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013316 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013317 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013318 break;
13319 case CAM_SENSOR_HDR_ZIGZAG:
13320 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13321 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13322 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013323 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013324 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013325 break;
13326 case CAM_SENSOR_HDR_STAGGERED:
13327 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13328 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13329 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013330 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013331 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013332 break;
13333 case CAM_SENSOR_HDR_OFF:
13334 isSupported = true;
13335 LOGD("Turning off sensor HDR");
13336 break;
13337 default:
13338 LOGE("HDR mode %d not supported", sensor_hdr);
13339 rc = BAD_VALUE;
13340 break;
13341 }
13342
13343 if(isSupported) {
13344 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13345 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13346 rc = BAD_VALUE;
13347 } else {
13348 if(!isVideoHdrEnable)
13349 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013350 }
13351 }
13352 return rc;
13353}
13354
13355/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013356 * FUNCTION : needRotationReprocess
13357 *
13358 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13359 *
13360 * PARAMETERS : none
13361 *
13362 * RETURN : true: needed
13363 * false: no need
13364 *==========================================================================*/
13365bool QCamera3HardwareInterface::needRotationReprocess()
13366{
13367 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13368 // current rotation is not zero, and pp has the capability to process rotation
13369 LOGH("need do reprocess for rotation");
13370 return true;
13371 }
13372
13373 return false;
13374}
13375
13376/*===========================================================================
13377 * FUNCTION : needReprocess
13378 *
13379 * DESCRIPTION: if reprocess in needed
13380 *
13381 * PARAMETERS : none
13382 *
13383 * RETURN : true: needed
13384 * false: no need
13385 *==========================================================================*/
13386bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13387{
13388 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13389 // TODO: add for ZSL HDR later
13390 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13391 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13392 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13393 return true;
13394 } else {
13395 LOGH("already post processed frame");
13396 return false;
13397 }
13398 }
13399 return needRotationReprocess();
13400}
13401
13402/*===========================================================================
13403 * FUNCTION : needJpegExifRotation
13404 *
13405 * DESCRIPTION: if rotation from jpeg is needed
13406 *
13407 * PARAMETERS : none
13408 *
13409 * RETURN : true: needed
13410 * false: no need
13411 *==========================================================================*/
13412bool QCamera3HardwareInterface::needJpegExifRotation()
13413{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013414 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013415 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13416 LOGD("Need use Jpeg EXIF Rotation");
13417 return true;
13418 }
13419 return false;
13420}
13421
13422/*===========================================================================
13423 * FUNCTION : addOfflineReprocChannel
13424 *
13425 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13426 * coming from input channel
13427 *
13428 * PARAMETERS :
13429 * @config : reprocess configuration
13430 * @inputChHandle : pointer to the input (source) channel
13431 *
13432 *
13433 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13434 *==========================================================================*/
13435QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13436 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13437{
13438 int32_t rc = NO_ERROR;
13439 QCamera3ReprocessChannel *pChannel = NULL;
13440
13441 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013442 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13443 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013444 if (NULL == pChannel) {
13445 LOGE("no mem for reprocess channel");
13446 return NULL;
13447 }
13448
13449 rc = pChannel->initialize(IS_TYPE_NONE);
13450 if (rc != NO_ERROR) {
13451 LOGE("init reprocess channel failed, ret = %d", rc);
13452 delete pChannel;
13453 return NULL;
13454 }
13455
13456 // pp feature config
13457 cam_pp_feature_config_t pp_config;
13458 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13459
13460 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13461 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13462 & CAM_QCOM_FEATURE_DSDN) {
13463 //Use CPP CDS incase h/w supports it.
13464 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13465 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13466 }
13467 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13468 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13469 }
13470
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013471 if (config.hdr_param.hdr_enable) {
13472 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13473 pp_config.hdr_param = config.hdr_param;
13474 }
13475
13476 if (mForceHdrSnapshot) {
13477 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13478 pp_config.hdr_param.hdr_enable = 1;
13479 pp_config.hdr_param.hdr_need_1x = 0;
13480 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13481 }
13482
Thierry Strudel3d639192016-09-09 11:52:26 -070013483 rc = pChannel->addReprocStreamsFromSource(pp_config,
13484 config,
13485 IS_TYPE_NONE,
13486 mMetadataChannel);
13487
13488 if (rc != NO_ERROR) {
13489 delete pChannel;
13490 return NULL;
13491 }
13492 return pChannel;
13493}
13494
13495/*===========================================================================
13496 * FUNCTION : getMobicatMask
13497 *
13498 * DESCRIPTION: returns mobicat mask
13499 *
13500 * PARAMETERS : none
13501 *
13502 * RETURN : mobicat mask
13503 *
13504 *==========================================================================*/
13505uint8_t QCamera3HardwareInterface::getMobicatMask()
13506{
13507 return m_MobicatMask;
13508}
13509
13510/*===========================================================================
13511 * FUNCTION : setMobicat
13512 *
13513 * DESCRIPTION: set Mobicat on/off.
13514 *
13515 * PARAMETERS :
13516 * @params : none
13517 *
13518 * RETURN : int32_t type of status
13519 * NO_ERROR -- success
13520 * none-zero failure code
13521 *==========================================================================*/
13522int32_t QCamera3HardwareInterface::setMobicat()
13523{
13524 char value [PROPERTY_VALUE_MAX];
13525 property_get("persist.camera.mobicat", value, "0");
13526 int32_t ret = NO_ERROR;
13527 uint8_t enableMobi = (uint8_t)atoi(value);
13528
13529 if (enableMobi) {
13530 tune_cmd_t tune_cmd;
13531 tune_cmd.type = SET_RELOAD_CHROMATIX;
13532 tune_cmd.module = MODULE_ALL;
13533 tune_cmd.value = TRUE;
13534 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13535 CAM_INTF_PARM_SET_VFE_COMMAND,
13536 tune_cmd);
13537
13538 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13539 CAM_INTF_PARM_SET_PP_COMMAND,
13540 tune_cmd);
13541 }
13542 m_MobicatMask = enableMobi;
13543
13544 return ret;
13545}
13546
13547/*===========================================================================
13548* FUNCTION : getLogLevel
13549*
13550* DESCRIPTION: Reads the log level property into a variable
13551*
13552* PARAMETERS :
13553* None
13554*
13555* RETURN :
13556* None
13557*==========================================================================*/
13558void QCamera3HardwareInterface::getLogLevel()
13559{
13560 char prop[PROPERTY_VALUE_MAX];
13561 uint32_t globalLogLevel = 0;
13562
13563 property_get("persist.camera.hal.debug", prop, "0");
13564 int val = atoi(prop);
13565 if (0 <= val) {
13566 gCamHal3LogLevel = (uint32_t)val;
13567 }
13568
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013569 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013570 gKpiDebugLevel = atoi(prop);
13571
13572 property_get("persist.camera.global.debug", prop, "0");
13573 val = atoi(prop);
13574 if (0 <= val) {
13575 globalLogLevel = (uint32_t)val;
13576 }
13577
13578 /* Highest log level among hal.logs and global.logs is selected */
13579 if (gCamHal3LogLevel < globalLogLevel)
13580 gCamHal3LogLevel = globalLogLevel;
13581
13582 return;
13583}
13584
13585/*===========================================================================
13586 * FUNCTION : validateStreamRotations
13587 *
13588 * DESCRIPTION: Check if the rotations requested are supported
13589 *
13590 * PARAMETERS :
13591 * @stream_list : streams to be configured
13592 *
13593 * RETURN : NO_ERROR on success
13594 * -EINVAL on failure
13595 *
13596 *==========================================================================*/
13597int QCamera3HardwareInterface::validateStreamRotations(
13598 camera3_stream_configuration_t *streamList)
13599{
13600 int rc = NO_ERROR;
13601
13602 /*
13603 * Loop through all streams requested in configuration
13604 * Check if unsupported rotations have been requested on any of them
13605 */
13606 for (size_t j = 0; j < streamList->num_streams; j++){
13607 camera3_stream_t *newStream = streamList->streams[j];
13608
13609 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13610 bool isImplDef = (newStream->format ==
13611 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13612 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13613 isImplDef);
13614
13615 if (isRotated && (!isImplDef || isZsl)) {
13616 LOGE("Error: Unsupported rotation of %d requested for stream"
13617 "type:%d and stream format:%d",
13618 newStream->rotation, newStream->stream_type,
13619 newStream->format);
13620 rc = -EINVAL;
13621 break;
13622 }
13623 }
13624
13625 return rc;
13626}
13627
13628/*===========================================================================
13629* FUNCTION : getFlashInfo
13630*
13631* DESCRIPTION: Retrieve information about whether the device has a flash.
13632*
13633* PARAMETERS :
13634* @cameraId : Camera id to query
13635* @hasFlash : Boolean indicating whether there is a flash device
13636* associated with given camera
13637* @flashNode : If a flash device exists, this will be its device node.
13638*
13639* RETURN :
13640* None
13641*==========================================================================*/
13642void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13643 bool& hasFlash,
13644 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13645{
13646 cam_capability_t* camCapability = gCamCapability[cameraId];
13647 if (NULL == camCapability) {
13648 hasFlash = false;
13649 flashNode[0] = '\0';
13650 } else {
13651 hasFlash = camCapability->flash_available;
13652 strlcpy(flashNode,
13653 (char*)camCapability->flash_dev_name,
13654 QCAMERA_MAX_FILEPATH_LENGTH);
13655 }
13656}
13657
13658/*===========================================================================
13659* FUNCTION : getEepromVersionInfo
13660*
13661* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13662*
13663* PARAMETERS : None
13664*
13665* RETURN : string describing EEPROM version
13666* "\0" if no such info available
13667*==========================================================================*/
13668const char *QCamera3HardwareInterface::getEepromVersionInfo()
13669{
13670 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13671}
13672
13673/*===========================================================================
13674* FUNCTION : getLdafCalib
13675*
13676* DESCRIPTION: Retrieve Laser AF calibration data
13677*
13678* PARAMETERS : None
13679*
13680* RETURN : Two uint32_t describing laser AF calibration data
13681* NULL if none is available.
13682*==========================================================================*/
13683const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13684{
13685 if (mLdafCalibExist) {
13686 return &mLdafCalib[0];
13687 } else {
13688 return NULL;
13689 }
13690}
13691
13692/*===========================================================================
13693 * FUNCTION : dynamicUpdateMetaStreamInfo
13694 *
13695 * DESCRIPTION: This function:
13696 * (1) stops all the channels
13697 * (2) returns error on pending requests and buffers
13698 * (3) sends metastream_info in setparams
13699 * (4) starts all channels
13700 * This is useful when sensor has to be restarted to apply any
13701 * settings such as frame rate from a different sensor mode
13702 *
13703 * PARAMETERS : None
13704 *
13705 * RETURN : NO_ERROR on success
13706 * Error codes on failure
13707 *
13708 *==========================================================================*/
13709int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13710{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013711 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013712 int rc = NO_ERROR;
13713
13714 LOGD("E");
13715
13716 rc = stopAllChannels();
13717 if (rc < 0) {
13718 LOGE("stopAllChannels failed");
13719 return rc;
13720 }
13721
13722 rc = notifyErrorForPendingRequests();
13723 if (rc < 0) {
13724 LOGE("notifyErrorForPendingRequests failed");
13725 return rc;
13726 }
13727
13728 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13729 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13730 "Format:%d",
13731 mStreamConfigInfo.type[i],
13732 mStreamConfigInfo.stream_sizes[i].width,
13733 mStreamConfigInfo.stream_sizes[i].height,
13734 mStreamConfigInfo.postprocess_mask[i],
13735 mStreamConfigInfo.format[i]);
13736 }
13737
13738 /* Send meta stream info once again so that ISP can start */
13739 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13740 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13741 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13742 mParameters);
13743 if (rc < 0) {
13744 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13745 }
13746
13747 rc = startAllChannels();
13748 if (rc < 0) {
13749 LOGE("startAllChannels failed");
13750 return rc;
13751 }
13752
13753 LOGD("X");
13754 return rc;
13755}
13756
13757/*===========================================================================
13758 * FUNCTION : stopAllChannels
13759 *
13760 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13761 *
13762 * PARAMETERS : None
13763 *
13764 * RETURN : NO_ERROR on success
13765 * Error codes on failure
13766 *
13767 *==========================================================================*/
13768int32_t QCamera3HardwareInterface::stopAllChannels()
13769{
13770 int32_t rc = NO_ERROR;
13771
13772 LOGD("Stopping all channels");
13773 // Stop the Streams/Channels
13774 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13775 it != mStreamInfo.end(); it++) {
13776 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13777 if (channel) {
13778 channel->stop();
13779 }
13780 (*it)->status = INVALID;
13781 }
13782
13783 if (mSupportChannel) {
13784 mSupportChannel->stop();
13785 }
13786 if (mAnalysisChannel) {
13787 mAnalysisChannel->stop();
13788 }
13789 if (mRawDumpChannel) {
13790 mRawDumpChannel->stop();
13791 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013792 if (mHdrPlusRawSrcChannel) {
13793 mHdrPlusRawSrcChannel->stop();
13794 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013795 if (mMetadataChannel) {
13796 /* If content of mStreamInfo is not 0, there is metadata stream */
13797 mMetadataChannel->stop();
13798 }
13799
13800 LOGD("All channels stopped");
13801 return rc;
13802}
13803
13804/*===========================================================================
13805 * FUNCTION : startAllChannels
13806 *
13807 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13808 *
13809 * PARAMETERS : None
13810 *
13811 * RETURN : NO_ERROR on success
13812 * Error codes on failure
13813 *
13814 *==========================================================================*/
13815int32_t QCamera3HardwareInterface::startAllChannels()
13816{
13817 int32_t rc = NO_ERROR;
13818
13819 LOGD("Start all channels ");
13820 // Start the Streams/Channels
13821 if (mMetadataChannel) {
13822 /* If content of mStreamInfo is not 0, there is metadata stream */
13823 rc = mMetadataChannel->start();
13824 if (rc < 0) {
13825 LOGE("META channel start failed");
13826 return rc;
13827 }
13828 }
13829 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13830 it != mStreamInfo.end(); it++) {
13831 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13832 if (channel) {
13833 rc = channel->start();
13834 if (rc < 0) {
13835 LOGE("channel start failed");
13836 return rc;
13837 }
13838 }
13839 }
13840 if (mAnalysisChannel) {
13841 mAnalysisChannel->start();
13842 }
13843 if (mSupportChannel) {
13844 rc = mSupportChannel->start();
13845 if (rc < 0) {
13846 LOGE("Support channel start failed");
13847 return rc;
13848 }
13849 }
13850 if (mRawDumpChannel) {
13851 rc = mRawDumpChannel->start();
13852 if (rc < 0) {
13853 LOGE("RAW dump channel start failed");
13854 return rc;
13855 }
13856 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013857 if (mHdrPlusRawSrcChannel) {
13858 rc = mHdrPlusRawSrcChannel->start();
13859 if (rc < 0) {
13860 LOGE("HDR+ RAW channel start failed");
13861 return rc;
13862 }
13863 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013864
13865 LOGD("All channels started");
13866 return rc;
13867}
13868
13869/*===========================================================================
13870 * FUNCTION : notifyErrorForPendingRequests
13871 *
13872 * DESCRIPTION: This function sends error for all the pending requests/buffers
13873 *
13874 * PARAMETERS : None
13875 *
13876 * RETURN : Error codes
13877 * NO_ERROR on success
13878 *
13879 *==========================================================================*/
13880int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13881{
13882 int32_t rc = NO_ERROR;
13883 unsigned int frameNum = 0;
13884 camera3_capture_result_t result;
13885 camera3_stream_buffer_t *pStream_Buf = NULL;
13886
13887 memset(&result, 0, sizeof(camera3_capture_result_t));
13888
13889 if (mPendingRequestsList.size() > 0) {
13890 pendingRequestIterator i = mPendingRequestsList.begin();
13891 frameNum = i->frame_number;
13892 } else {
13893 /* There might still be pending buffers even though there are
13894 no pending requests. Setting the frameNum to MAX so that
13895 all the buffers with smaller frame numbers are returned */
13896 frameNum = UINT_MAX;
13897 }
13898
13899 LOGH("Oldest frame num on mPendingRequestsList = %u",
13900 frameNum);
13901
Emilian Peev7650c122017-01-19 08:24:33 -080013902 notifyErrorFoPendingDepthData(mDepthChannel);
13903
Thierry Strudel3d639192016-09-09 11:52:26 -070013904 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13905 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13906
13907 if (req->frame_number < frameNum) {
13908 // Send Error notify to frameworks for each buffer for which
13909 // metadata buffer is already sent
13910 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13911 req->frame_number, req->mPendingBufferList.size());
13912
13913 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13914 if (NULL == pStream_Buf) {
13915 LOGE("No memory for pending buffers array");
13916 return NO_MEMORY;
13917 }
13918 memset(pStream_Buf, 0,
13919 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13920 result.result = NULL;
13921 result.frame_number = req->frame_number;
13922 result.num_output_buffers = req->mPendingBufferList.size();
13923 result.output_buffers = pStream_Buf;
13924
13925 size_t index = 0;
13926 for (auto info = req->mPendingBufferList.begin();
13927 info != req->mPendingBufferList.end(); ) {
13928
13929 camera3_notify_msg_t notify_msg;
13930 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13931 notify_msg.type = CAMERA3_MSG_ERROR;
13932 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13933 notify_msg.message.error.error_stream = info->stream;
13934 notify_msg.message.error.frame_number = req->frame_number;
13935 pStream_Buf[index].acquire_fence = -1;
13936 pStream_Buf[index].release_fence = -1;
13937 pStream_Buf[index].buffer = info->buffer;
13938 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13939 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013940 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013941 index++;
13942 // Remove buffer from list
13943 info = req->mPendingBufferList.erase(info);
13944 }
13945
13946 // Remove this request from Map
13947 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13948 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13949 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13950
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013951 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013952
13953 delete [] pStream_Buf;
13954 } else {
13955
13956 // Go through the pending requests info and send error request to framework
13957 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13958
13959 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13960
13961 // Send error notify to frameworks
13962 camera3_notify_msg_t notify_msg;
13963 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13964 notify_msg.type = CAMERA3_MSG_ERROR;
13965 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13966 notify_msg.message.error.error_stream = NULL;
13967 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013968 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013969
13970 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13971 if (NULL == pStream_Buf) {
13972 LOGE("No memory for pending buffers array");
13973 return NO_MEMORY;
13974 }
13975 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13976
13977 result.result = NULL;
13978 result.frame_number = req->frame_number;
13979 result.input_buffer = i->input_buffer;
13980 result.num_output_buffers = req->mPendingBufferList.size();
13981 result.output_buffers = pStream_Buf;
13982
13983 size_t index = 0;
13984 for (auto info = req->mPendingBufferList.begin();
13985 info != req->mPendingBufferList.end(); ) {
13986 pStream_Buf[index].acquire_fence = -1;
13987 pStream_Buf[index].release_fence = -1;
13988 pStream_Buf[index].buffer = info->buffer;
13989 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13990 pStream_Buf[index].stream = info->stream;
13991 index++;
13992 // Remove buffer from list
13993 info = req->mPendingBufferList.erase(info);
13994 }
13995
13996 // Remove this request from Map
13997 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13998 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13999 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
14000
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014001 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014002 delete [] pStream_Buf;
14003 i = erasePendingRequest(i);
14004 }
14005 }
14006
14007 /* Reset pending frame Drop list and requests list */
14008 mPendingFrameDropList.clear();
14009
14010 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
14011 req.mPendingBufferList.clear();
14012 }
14013 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014014 LOGH("Cleared all the pending buffers ");
14015
14016 return rc;
14017}
14018
14019bool QCamera3HardwareInterface::isOnEncoder(
14020 const cam_dimension_t max_viewfinder_size,
14021 uint32_t width, uint32_t height)
14022{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014023 return ((width > (uint32_t)max_viewfinder_size.width) ||
14024 (height > (uint32_t)max_viewfinder_size.height) ||
14025 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14026 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014027}
14028
14029/*===========================================================================
14030 * FUNCTION : setBundleInfo
14031 *
14032 * DESCRIPTION: Set bundle info for all streams that are bundle.
14033 *
14034 * PARAMETERS : None
14035 *
14036 * RETURN : NO_ERROR on success
14037 * Error codes on failure
14038 *==========================================================================*/
14039int32_t QCamera3HardwareInterface::setBundleInfo()
14040{
14041 int32_t rc = NO_ERROR;
14042
14043 if (mChannelHandle) {
14044 cam_bundle_config_t bundleInfo;
14045 memset(&bundleInfo, 0, sizeof(bundleInfo));
14046 rc = mCameraHandle->ops->get_bundle_info(
14047 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14048 if (rc != NO_ERROR) {
14049 LOGE("get_bundle_info failed");
14050 return rc;
14051 }
14052 if (mAnalysisChannel) {
14053 mAnalysisChannel->setBundleInfo(bundleInfo);
14054 }
14055 if (mSupportChannel) {
14056 mSupportChannel->setBundleInfo(bundleInfo);
14057 }
14058 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14059 it != mStreamInfo.end(); it++) {
14060 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14061 channel->setBundleInfo(bundleInfo);
14062 }
14063 if (mRawDumpChannel) {
14064 mRawDumpChannel->setBundleInfo(bundleInfo);
14065 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014066 if (mHdrPlusRawSrcChannel) {
14067 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14068 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014069 }
14070
14071 return rc;
14072}
14073
14074/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014075 * FUNCTION : setInstantAEC
14076 *
14077 * DESCRIPTION: Set Instant AEC related params.
14078 *
14079 * PARAMETERS :
14080 * @meta: CameraMetadata reference
14081 *
14082 * RETURN : NO_ERROR on success
14083 * Error codes on failure
14084 *==========================================================================*/
14085int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14086{
14087 int32_t rc = NO_ERROR;
14088 uint8_t val = 0;
14089 char prop[PROPERTY_VALUE_MAX];
14090
14091 // First try to configure instant AEC from framework metadata
14092 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14093 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14094 }
14095
14096 // If framework did not set this value, try to read from set prop.
14097 if (val == 0) {
14098 memset(prop, 0, sizeof(prop));
14099 property_get("persist.camera.instant.aec", prop, "0");
14100 val = (uint8_t)atoi(prop);
14101 }
14102
14103 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14104 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14105 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14106 mInstantAEC = val;
14107 mInstantAECSettledFrameNumber = 0;
14108 mInstantAecFrameIdxCount = 0;
14109 LOGH("instantAEC value set %d",val);
14110 if (mInstantAEC) {
14111 memset(prop, 0, sizeof(prop));
14112 property_get("persist.camera.ae.instant.bound", prop, "10");
14113 int32_t aec_frame_skip_cnt = atoi(prop);
14114 if (aec_frame_skip_cnt >= 0) {
14115 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14116 } else {
14117 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14118 rc = BAD_VALUE;
14119 }
14120 }
14121 } else {
14122 LOGE("Bad instant aec value set %d", val);
14123 rc = BAD_VALUE;
14124 }
14125 return rc;
14126}
14127
14128/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014129 * FUNCTION : get_num_overall_buffers
14130 *
14131 * DESCRIPTION: Estimate number of pending buffers across all requests.
14132 *
14133 * PARAMETERS : None
14134 *
14135 * RETURN : Number of overall pending buffers
14136 *
14137 *==========================================================================*/
14138uint32_t PendingBuffersMap::get_num_overall_buffers()
14139{
14140 uint32_t sum_buffers = 0;
14141 for (auto &req : mPendingBuffersInRequest) {
14142 sum_buffers += req.mPendingBufferList.size();
14143 }
14144 return sum_buffers;
14145}
14146
14147/*===========================================================================
14148 * FUNCTION : removeBuf
14149 *
14150 * DESCRIPTION: Remove a matching buffer from tracker.
14151 *
14152 * PARAMETERS : @buffer: image buffer for the callback
14153 *
14154 * RETURN : None
14155 *
14156 *==========================================================================*/
14157void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14158{
14159 bool buffer_found = false;
14160 for (auto req = mPendingBuffersInRequest.begin();
14161 req != mPendingBuffersInRequest.end(); req++) {
14162 for (auto k = req->mPendingBufferList.begin();
14163 k != req->mPendingBufferList.end(); k++ ) {
14164 if (k->buffer == buffer) {
14165 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14166 req->frame_number, buffer);
14167 k = req->mPendingBufferList.erase(k);
14168 if (req->mPendingBufferList.empty()) {
14169 // Remove this request from Map
14170 req = mPendingBuffersInRequest.erase(req);
14171 }
14172 buffer_found = true;
14173 break;
14174 }
14175 }
14176 if (buffer_found) {
14177 break;
14178 }
14179 }
14180 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14181 get_num_overall_buffers());
14182}
14183
14184/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014185 * FUNCTION : getBufErrStatus
14186 *
14187 * DESCRIPTION: get buffer error status
14188 *
14189 * PARAMETERS : @buffer: buffer handle
14190 *
14191 * RETURN : Error status
14192 *
14193 *==========================================================================*/
14194int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14195{
14196 for (auto& req : mPendingBuffersInRequest) {
14197 for (auto& k : req.mPendingBufferList) {
14198 if (k.buffer == buffer)
14199 return k.bufStatus;
14200 }
14201 }
14202 return CAMERA3_BUFFER_STATUS_OK;
14203}
14204
14205/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014206 * FUNCTION : setPAAFSupport
14207 *
14208 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14209 * feature mask according to stream type and filter
14210 * arrangement
14211 *
14212 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14213 * @stream_type: stream type
14214 * @filter_arrangement: filter arrangement
14215 *
14216 * RETURN : None
14217 *==========================================================================*/
14218void QCamera3HardwareInterface::setPAAFSupport(
14219 cam_feature_mask_t& feature_mask,
14220 cam_stream_type_t stream_type,
14221 cam_color_filter_arrangement_t filter_arrangement)
14222{
Thierry Strudel3d639192016-09-09 11:52:26 -070014223 switch (filter_arrangement) {
14224 case CAM_FILTER_ARRANGEMENT_RGGB:
14225 case CAM_FILTER_ARRANGEMENT_GRBG:
14226 case CAM_FILTER_ARRANGEMENT_GBRG:
14227 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014228 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14229 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014230 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014231 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14232 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014233 }
14234 break;
14235 case CAM_FILTER_ARRANGEMENT_Y:
14236 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14237 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14238 }
14239 break;
14240 default:
14241 break;
14242 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014243 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14244 feature_mask, stream_type, filter_arrangement);
14245
14246
Thierry Strudel3d639192016-09-09 11:52:26 -070014247}
14248
14249/*===========================================================================
14250* FUNCTION : getSensorMountAngle
14251*
14252* DESCRIPTION: Retrieve sensor mount angle
14253*
14254* PARAMETERS : None
14255*
14256* RETURN : sensor mount angle in uint32_t
14257*==========================================================================*/
14258uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14259{
14260 return gCamCapability[mCameraId]->sensor_mount_angle;
14261}
14262
14263/*===========================================================================
14264* FUNCTION : getRelatedCalibrationData
14265*
14266* DESCRIPTION: Retrieve related system calibration data
14267*
14268* PARAMETERS : None
14269*
14270* RETURN : Pointer of related system calibration data
14271*==========================================================================*/
14272const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14273{
14274 return (const cam_related_system_calibration_data_t *)
14275 &(gCamCapability[mCameraId]->related_cam_calibration);
14276}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014277
14278/*===========================================================================
14279 * FUNCTION : is60HzZone
14280 *
14281 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
14282 *
14283 * PARAMETERS : None
14284 *
14285 * RETURN : True if in 60Hz zone, False otherwise
14286 *==========================================================================*/
14287bool QCamera3HardwareInterface::is60HzZone()
14288{
14289 time_t t = time(NULL);
14290 struct tm lt;
14291
14292 struct tm* r = localtime_r(&t, &lt);
14293
14294 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14295 return true;
14296 else
14297 return false;
14298}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014299
14300/*===========================================================================
14301 * FUNCTION : adjustBlackLevelForCFA
14302 *
14303 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14304 * of bayer CFA (Color Filter Array).
14305 *
14306 * PARAMETERS : @input: black level pattern in the order of RGGB
14307 * @output: black level pattern in the order of CFA
14308 * @color_arrangement: CFA color arrangement
14309 *
14310 * RETURN : None
14311 *==========================================================================*/
14312template<typename T>
14313void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14314 T input[BLACK_LEVEL_PATTERN_CNT],
14315 T output[BLACK_LEVEL_PATTERN_CNT],
14316 cam_color_filter_arrangement_t color_arrangement)
14317{
14318 switch (color_arrangement) {
14319 case CAM_FILTER_ARRANGEMENT_GRBG:
14320 output[0] = input[1];
14321 output[1] = input[0];
14322 output[2] = input[3];
14323 output[3] = input[2];
14324 break;
14325 case CAM_FILTER_ARRANGEMENT_GBRG:
14326 output[0] = input[2];
14327 output[1] = input[3];
14328 output[2] = input[0];
14329 output[3] = input[1];
14330 break;
14331 case CAM_FILTER_ARRANGEMENT_BGGR:
14332 output[0] = input[3];
14333 output[1] = input[2];
14334 output[2] = input[1];
14335 output[3] = input[0];
14336 break;
14337 case CAM_FILTER_ARRANGEMENT_RGGB:
14338 output[0] = input[0];
14339 output[1] = input[1];
14340 output[2] = input[2];
14341 output[3] = input[3];
14342 break;
14343 default:
14344 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14345 break;
14346 }
14347}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014348
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014349void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14350 CameraMetadata &resultMetadata,
14351 std::shared_ptr<metadata_buffer_t> settings)
14352{
14353 if (settings == nullptr) {
14354 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14355 return;
14356 }
14357
14358 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14359 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14360 }
14361
14362 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14363 String8 str((const char *)gps_methods);
14364 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14365 }
14366
14367 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14368 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14369 }
14370
14371 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14372 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14373 }
14374
14375 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14376 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14377 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14378 }
14379
14380 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14381 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14382 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14383 }
14384
14385 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14386 int32_t fwk_thumb_size[2];
14387 fwk_thumb_size[0] = thumb_size->width;
14388 fwk_thumb_size[1] = thumb_size->height;
14389 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14390 }
14391
14392 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14393 uint8_t fwk_intent = intent[0];
14394 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14395 }
14396}
14397
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014398bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14399 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14400 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014401{
14402 if (hdrPlusRequest == nullptr) return false;
14403
14404 // Check noise reduction mode is high quality.
14405 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14406 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14407 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014408 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14409 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014410 return false;
14411 }
14412
14413 // Check edge mode is high quality.
14414 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14415 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14416 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14417 return false;
14418 }
14419
14420 if (request.num_output_buffers != 1 ||
14421 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14422 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014423 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14424 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14425 request.output_buffers[0].stream->width,
14426 request.output_buffers[0].stream->height,
14427 request.output_buffers[0].stream->format);
14428 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014429 return false;
14430 }
14431
14432 // Get a YUV buffer from pic channel.
14433 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14434 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14435 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14436 if (res != OK) {
14437 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14438 __FUNCTION__, strerror(-res), res);
14439 return false;
14440 }
14441
14442 pbcamera::StreamBuffer buffer;
14443 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014444 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014445 buffer.data = yuvBuffer->buffer;
14446 buffer.dataSize = yuvBuffer->frame_len;
14447
14448 pbcamera::CaptureRequest pbRequest;
14449 pbRequest.id = request.frame_number;
14450 pbRequest.outputBuffers.push_back(buffer);
14451
14452 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014453 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014454 if (res != OK) {
14455 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14456 strerror(-res), res);
14457 return false;
14458 }
14459
14460 hdrPlusRequest->yuvBuffer = yuvBuffer;
14461 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14462
14463 return true;
14464}
14465
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014466status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked() {
14467 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14468 return OK;
14469 }
14470
14471 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14472 if (res != OK) {
14473 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14474 strerror(-res), res);
14475 return res;
14476 }
14477 gHdrPlusClientOpening = true;
14478
14479 return OK;
14480}
14481
Chien-Yu Chenee335912017-02-09 17:53:20 -080014482status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14483{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014484 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014485
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014486 // Check if gHdrPlusClient is opened or being opened.
14487 if (gHdrPlusClient == nullptr) {
14488 if (gHdrPlusClientOpening) {
14489 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14490 return OK;
14491 }
14492
14493 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014494 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014495 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14496 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014497 return res;
14498 }
14499
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014500 // When opening HDR+ client completes, HDR+ mode will be enabled.
14501 return OK;
14502
Chien-Yu Chenee335912017-02-09 17:53:20 -080014503 }
14504
14505 // Configure stream for HDR+.
14506 res = configureHdrPlusStreamsLocked();
14507 if (res != OK) {
14508 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014509 return res;
14510 }
14511
14512 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14513 res = gHdrPlusClient->setZslHdrPlusMode(true);
14514 if (res != OK) {
14515 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014516 return res;
14517 }
14518
14519 mHdrPlusModeEnabled = true;
14520 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14521
14522 return OK;
14523}
14524
14525void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14526{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014527 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014528 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014529 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14530 if (res != OK) {
14531 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14532 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014533
14534 // Close HDR+ client so Easel can enter low power mode.
14535 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14536 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014537 }
14538
14539 mHdrPlusModeEnabled = false;
14540 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14541}
14542
14543status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014544{
14545 pbcamera::InputConfiguration inputConfig;
14546 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14547 status_t res = OK;
14548
14549 // Configure HDR+ client streams.
14550 // Get input config.
14551 if (mHdrPlusRawSrcChannel) {
14552 // HDR+ input buffers will be provided by HAL.
14553 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14554 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14555 if (res != OK) {
14556 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14557 __FUNCTION__, strerror(-res), res);
14558 return res;
14559 }
14560
14561 inputConfig.isSensorInput = false;
14562 } else {
14563 // Sensor MIPI will send data to Easel.
14564 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014565 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014566 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14567 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14568 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14569 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14570 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14571 if (mSensorModeInfo.num_raw_bits != 10) {
14572 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14573 mSensorModeInfo.num_raw_bits);
14574 return BAD_VALUE;
14575 }
14576
14577 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014578 }
14579
14580 // Get output configurations.
14581 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014582 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014583
14584 // Easel may need to output YUV output buffers if mPictureChannel was created.
14585 pbcamera::StreamConfiguration yuvOutputConfig;
14586 if (mPictureChannel != nullptr) {
14587 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14588 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14589 if (res != OK) {
14590 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14591 __FUNCTION__, strerror(-res), res);
14592
14593 return res;
14594 }
14595
14596 outputStreamConfigs.push_back(yuvOutputConfig);
14597 }
14598
14599 // TODO: consider other channels for YUV output buffers.
14600
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014601 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014602 if (res != OK) {
14603 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14604 strerror(-res), res);
14605 return res;
14606 }
14607
14608 return OK;
14609}
14610
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014611void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client) {
14612 if (client == nullptr) {
14613 ALOGE("%s: Opened client is null.", __FUNCTION__);
14614 return;
14615 }
14616
14617 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14618
14619 Mutex::Autolock l(gHdrPlusClientLock);
14620 gHdrPlusClient = std::move(client);
14621 gHdrPlusClientOpening = false;
14622
14623 // Set static metadata.
14624 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14625 if (res != OK) {
14626 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14627 __FUNCTION__, strerror(-res), res);
14628 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14629 gHdrPlusClient = nullptr;
14630 return;
14631 }
14632
14633 // Enable HDR+ mode.
14634 res = enableHdrPlusModeLocked();
14635 if (res != OK) {
14636 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
14637 }
14638}
14639
14640void QCamera3HardwareInterface::onOpenFailed(status_t err) {
14641 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14642 Mutex::Autolock l(gHdrPlusClientLock);
14643 gHdrPlusClientOpening = false;
14644}
14645
void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
        const camera_metadata_t &resultMetadata) {
    if (result != nullptr) {
        if (result->outputBuffers.size() != 1) {
            ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
                    result->outputBuffers.size());
            return;
        }

        if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
            ALOGE("%s: Only YUV output stream is supported (stream id %d).", __FUNCTION__,
                    result->outputBuffers[0].streamId);
            return;
        }

        // Find the pending HDR+ request.
        HdrPlusPendingRequest pendingRequest;
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            if (req == mHdrPlusPendingRequests.end()) {
                ALOGE("%s: Couldn't find pending HDR+ request %d.", __FUNCTION__,
                        result->requestId);
                return;
            }
            pendingRequest = req->second;
        }

        // Update the result metadata with the settings of the HDR+ still capture request
        // because the result metadata belongs to a ZSL buffer.
        CameraMetadata metadata;
        metadata = &resultMetadata;
        updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
        camera_metadata_t* updatedResultMetadata = metadata.release();

        QCamera3PicChannel *picChannel =
                (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;

        // Check if dumping HDR+ YUV output is enabled.
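        // The dump can be toggled at runtime, e.g. via
        // "adb shell setprop persist.camera.hdrplus.dump_yuv 1".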
        char prop[PROPERTY_VALUE_MAX];
        property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
        bool dumpYuvOutput = atoi(prop);

        if (dumpYuvOutput) {
            // Dump yuv buffer to a ppm file.
            pbcamera::StreamConfiguration outputConfig;
            status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
                    HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
            if (rc == OK) {
                char buf[FILENAME_MAX] = {};
                snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                        result->requestId, result->outputBuffers[0].streamId,
                        outputConfig.image.width, outputConfig.image.height);

                hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
            } else {
                LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
                        __FUNCTION__, strerror(-rc), rc);
            }
        }

        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
        auto halMetadata = std::make_shared<metadata_buffer_t>();
        clear_metadata_buffer(halMetadata.get());

        // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
        // encoding.
        status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
                halStreamId, /*minFrameDuration*/0);
        if (res == OK) {
            // Return the buffer to pic channel for encoding.
            picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
                    pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
                    halMetadata);
        } else {
            // Return the buffer without encoding.
            // TODO: This should not happen but we may want to report an error buffer to camera
            // service.
            picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
            ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
                    strerror(-res), res);
        }

        // Send HDR+ metadata to framework.
        {
            pthread_mutex_lock(&mMutex);

            // updatedResultMetadata will be freed in handlePendingResultsWithLock.
            handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
            pthread_mutex_unlock(&mMutex);
        }

        // Remove the HDR+ pending request.
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            mHdrPlusPendingRequests.erase(req);
        }
    }
}

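// Called when an HDR+ capture request fails. Returns the YUV buffer of the
// pending request to the pic channel and drops the request; reporting the
// failure to the framework is still a TODO.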
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
    // TODO: Handle HDR+ capture failures and send the failure to framework.
    Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
    auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
    if (pendingRequest == mHdrPlusPendingRequests.end()) {
        ALOGE("%s: Couldn't find pending HDR+ request %d.", __FUNCTION__,
                failedResult->requestId);
        return;
    }

    // Return the buffer to pic channel.
    QCamera3PicChannel *picChannel =
            (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
    picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());

    mHdrPlusPendingRequests.erase(pendingRequest);
}

}; //end namespace qcamera