/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT              0
#define FACE_TOP               1
#define FACE_RIGHT             2
#define FACE_BOTTOM            3
#define FACE_WEIGHT            4

/* Face landmarks indices */
#define LEFT_EYE_X             0
#define LEFT_EYE_Y             1
#define RIGHT_EYE_X            2
#define RIGHT_EYE_Y            3
#define MOUTH_X                4
#define MOUTH_Y                5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

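// Per-camera-id capability and static metadata tables, indexed by camera id
// and shared by all sessions opened on this HAL instance.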
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

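// Supported JPEG thumbnail sizes as (width, height) pairs; the leading (0, 0)
// entry indicates that thumbnail generation can be disabled.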
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index, which means that for HAL values that map to different
 * Android values, the traversal logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

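// camera3_device_ops vtable exposed to the camera service. Entries that are
// deprecated in HAL3 (register_stream_buffers, get_metadata_vendor_tag_ops)
// are intentionally left NULL.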
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

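/*===========================================================================
 * FUNCTION   : logEaselEvent
 *
 * DESCRIPTION: Log an Easel profiling event tagged with the current
 *              CLOCK_BOOTTIME timestamp in milliseconds. Logging is a no-op
 *              unless gEaselProfilingEnabled is set.
 *
 * PARAMETERS :
 *   @tag   : log tag used to group related profiling events
 *   @event : description of the event being logged
 *
 * RETURN     : none
 *==========================================================================*/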
static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    // TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    // Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

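    // Resume the Easel coprocessor (used for HDR+ processing) before opening
    // the camera, if Easel is present on this device.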
    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

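    // Tear down HDR+ and Easel state for this session: disable HDR+ mode and
    // close the HDR+ client if one is open, then stop MIPI and suspend Easel.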
    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize framework callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the stream sizes requested in the configuration are
 *              among those advertised as supported
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find the input stream, if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
            case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
            case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW10:
                if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                        (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                        mPDSupported) {
                    if ((depthWidth == newStream->width) &&
                            (depthHeight == newStream->height)) {
                        sizeFound = true;
                    }
                    break;
                }
                count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
                for (size_t i = 0; i < count; i++) {
                    if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                            (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                        mPDSupported) {
                    // As per spec, the depth point cloud size should be sample count / 16
                    uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                    if ((depthSamplesCount == newStream->width) &&
                            (1 == newStream->height)) {
                        sizeFound = true;
                    }
                    break;
                }
                count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
                /* Verify set size against generated sizes table */
                for (size_t i = 0; i < count; i++) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
            default:
                if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                        || newStream->stream_type == CAMERA3_STREAM_INPUT
                        || IS_USAGE_ZSL(newStream->usage)) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->active_array_size.width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->active_array_size.height)) {
                        sizeFound = true;
                        break;
                    }
                    /* We could potentially break here to enforce that a ZSL stream
                     * set from the framework is always full active array size, but it
                     * is not clear from the spec whether the framework will always
                     * follow that. We also have logic to override to full array size,
                     * so keep the logic lenient for the moment.
                     */
                }
                count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                        MAX_SIZES_CNT);
                for (size_t i = 0; i < count; i++) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001300/*===========================================================================
1301 * FUNCTION : validateUsageFlags
1302 *
1303 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1304 *
1305 * PARAMETERS :
1306 * @stream_list : streams to be configured
1307 *
1308 * RETURN :
1309 * NO_ERROR if the usage flags are supported
1310 * error code if usage flags are not supported
1311 *
1312 *==========================================================================*/
1313int QCamera3HardwareInterface::validateUsageFlags(
1314 const camera3_stream_configuration_t* streamList)
1315{
1316 for (size_t j = 0; j < streamList->num_streams; j++) {
1317 const camera3_stream_t *newStream = streamList->streams[j];
1318
1319 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1320 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1321 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1322 continue;
1323 }
1324
1325 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1326 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1327 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1328 bool forcePreviewUBWC = true;
1329 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1330 forcePreviewUBWC = false;
1331 }
1332 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1333 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
1334 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1335 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
1336 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1337 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);
1338
1339 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1340 // So color spaces will always match.
1341
1342 // Check whether underlying formats of shared streams match.
1343 if (isVideo && isPreview && videoFormat != previewFormat) {
1344 LOGE("Combined video and preview usage flag is not supported");
1345 return -EINVAL;
1346 }
1347 if (isPreview && isZSL && previewFormat != zslFormat) {
1348 LOGE("Combined preview and zsl usage flag is not supported");
1349 return -EINVAL;
1350 }
1351 if (isVideo && isZSL && videoFormat != zslFormat) {
1352 LOGE("Combined video and zsl usage flag is not supported");
1353 return -EINVAL;
1354 }
1355 }
1356 return NO_ERROR;
1357}
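
/*
 * Illustrative example: a single IMPLEMENTATION_DEFINED output stream whose
 * usage carries both the video and ZSL usage bits is rejected here whenever
 * getStreamDefaultFormat() picks different internal formats for
 * CAM_STREAM_TYPE_VIDEO and CAM_STREAM_TYPE_SNAPSHOT (the exact formats,
 * e.g. UBWC vs. linear YUV, are target dependent).
 */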
1358
1359/*===========================================================================
1360 * FUNCTION : validateUsageFlagsForEis
1361 *
1362 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1363 *
1364 * PARAMETERS :
1365 * @stream_list : streams to be configured
1366 *
1367 * RETURN :
1368 * NO_ERROR if the usage flags are supported
1369 * error code if usage flags are not supported
1370 *
1371 *==========================================================================*/
1372int QCamera3HardwareInterface::validateUsageFlagsForEis(
1373 const camera3_stream_configuration_t* streamList)
1374{
1375 for (size_t j = 0; j < streamList->num_streams; j++) {
1376 const camera3_stream_t *newStream = streamList->streams[j];
1377
1378 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1379 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1380
1381        // Because EIS is "hard-coded" for certain use cases, and the current
1382        // implementation doesn't support sharing preview and video on the same
1383        // stream, return failure if EIS is forced on.
1384 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1385 LOGE("Combined video and preview usage flag is not supported due to EIS");
1386 return -EINVAL;
1387 }
1388 }
1389 return NO_ERROR;
1390}
1391
Thierry Strudel3d639192016-09-09 11:52:26 -07001392/*==============================================================================
1393 * FUNCTION : isSupportChannelNeeded
1394 *
1395 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1396 *
1397 * PARAMETERS :
1398 * @stream_list : streams to be configured
1399 * @stream_config_info : the config info for streams to be configured
1400 *
1401 * RETURN : Boolean true/false decision
1402 *
1403 *==========================================================================*/
1404bool QCamera3HardwareInterface::isSupportChannelNeeded(
1405 camera3_stream_configuration_t *streamList,
1406 cam_stream_size_info_t stream_config_info)
1407{
1408 uint32_t i;
1409 bool pprocRequested = false;
1410    /* Check for conditions where PProc pipeline does not have any streams */
1411 for (i = 0; i < stream_config_info.num_streams; i++) {
1412 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1413 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1414 pprocRequested = true;
1415 break;
1416 }
1417 }
1418
1419 if (pprocRequested == false )
1420 return true;
1421
1422 /* Dummy stream needed if only raw or jpeg streams present */
1423 for (i = 0; i < streamList->num_streams; i++) {
1424 switch(streamList->streams[i]->format) {
1425 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1426 case HAL_PIXEL_FORMAT_RAW10:
1427 case HAL_PIXEL_FORMAT_RAW16:
1428 case HAL_PIXEL_FORMAT_BLOB:
1429 break;
1430 default:
1431 return false;
1432 }
1433 }
1434 return true;
1435}
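
/*
 * Example outcomes: if no configured stream requests postprocessing, or if
 * the request contains only RAW/BLOB streams, this returns true so that a
 * dummy support channel can be created; a configuration that also includes
 * a preview or YUV stream (with some postprocessing requested) returns false.
 */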
1436
1437/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001438 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001439 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001440 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001441 *
1442 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001443 * @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001444 *
1445 * RETURN : int32_t type of status
1446 * NO_ERROR -- success
1447 * non-zero failure code
1448 *
1449 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001450int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001451{
1452 int32_t rc = NO_ERROR;
1453
1454 cam_dimension_t max_dim = {0, 0};
1455 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1456 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1457 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1458 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1459 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1460 }
1461
1462 clear_metadata_buffer(mParameters);
1463
1464 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1465 max_dim);
1466 if (rc != NO_ERROR) {
1467 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1468 return rc;
1469 }
1470
1471 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1472 if (rc != NO_ERROR) {
1473 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1474 return rc;
1475 }
1476
1477 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001478 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001479
1480 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1481 mParameters);
1482 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001483 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001484 return rc;
1485 }
1486
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001487 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001488 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1489 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1490 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1491 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1492 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001493
1494 return rc;
1495}
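
/*
 * Usage sketch (illustrative, not a quote of an actual call site): from
 * within the HAL object, after mStreamConfigInfo has been populated,
 *
 *   cam_sensor_mode_info_t modeInfo = {};
 *   if (getSensorModeInfo(modeInfo) == NO_ERROR) {
 *       // modeInfo describes the sensor mode chosen for the largest
 *       // configured stream dimensions (active/pixel array size,
 *       // op_pixel_clk, raw bit depth).
 *   }
 */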
1496
1497/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001498 * FUNCTION : addToPPFeatureMask
1499 *
1500 * DESCRIPTION: add additional features to pp feature mask based on
1501 * stream type and use case
1502 *
1503 * PARAMETERS :
1504 * @stream_format : stream type for feature mask
1505 * @stream_idx : stream idx within postprocess_mask list to change
1506 *
1507 * RETURN : None
1508 *
1509 *==========================================================================*/
1510void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1511 uint32_t stream_idx)
1512{
1513 char feature_mask_value[PROPERTY_VALUE_MAX];
1514 cam_feature_mask_t feature_mask;
1515 int args_converted;
1516 int property_len;
1517
1518 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001519#ifdef _LE_CAMERA_
1520 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1521 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1522 property_len = property_get("persist.camera.hal3.feature",
1523 feature_mask_value, swtnr_feature_mask_value);
1524#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001525 property_len = property_get("persist.camera.hal3.feature",
1526 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001527#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001528 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1529 (feature_mask_value[1] == 'x')) {
1530 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1531 } else {
1532 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1533 }
1534 if (1 != args_converted) {
1535 feature_mask = 0;
1536 LOGE("Wrong feature mask %s", feature_mask_value);
1537 return;
1538 }
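
    /*
     * The override read above accepts either hex ("0x...") or decimal, e.g.
     * (value is illustrative only):
     *
     *   adb shell setprop persist.camera.hal3.feature 0x1000
     *
     * If the value fails to parse, this function returns without touching
     * the stream's postprocess mask.
     */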
1539
1540 switch (stream_format) {
1541 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1542 /* Add LLVD to pp feature mask only if video hint is enabled */
1543 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1544 mStreamConfigInfo.postprocess_mask[stream_idx]
1545 |= CAM_QTI_FEATURE_SW_TNR;
1546 LOGH("Added SW TNR to pp feature mask");
1547 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1548 mStreamConfigInfo.postprocess_mask[stream_idx]
1549 |= CAM_QCOM_FEATURE_LLVD;
1550 LOGH("Added LLVD SeeMore to pp feature mask");
1551 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001552 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1553 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1554 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1555 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001556 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1557 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1558 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1559 CAM_QTI_FEATURE_BINNING_CORRECTION;
1560 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001561 break;
1562 }
1563 default:
1564 break;
1565 }
1566 LOGD("PP feature mask %llx",
1567 mStreamConfigInfo.postprocess_mask[stream_idx]);
1568}
1569
1570/*==============================================================================
1571 * FUNCTION : updateFpsInPreviewBuffer
1572 *
1573 * DESCRIPTION: update FPS information in preview buffer.
1574 *
1575 * PARAMETERS :
1576 * @metadata : pointer to metadata buffer
1577 * @frame_number: frame_number to look for in pending buffer list
1578 *
1579 * RETURN : None
1580 *
1581 *==========================================================================*/
1582void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1583 uint32_t frame_number)
1584{
1585 // Mark all pending buffers for this particular request
1586 // with corresponding framerate information
1587 for (List<PendingBuffersInRequest>::iterator req =
1588 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1589 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1590 for(List<PendingBufferInfo>::iterator j =
1591 req->mPendingBufferList.begin();
1592 j != req->mPendingBufferList.end(); j++) {
1593 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1594 if ((req->frame_number == frame_number) &&
1595 (channel->getStreamTypeMask() &
1596 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1597 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1598 CAM_INTF_PARM_FPS_RANGE, metadata) {
1599 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1600 struct private_handle_t *priv_handle =
1601 (struct private_handle_t *)(*(j->buffer));
1602 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1603 }
1604 }
1605 }
1606 }
1607}
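
/*
 * Note: the refresh-rate value written above is stored in the display
 * metadata (qdMetaData) attached to the gralloc private handle; it is
 * presumably consumed by the display pipeline, outside this HAL.
 */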
1608
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001609/*==============================================================================
1610 * FUNCTION : updateTimeStampInPendingBuffers
1611 *
1612 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1613 * of a frame number
1614 *
1615 * PARAMETERS :
1616 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1617 * @timestamp : timestamp to be set
1618 *
1619 * RETURN : None
1620 *
1621 *==========================================================================*/
1622void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1623 uint32_t frameNumber, nsecs_t timestamp)
1624{
1625 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1626 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1627 if (req->frame_number != frameNumber)
1628 continue;
1629
1630 for (auto k = req->mPendingBufferList.begin();
1631 k != req->mPendingBufferList.end(); k++ ) {
1632 struct private_handle_t *priv_handle =
1633 (struct private_handle_t *) (*(k->buffer));
1634 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1635 }
1636 }
1637 return;
1638}
1639
Thierry Strudel3d639192016-09-09 11:52:26 -07001640/*===========================================================================
1641 * FUNCTION : configureStreams
1642 *
1643 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1644 * and output streams.
1645 *
1646 * PARAMETERS :
1647 * @stream_list : streams to be configured
1648 *
1649 * RETURN :
1650 *
1651 *==========================================================================*/
1652int QCamera3HardwareInterface::configureStreams(
1653 camera3_stream_configuration_t *streamList)
1654{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001655 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001656 int rc = 0;
1657
1658 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001659 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001660 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001661 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001662
1663 return rc;
1664}
1665
1666/*===========================================================================
1667 * FUNCTION : configureStreamsPerfLocked
1668 *
1669 * DESCRIPTION: configureStreams while perfLock is held.
1670 *
1671 * PARAMETERS :
1672 * @stream_list : streams to be configured
1673 *
1674 * RETURN : int32_t type of status
1675 * NO_ERROR -- success
1676 * non-zero failure code
1677 *==========================================================================*/
1678int QCamera3HardwareInterface::configureStreamsPerfLocked(
1679 camera3_stream_configuration_t *streamList)
1680{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001681 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001682 int rc = 0;
1683
1684 // Sanity check stream_list
1685 if (streamList == NULL) {
1686 LOGE("NULL stream configuration");
1687 return BAD_VALUE;
1688 }
1689 if (streamList->streams == NULL) {
1690 LOGE("NULL stream list");
1691 return BAD_VALUE;
1692 }
1693
1694 if (streamList->num_streams < 1) {
1695 LOGE("Bad number of streams requested: %d",
1696 streamList->num_streams);
1697 return BAD_VALUE;
1698 }
1699
1700 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1701 LOGE("Maximum number of streams %d exceeded: %d",
1702 MAX_NUM_STREAMS, streamList->num_streams);
1703 return BAD_VALUE;
1704 }
1705
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001706 rc = validateUsageFlags(streamList);
1707 if (rc != NO_ERROR) {
1708 return rc;
1709 }
1710
Thierry Strudel3d639192016-09-09 11:52:26 -07001711 mOpMode = streamList->operation_mode;
1712 LOGD("mOpMode: %d", mOpMode);
1713
1714    /* First invalidate all the streams in mStreamInfo;
1715     * if they appear again, they will be validated */
1716 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1717 it != mStreamInfo.end(); it++) {
1718 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1719 if (channel) {
1720 channel->stop();
1721 }
1722 (*it)->status = INVALID;
1723 }
1724
1725 if (mRawDumpChannel) {
1726 mRawDumpChannel->stop();
1727 delete mRawDumpChannel;
1728 mRawDumpChannel = NULL;
1729 }
1730
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001731 if (mHdrPlusRawSrcChannel) {
1732 mHdrPlusRawSrcChannel->stop();
1733 delete mHdrPlusRawSrcChannel;
1734 mHdrPlusRawSrcChannel = NULL;
1735 }
1736
Thierry Strudel3d639192016-09-09 11:52:26 -07001737 if (mSupportChannel)
1738 mSupportChannel->stop();
1739
1740 if (mAnalysisChannel) {
1741 mAnalysisChannel->stop();
1742 }
1743 if (mMetadataChannel) {
1744        /* If mStreamInfo is not empty, there is a metadata stream */
1745 mMetadataChannel->stop();
1746 }
1747 if (mChannelHandle) {
1748 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1749 mChannelHandle);
1750 LOGD("stopping channel %d", mChannelHandle);
1751 }
1752
1753 pthread_mutex_lock(&mMutex);
1754
1755 // Check state
1756 switch (mState) {
1757 case INITIALIZED:
1758 case CONFIGURED:
1759 case STARTED:
1760 /* valid state */
1761 break;
1762 default:
1763 LOGE("Invalid state %d", mState);
1764 pthread_mutex_unlock(&mMutex);
1765 return -ENODEV;
1766 }
1767
1768 /* Check whether we have video stream */
1769 m_bIs4KVideo = false;
1770 m_bIsVideo = false;
1771 m_bEisSupportedSize = false;
1772 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001773 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001774 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001775 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001776 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001777 uint32_t videoWidth = 0U;
1778 uint32_t videoHeight = 0U;
1779 size_t rawStreamCnt = 0;
1780 size_t stallStreamCnt = 0;
1781 size_t processedStreamCnt = 0;
1782 // Number of streams on ISP encoder path
1783 size_t numStreamsOnEncoder = 0;
1784 size_t numYuv888OnEncoder = 0;
1785 bool bYuv888OverrideJpeg = false;
1786 cam_dimension_t largeYuv888Size = {0, 0};
1787 cam_dimension_t maxViewfinderSize = {0, 0};
1788 bool bJpegExceeds4K = false;
1789 bool bJpegOnEncoder = false;
1790 bool bUseCommonFeatureMask = false;
1791 cam_feature_mask_t commonFeatureMask = 0;
1792 bool bSmallJpegSize = false;
1793 uint32_t width_ratio;
1794 uint32_t height_ratio;
1795 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1796 camera3_stream_t *inputStream = NULL;
1797 bool isJpeg = false;
1798 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001799 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001800 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001801
1802 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1803
1804 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001805 uint8_t eis_prop_set;
1806 uint32_t maxEisWidth = 0;
1807 uint32_t maxEisHeight = 0;
1808
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001809 // Initialize all instant AEC related variables
1810 mInstantAEC = false;
1811 mResetInstantAEC = false;
1812 mInstantAECSettledFrameNumber = 0;
1813 mAecSkipDisplayFrameBound = 0;
1814 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001815 mCurrFeatureState = 0;
1816 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001817
Thierry Strudel3d639192016-09-09 11:52:26 -07001818 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1819
1820 size_t count = IS_TYPE_MAX;
1821 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1822 for (size_t i = 0; i < count; i++) {
1823 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001824 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1825 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001826 break;
1827 }
1828 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001829
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001830 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001831 maxEisWidth = MAX_EIS_WIDTH;
1832 maxEisHeight = MAX_EIS_HEIGHT;
1833 }
1834
1835 /* EIS setprop control */
1836 char eis_prop[PROPERTY_VALUE_MAX];
1837 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001838 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001839 eis_prop_set = (uint8_t)atoi(eis_prop);
1840
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001841 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001842 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1843
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001844 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1845 m_bEisEnable, eis_prop_set, m_bEisSupported);
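
    /*
     * Example: EIS can be toggled for experimentation through the property
     * read above (illustrative):
     *
     *   adb shell setprop persist.camera.eis.enable 0
     *
     * Even when enabled here, EIS is turned off later for front/aux cameras
     * and for configurations without a video stream.
     */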
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001846
Thierry Strudel3d639192016-09-09 11:52:26 -07001847 /* stream configurations */
1848 for (size_t i = 0; i < streamList->num_streams; i++) {
1849 camera3_stream_t *newStream = streamList->streams[i];
1850 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1851 "height = %d, rotation = %d, usage = 0x%x",
1852 i, newStream->stream_type, newStream->format,
1853 newStream->width, newStream->height, newStream->rotation,
1854 newStream->usage);
1855 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1856 newStream->stream_type == CAMERA3_STREAM_INPUT){
1857 isZsl = true;
1858 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001859 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1860 IS_USAGE_PREVIEW(newStream->usage)) {
1861 isPreview = true;
1862 }
1863
Thierry Strudel3d639192016-09-09 11:52:26 -07001864 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1865 inputStream = newStream;
1866 }
1867
Emilian Peev7650c122017-01-19 08:24:33 -08001868 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1869 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001870 isJpeg = true;
1871 jpegSize.width = newStream->width;
1872 jpegSize.height = newStream->height;
1873 if (newStream->width > VIDEO_4K_WIDTH ||
1874 newStream->height > VIDEO_4K_HEIGHT)
1875 bJpegExceeds4K = true;
1876 }
1877
1878 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1879 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1880 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001881 // In HAL3 we can have multiple different video streams.
1882            // videoWidth/videoHeight below track the dimensions of the
1883            // largest of those video streams.
1884 if (videoWidth < newStream->width ||
1885 videoHeight < newStream->height) {
1886 videoWidth = newStream->width;
1887 videoHeight = newStream->height;
1888 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001889 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1890 (VIDEO_4K_HEIGHT <= newStream->height)) {
1891 m_bIs4KVideo = true;
1892 }
1893 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1894 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001895
Thierry Strudel3d639192016-09-09 11:52:26 -07001896 }
1897 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1898 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1899 switch (newStream->format) {
1900 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001901 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1902 depthPresent = true;
1903 break;
1904 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001905 stallStreamCnt++;
1906 if (isOnEncoder(maxViewfinderSize, newStream->width,
1907 newStream->height)) {
1908 numStreamsOnEncoder++;
1909 bJpegOnEncoder = true;
1910 }
1911 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1912 newStream->width);
1913 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1914                    newStream->height);
1915 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1916 "FATAL: max_downscale_factor cannot be zero and so assert");
1917 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1918 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1919 LOGH("Setting small jpeg size flag to true");
1920 bSmallJpegSize = true;
1921 }
1922 break;
1923 case HAL_PIXEL_FORMAT_RAW10:
1924 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1925 case HAL_PIXEL_FORMAT_RAW16:
1926 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001927 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1928 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1929 pdStatCount++;
1930 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001931 break;
1932 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1933 processedStreamCnt++;
1934 if (isOnEncoder(maxViewfinderSize, newStream->width,
1935 newStream->height)) {
1936 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1937 !IS_USAGE_ZSL(newStream->usage)) {
1938 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1939 }
1940 numStreamsOnEncoder++;
1941 }
1942 break;
1943 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1944 processedStreamCnt++;
1945 if (isOnEncoder(maxViewfinderSize, newStream->width,
1946 newStream->height)) {
1947                // If the YUV888 size is not greater than 4K, set the feature mask
1948                // to SUPERSET so that it supports concurrent requests on
1949                // YUV and JPEG.
1950 if (newStream->width <= VIDEO_4K_WIDTH &&
1951 newStream->height <= VIDEO_4K_HEIGHT) {
1952 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1953 }
1954 numStreamsOnEncoder++;
1955 numYuv888OnEncoder++;
1956 largeYuv888Size.width = newStream->width;
1957 largeYuv888Size.height = newStream->height;
1958 }
1959 break;
1960 default:
1961 processedStreamCnt++;
1962 if (isOnEncoder(maxViewfinderSize, newStream->width,
1963 newStream->height)) {
1964 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1965 numStreamsOnEncoder++;
1966 }
1967 break;
1968 }
1969
1970 }
1971 }
1972
1973 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1974 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1975 !m_bIsVideo) {
1976 m_bEisEnable = false;
1977 }
1978
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001979 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1980 pthread_mutex_unlock(&mMutex);
1981 return -EINVAL;
1982 }
1983
Thierry Strudel54dc9782017-02-15 12:12:10 -08001984 uint8_t forceEnableTnr = 0;
1985 char tnr_prop[PROPERTY_VALUE_MAX];
1986 memset(tnr_prop, 0, sizeof(tnr_prop));
1987 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1988 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1989
Thierry Strudel3d639192016-09-09 11:52:26 -07001990 /* Logic to enable/disable TNR based on specific config size/etc.*/
1991 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001992 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1993 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001994 else if (forceEnableTnr)
1995 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001996
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001997 char videoHdrProp[PROPERTY_VALUE_MAX];
1998 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1999 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2000 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2001
2002 if (hdr_mode_prop == 1 && m_bIsVideo &&
2003 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2004 m_bVideoHdrEnabled = true;
2005 else
2006 m_bVideoHdrEnabled = false;
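
    /*
     * Example: video HDR can be enabled through the property read above
     * (illustrative):
     *
     *   adb shell setprop persist.camera.hdr.video 1
     *
     * It still stays off for constrained high speed (HFR) sessions and for
     * configurations without a video stream.
     */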
2007
2008
Thierry Strudel3d639192016-09-09 11:52:26 -07002009 /* Check if num_streams is sane */
2010 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2011 rawStreamCnt > MAX_RAW_STREAMS ||
2012 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2013        LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2014 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2015 pthread_mutex_unlock(&mMutex);
2016 return -EINVAL;
2017 }
2018 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002019 if (isZsl && m_bIs4KVideo) {
2020 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002021 pthread_mutex_unlock(&mMutex);
2022 return -EINVAL;
2023 }
2024 /* Check if stream sizes are sane */
2025 if (numStreamsOnEncoder > 2) {
2026 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2027 pthread_mutex_unlock(&mMutex);
2028 return -EINVAL;
2029 } else if (1 < numStreamsOnEncoder){
2030 bUseCommonFeatureMask = true;
2031 LOGH("Multiple streams above max viewfinder size, common mask needed");
2032 }
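
    /*
     * Example: with a 1080p max viewfinder size, configuring a 4K video
     * stream plus a full-resolution JPEG stream puts two streams on the
     * encoder path, so the common (superset) feature mask accumulated above
     * is applied to both. Sizes here are illustrative; the actual threshold
     * is max_viewfinder_size from the capability table.
     */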
2033
2034 /* Check if BLOB size is greater than 4k in 4k recording case */
2035 if (m_bIs4KVideo && bJpegExceeds4K) {
2036 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2037 pthread_mutex_unlock(&mMutex);
2038 return -EINVAL;
2039 }
2040
Emilian Peev7650c122017-01-19 08:24:33 -08002041 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2042 depthPresent) {
2043 LOGE("HAL doesn't support depth streams in HFR mode!");
2044 pthread_mutex_unlock(&mMutex);
2045 return -EINVAL;
2046 }
2047
Thierry Strudel3d639192016-09-09 11:52:26 -07002048 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2049 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2050 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2051 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2052 // configurations:
2053 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2054 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2055 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2056 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2057 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2058 __func__);
2059 pthread_mutex_unlock(&mMutex);
2060 return -EINVAL;
2061 }
2062
2063    // If a JPEG stream is present, a YUV888 stream is on the encoder path, and
2064    // the YUV stream's size is strictly greater than the JPEG size, let the
2065    // YUV888 size override the JPEG size so that we can take advantage of postproc bypass.
2066 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2067 jpegSize.width, jpegSize.height) &&
2068 largeYuv888Size.width > jpegSize.width &&
2069 largeYuv888Size.height > jpegSize.height) {
2070 bYuv888OverrideJpeg = true;
2071 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2072 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2073 }
2074
2075 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2076 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2077 commonFeatureMask);
2078 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2079 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2080
2081 rc = validateStreamDimensions(streamList);
2082 if (rc == NO_ERROR) {
2083 rc = validateStreamRotations(streamList);
2084 }
2085 if (rc != NO_ERROR) {
2086 LOGE("Invalid stream configuration requested!");
2087 pthread_mutex_unlock(&mMutex);
2088 return rc;
2089 }
2090
Emilian Peev0f3c3162017-03-15 12:57:46 +00002091 if (1 < pdStatCount) {
2092 LOGE("HAL doesn't support multiple PD streams");
2093 pthread_mutex_unlock(&mMutex);
2094 return -EINVAL;
2095 }
2096
2097 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2098 (1 == pdStatCount)) {
2099 LOGE("HAL doesn't support PD streams in HFR mode!");
2100 pthread_mutex_unlock(&mMutex);
2101 return -EINVAL;
2102 }
2103
Thierry Strudel3d639192016-09-09 11:52:26 -07002104 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2105 for (size_t i = 0; i < streamList->num_streams; i++) {
2106 camera3_stream_t *newStream = streamList->streams[i];
2107 LOGH("newStream type = %d, stream format = %d "
2108 "stream size : %d x %d, stream rotation = %d",
2109 newStream->stream_type, newStream->format,
2110 newStream->width, newStream->height, newStream->rotation);
2111        // If the stream is already in mStreamInfo, validate it
2112 bool stream_exists = false;
2113 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2114 it != mStreamInfo.end(); it++) {
2115 if ((*it)->stream == newStream) {
2116 QCamera3ProcessingChannel *channel =
2117 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2118 stream_exists = true;
2119 if (channel)
2120 delete channel;
2121 (*it)->status = VALID;
2122 (*it)->stream->priv = NULL;
2123 (*it)->channel = NULL;
2124 }
2125 }
2126 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2127 //new stream
2128 stream_info_t* stream_info;
2129 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2130 if (!stream_info) {
2131 LOGE("Could not allocate stream info");
2132 rc = -ENOMEM;
2133 pthread_mutex_unlock(&mMutex);
2134 return rc;
2135 }
2136 stream_info->stream = newStream;
2137 stream_info->status = VALID;
2138 stream_info->channel = NULL;
2139 mStreamInfo.push_back(stream_info);
2140 }
2141 /* Covers Opaque ZSL and API1 F/W ZSL */
2142 if (IS_USAGE_ZSL(newStream->usage)
2143 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2144 if (zslStream != NULL) {
2145 LOGE("Multiple input/reprocess streams requested!");
2146 pthread_mutex_unlock(&mMutex);
2147 return BAD_VALUE;
2148 }
2149 zslStream = newStream;
2150 }
2151 /* Covers YUV reprocess */
2152 if (inputStream != NULL) {
2153 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2154 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2155 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2156 && inputStream->width == newStream->width
2157 && inputStream->height == newStream->height) {
2158 if (zslStream != NULL) {
2159                /* This scenario indicates that multiple YUV streams with the same
2160                 * size as the input stream have been requested. Since the zsl stream
2161                 * handle is solely used for overriding the size of streams which
2162                 * share h/w streams, we just make a guess here as to which of the
2163                 * streams is the ZSL stream. This will be refactored once we have
2164                 * generic logic for streams sharing encoder output.
2165 */
2166 LOGH("Warning, Multiple ip/reprocess streams requested!");
2167 }
2168 zslStream = newStream;
2169 }
2170 }
2171 }
2172
2173 /* If a zsl stream is set, we know that we have configured at least one input or
2174 bidirectional stream */
2175 if (NULL != zslStream) {
2176 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2177 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2178 mInputStreamInfo.format = zslStream->format;
2179 mInputStreamInfo.usage = zslStream->usage;
2180 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2181 mInputStreamInfo.dim.width,
2182 mInputStreamInfo.dim.height,
2183 mInputStreamInfo.format, mInputStreamInfo.usage);
2184 }
2185
2186 cleanAndSortStreamInfo();
2187 if (mMetadataChannel) {
2188 delete mMetadataChannel;
2189 mMetadataChannel = NULL;
2190 }
2191 if (mSupportChannel) {
2192 delete mSupportChannel;
2193 mSupportChannel = NULL;
2194 }
2195
2196 if (mAnalysisChannel) {
2197 delete mAnalysisChannel;
2198 mAnalysisChannel = NULL;
2199 }
2200
2201 if (mDummyBatchChannel) {
2202 delete mDummyBatchChannel;
2203 mDummyBatchChannel = NULL;
2204 }
2205
Emilian Peev7650c122017-01-19 08:24:33 -08002206 if (mDepthChannel) {
2207 mDepthChannel = NULL;
2208 }
2209
Thierry Strudel2896d122017-02-23 19:18:03 -08002210 char is_type_value[PROPERTY_VALUE_MAX];
2211 property_get("persist.camera.is_type", is_type_value, "4");
2212 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
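
    /*
     * The default value of "4" is assumed to correspond to IS_TYPE_EIS_3_0
     * in cam_is_type_t (inferred from the comparison above, not verified
     * against the enum definition); any other value leaves
     * m_bEis3PropertyEnabled false.
     */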
2213
Binhao Line406f062017-05-03 14:39:44 -07002214 char property_value[PROPERTY_VALUE_MAX];
2215 property_get("persist.camera.gzoom.at", property_value, "0");
2216 int goog_zoom_at = atoi(property_value);
2217 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0);
2218 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0);
2219
2220 property_get("persist.camera.gzoom.4k", property_value, "0");
2221 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
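
    /*
     * Bit semantics as used below: bit 0 of persist.camera.gzoom.at enables
     * Google zoom on the video stream, bit 1 on the preview stream, e.g.
     * (illustrative):
     *
     *   adb shell setprop persist.camera.gzoom.at 3   # video + preview
     *   adb shell setprop persist.camera.gzoom.4k 1   # also allow 4K video
     */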
2222
Thierry Strudel3d639192016-09-09 11:52:26 -07002223 //Create metadata channel and initialize it
2224 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2225 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2226 gCamCapability[mCameraId]->color_arrangement);
2227 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2228 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002229 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002230 if (mMetadataChannel == NULL) {
2231 LOGE("failed to allocate metadata channel");
2232 rc = -ENOMEM;
2233 pthread_mutex_unlock(&mMutex);
2234 return rc;
2235 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002236 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002237 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2238 if (rc < 0) {
2239 LOGE("metadata channel initialization failed");
2240 delete mMetadataChannel;
2241 mMetadataChannel = NULL;
2242 pthread_mutex_unlock(&mMutex);
2243 return rc;
2244 }
2245
Thierry Strudel2896d122017-02-23 19:18:03 -08002246 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002247 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002248 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002249 // Keep track of preview/video streams indices.
2250 // There could be more than one preview streams, but only one video stream.
2251 int32_t video_stream_idx = -1;
2252 int32_t preview_stream_idx[streamList->num_streams];
2253 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002254 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2255 /* Allocate channel objects for the requested streams */
2256 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002257
Thierry Strudel3d639192016-09-09 11:52:26 -07002258 camera3_stream_t *newStream = streamList->streams[i];
2259 uint32_t stream_usage = newStream->usage;
2260 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2261 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2262 struct camera_info *p_info = NULL;
2263 pthread_mutex_lock(&gCamLock);
2264 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2265 pthread_mutex_unlock(&gCamLock);
2266 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2267 || IS_USAGE_ZSL(newStream->usage)) &&
2268 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002269 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002270 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002271 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2272 if (bUseCommonFeatureMask)
2273 zsl_ppmask = commonFeatureMask;
2274 else
2275 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002276 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002277 if (numStreamsOnEncoder > 0)
2278 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2279 else
2280 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002281 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002282 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002283 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002284 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002285 LOGH("Input stream configured, reprocess config");
2286 } else {
2287 //for non zsl streams find out the format
2288 switch (newStream->format) {
2289 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2290 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002291 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002292 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2293 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2294 /* add additional features to pp feature mask */
2295 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2296 mStreamConfigInfo.num_streams);
2297
2298 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2299 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2300 CAM_STREAM_TYPE_VIDEO;
2301 if (m_bTnrEnabled && m_bTnrVideo) {
2302 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2303 CAM_QCOM_FEATURE_CPP_TNR;
2304 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2305 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2306 ~CAM_QCOM_FEATURE_CDS;
2307 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002308 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2309 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2310 CAM_QTI_FEATURE_PPEISCORE;
2311 }
Binhao Line406f062017-05-03 14:39:44 -07002312 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2313 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2314 CAM_QCOM_FEATURE_GOOG_ZOOM;
2315 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002316 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002317 } else {
2318 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2319 CAM_STREAM_TYPE_PREVIEW;
2320 if (m_bTnrEnabled && m_bTnrPreview) {
2321 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2322 CAM_QCOM_FEATURE_CPP_TNR;
2323 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2324 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2325 ~CAM_QCOM_FEATURE_CDS;
2326 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002327 if(!m_bSwTnrPreview) {
2328 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2329 ~CAM_QTI_FEATURE_SW_TNR;
2330 }
Binhao Line406f062017-05-03 14:39:44 -07002331 if (is_goog_zoom_preview_enabled) {
2332 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2333 CAM_QCOM_FEATURE_GOOG_ZOOM;
2334 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002335 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002336 padding_info.width_padding = mSurfaceStridePadding;
2337 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002338 previewSize.width = (int32_t)newStream->width;
2339 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002340 }
2341 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2342 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2343 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2344 newStream->height;
2345 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2346 newStream->width;
2347 }
2348 }
2349 break;
2350 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002351 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002352 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2353 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2354 if (bUseCommonFeatureMask)
2355 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2356 commonFeatureMask;
2357 else
2358 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2359 CAM_QCOM_FEATURE_NONE;
2360 } else {
2361 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2362 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2363 }
2364 break;
2365 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002366 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002367 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2368 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2369 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2370 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2371 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002372 /* Remove rotation if it is not supported
2373 for 4K LiveVideo snapshot case (online processing) */
2374 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2375 CAM_QCOM_FEATURE_ROTATION)) {
2376 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2377 &= ~CAM_QCOM_FEATURE_ROTATION;
2378 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002379 } else {
2380 if (bUseCommonFeatureMask &&
2381 isOnEncoder(maxViewfinderSize, newStream->width,
2382 newStream->height)) {
2383 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2384 } else {
2385 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2386 }
2387 }
2388 if (isZsl) {
2389 if (zslStream) {
2390 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2391 (int32_t)zslStream->width;
2392 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2393 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002394 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2395 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002396 } else {
2397 LOGE("Error, No ZSL stream identified");
2398 pthread_mutex_unlock(&mMutex);
2399 return -EINVAL;
2400 }
2401 } else if (m_bIs4KVideo) {
2402 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2403 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2404 } else if (bYuv888OverrideJpeg) {
2405 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2406 (int32_t)largeYuv888Size.width;
2407 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2408 (int32_t)largeYuv888Size.height;
2409 }
2410 break;
2411 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2412 case HAL_PIXEL_FORMAT_RAW16:
2413 case HAL_PIXEL_FORMAT_RAW10:
2414 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2415 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2416 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002417 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2418 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2419 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2420 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2421 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2422 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2423 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2424 gCamCapability[mCameraId]->dt[mPDIndex];
2425 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2426 gCamCapability[mCameraId]->vc[mPDIndex];
2427 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002428 break;
2429 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002430 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002431 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2432 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2433 break;
2434 }
2435 }
2436
2437 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2438 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2439 gCamCapability[mCameraId]->color_arrangement);
2440
2441 if (newStream->priv == NULL) {
2442 //New stream, construct channel
2443 switch (newStream->stream_type) {
2444 case CAMERA3_STREAM_INPUT:
2445 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2446 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2447 break;
2448 case CAMERA3_STREAM_BIDIRECTIONAL:
2449 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2450 GRALLOC_USAGE_HW_CAMERA_WRITE;
2451 break;
2452 case CAMERA3_STREAM_OUTPUT:
2453            /* For video encoding streams, set the read/write-rarely
2454             * flags so that the buffers may be allocated un-cached */
2455 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2456 newStream->usage |=
2457 (GRALLOC_USAGE_SW_READ_RARELY |
2458 GRALLOC_USAGE_SW_WRITE_RARELY |
2459 GRALLOC_USAGE_HW_CAMERA_WRITE);
2460 else if (IS_USAGE_ZSL(newStream->usage))
2461 {
2462 LOGD("ZSL usage flag skipping");
2463 }
2464 else if (newStream == zslStream
2465 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2466 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2467 } else
2468 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2469 break;
2470 default:
2471 LOGE("Invalid stream_type %d", newStream->stream_type);
2472 break;
2473 }
2474
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002475 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002476 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2477 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2478 QCamera3ProcessingChannel *channel = NULL;
2479 switch (newStream->format) {
2480 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2481 if ((newStream->usage &
2482 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2483 (streamList->operation_mode ==
2484 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2485 ) {
2486 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2487 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002488 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002489 this,
2490 newStream,
2491 (cam_stream_type_t)
2492 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2493 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2494 mMetadataChannel,
2495 0); //heap buffers are not required for HFR video channel
2496 if (channel == NULL) {
2497 LOGE("allocation of channel failed");
2498 pthread_mutex_unlock(&mMutex);
2499 return -ENOMEM;
2500 }
2501                    // channel->getNumBuffers() will return 0 here, so use
2502                    // MAX_INFLIGHT_HFR_REQUESTS
2503 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2504 newStream->priv = channel;
2505 LOGI("num video buffers in HFR mode: %d",
2506 MAX_INFLIGHT_HFR_REQUESTS);
2507 } else {
2508                    /* Copy the stream contents in the HFR preview-only case to
2509                     * create a dummy batch channel, so that sensor streaming stays
2510                     * in HFR mode */
2511 if (!m_bIsVideo && (streamList->operation_mode ==
2512 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2513 mDummyBatchStream = *newStream;
2514 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002515 int bufferCount = MAX_INFLIGHT_REQUESTS;
2516 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2517 CAM_STREAM_TYPE_VIDEO) {
2518 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2519 bufferCount = MAX_VIDEO_BUFFERS;
2520 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002521 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2522 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002523 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002524 this,
2525 newStream,
2526 (cam_stream_type_t)
2527 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2528 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2529 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002530 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002531 if (channel == NULL) {
2532 LOGE("allocation of channel failed");
2533 pthread_mutex_unlock(&mMutex);
2534 return -ENOMEM;
2535 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002536 /* disable UBWC for preview, though supported,
2537 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002538 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002539 (previewSize.width == (int32_t)videoWidth)&&
2540 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002541 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002542 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002543 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002544 /* When goog_zoom is linked to the preview or video stream,
2545                     * disable UBWC for the linked stream */
2546 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2547 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2548 channel->setUBWCEnabled(false);
2549 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002550 newStream->max_buffers = channel->getNumBuffers();
2551 newStream->priv = channel;
2552 }
2553 break;
2554 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2555 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2556 mChannelHandle,
2557 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002558 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002559 this,
2560 newStream,
2561 (cam_stream_type_t)
2562 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2563 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2564 mMetadataChannel);
2565 if (channel == NULL) {
2566 LOGE("allocation of YUV channel failed");
2567 pthread_mutex_unlock(&mMutex);
2568 return -ENOMEM;
2569 }
2570 newStream->max_buffers = channel->getNumBuffers();
2571 newStream->priv = channel;
2572 break;
2573 }
2574 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2575 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002576 case HAL_PIXEL_FORMAT_RAW10: {
2577 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2578 (HAL_DATASPACE_DEPTH != newStream->data_space))
2579 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002580 mRawChannel = new QCamera3RawChannel(
2581 mCameraHandle->camera_handle, mChannelHandle,
2582 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002583 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002584 this, newStream,
2585 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002586 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002587 if (mRawChannel == NULL) {
2588 LOGE("allocation of raw channel failed");
2589 pthread_mutex_unlock(&mMutex);
2590 return -ENOMEM;
2591 }
2592 newStream->max_buffers = mRawChannel->getNumBuffers();
2593 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2594 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002595 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002596 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002597 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2598 mDepthChannel = new QCamera3DepthChannel(
2599 mCameraHandle->camera_handle, mChannelHandle,
2600 mCameraHandle->ops, NULL, NULL, &padding_info,
2601 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2602 mMetadataChannel);
2603 if (NULL == mDepthChannel) {
2604 LOGE("Allocation of depth channel failed");
2605 pthread_mutex_unlock(&mMutex);
2606 return NO_MEMORY;
2607 }
2608 newStream->priv = mDepthChannel;
2609 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2610 } else {
2611 // Max live snapshot inflight buffer is 1. This is to mitigate
2612 // frame drop issues for video snapshot. The more buffers being
2613 // allocated, the more frame drops there are.
2614 mPictureChannel = new QCamera3PicChannel(
2615 mCameraHandle->camera_handle, mChannelHandle,
2616 mCameraHandle->ops, captureResultCb,
2617 setBufferErrorStatus, &padding_info, this, newStream,
2618 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2619 m_bIs4KVideo, isZsl, mMetadataChannel,
2620 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2621 if (mPictureChannel == NULL) {
2622 LOGE("allocation of channel failed");
2623 pthread_mutex_unlock(&mMutex);
2624 return -ENOMEM;
2625 }
2626 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2627 newStream->max_buffers = mPictureChannel->getNumBuffers();
2628 mPictureChannel->overrideYuvSize(
2629 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2630 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002631 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002632 break;
2633
2634 default:
2635 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002636 pthread_mutex_unlock(&mMutex);
2637 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002638 }
2639 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2640 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2641 } else {
2642 LOGE("Error, Unknown stream type");
2643 pthread_mutex_unlock(&mMutex);
2644 return -EINVAL;
2645 }
2646
2647 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002648 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2649 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002650 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002651 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002652 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2653 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2654 }
2655 }
2656
2657 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2658 it != mStreamInfo.end(); it++) {
2659 if ((*it)->stream == newStream) {
2660 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2661 break;
2662 }
2663 }
2664 } else {
2665 // Channel already exists for this stream
2666 // Do nothing for now
2667 }
2668 padding_info = gCamCapability[mCameraId]->padding_info;
2669
Emilian Peev7650c122017-01-19 08:24:33 -08002670 /* Do not add entries for the input and depth streams in the meta stream info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002671 * since there is no real stream associated with them
2672 */
Emilian Peev7650c122017-01-19 08:24:33 -08002673 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002674 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2675 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002676 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002677 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002678 }
2679
Binhao Lincdb362a2017-04-20 13:31:54 -07002680 // By default, preview stream TNR is disabled.
2681 // Enable TNR to the preview stream if all conditions below are satisfied:
2682 // 1. resolution <= 1080p.
2683 // 2. preview resolution == video resolution.
2684 // 3. video stream TNR is enabled.
2685 // 4. EIS2.0
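    // Example: a 1920x1080 preview paired with a 1920x1080 TNR-enabled video
    // stream under EIS 2.0 qualifies; a 3840x2160 pair does not.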
2686 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2687 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2688 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2689 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2690 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2691 video_stream->width == preview_stream->width &&
2692 video_stream->height == preview_stream->height) {
2693 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2694 CAM_QCOM_FEATURE_CPP_TNR;
2695 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2696 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2697 ~CAM_QCOM_FEATURE_CDS;
2698 }
2699 }
2700
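    // onlyRaw remains true only for the vendor RAW-only stream configuration
    // mode; the analysis and callback support channels below are skipped in
    // that case.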
Thierry Strudel2896d122017-02-23 19:18:03 -08002701 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2702 onlyRaw = false;
2703 }
2704
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002705 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002706 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002707 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002708 cam_analysis_info_t analysisInfo;
2709 int32_t ret = NO_ERROR;
2710 ret = mCommon.getAnalysisInfo(
2711 FALSE,
2712 analysisFeatureMask,
2713 &analysisInfo);
2714 if (ret == NO_ERROR) {
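        // A Y-only (mono) analysis format uses the Y color filter arrangement;
        // otherwise keep the sensor's native color filter arrangement.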
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002715 cam_color_filter_arrangement_t analysis_color_arrangement =
2716 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2717 CAM_FILTER_ARRANGEMENT_Y :
2718 gCamCapability[mCameraId]->color_arrangement);
2719 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2720 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002721 cam_dimension_t analysisDim;
2722 analysisDim = mCommon.getMatchingDimension(previewSize,
2723 analysisInfo.analysis_recommended_res);
2724
2725 mAnalysisChannel = new QCamera3SupportChannel(
2726 mCameraHandle->camera_handle,
2727 mChannelHandle,
2728 mCameraHandle->ops,
2729 &analysisInfo.analysis_padding_info,
2730 analysisFeatureMask,
2731 CAM_STREAM_TYPE_ANALYSIS,
2732 &analysisDim,
2733 (analysisInfo.analysis_format
2734 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2735 : CAM_FORMAT_YUV_420_NV21),
2736 analysisInfo.hw_analysis_supported,
2737 gCamCapability[mCameraId]->color_arrangement,
2738 this,
2739 0); // force buffer count to 0
2740 } else {
2741 LOGW("getAnalysisInfo failed, ret = %d", ret);
2742 }
2743 if (!mAnalysisChannel) {
2744 LOGW("Analysis channel cannot be created");
2745 }
2746 }
2747
Thierry Strudel3d639192016-09-09 11:52:26 -07002748 //RAW DUMP channel
2749 if (mEnableRawDump && isRawStreamRequested == false){
2750 cam_dimension_t rawDumpSize;
2751 rawDumpSize = getMaxRawSize(mCameraId);
2752 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2753 setPAAFSupport(rawDumpFeatureMask,
2754 CAM_STREAM_TYPE_RAW,
2755 gCamCapability[mCameraId]->color_arrangement);
2756 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2757 mChannelHandle,
2758 mCameraHandle->ops,
2759 rawDumpSize,
2760 &padding_info,
2761 this, rawDumpFeatureMask);
2762 if (!mRawDumpChannel) {
2763 LOGE("Raw Dump channel cannot be created");
2764 pthread_mutex_unlock(&mMutex);
2765 return -ENOMEM;
2766 }
2767 }
2768
Thierry Strudel3d639192016-09-09 11:52:26 -07002769 if (mAnalysisChannel) {
2770 cam_analysis_info_t analysisInfo;
2771 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2772 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2773 CAM_STREAM_TYPE_ANALYSIS;
2774 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2775 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002776 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002777 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2778 &analysisInfo);
2779 if (rc != NO_ERROR) {
2780 LOGE("getAnalysisInfo failed, ret = %d", rc);
2781 pthread_mutex_unlock(&mMutex);
2782 return rc;
2783 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002784 cam_color_filter_arrangement_t analysis_color_arrangement =
2785 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2786 CAM_FILTER_ARRANGEMENT_Y :
2787 gCamCapability[mCameraId]->color_arrangement);
2788 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2789 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2790 analysis_color_arrangement);
2791
Thierry Strudel3d639192016-09-09 11:52:26 -07002792 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002793 mCommon.getMatchingDimension(previewSize,
2794 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002795 mStreamConfigInfo.num_streams++;
2796 }
2797
Thierry Strudel2896d122017-02-23 19:18:03 -08002798 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002799 cam_analysis_info_t supportInfo;
2800 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2801 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2802 setPAAFSupport(callbackFeatureMask,
2803 CAM_STREAM_TYPE_CALLBACK,
2804 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002805 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002806 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002807 if (ret != NO_ERROR) {
2808 /* Ignore the error for Mono camera
2809 * because the PAAF bit mask is only set
2810 * for CAM_STREAM_TYPE_ANALYSIS stream type
2811 */
2812 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2813 LOGW("getAnalysisInfo failed, ret = %d", ret);
2814 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002815 }
2816 mSupportChannel = new QCamera3SupportChannel(
2817 mCameraHandle->camera_handle,
2818 mChannelHandle,
2819 mCameraHandle->ops,
2820 &gCamCapability[mCameraId]->padding_info,
2821 callbackFeatureMask,
2822 CAM_STREAM_TYPE_CALLBACK,
2823 &QCamera3SupportChannel::kDim,
2824 CAM_FORMAT_YUV_420_NV21,
2825 supportInfo.hw_analysis_supported,
2826 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002827 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002828 if (!mSupportChannel) {
2829 LOGE("dummy channel cannot be created");
2830 pthread_mutex_unlock(&mMutex);
2831 return -ENOMEM;
2832 }
2833 }
2834
2835 if (mSupportChannel) {
2836 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2837 QCamera3SupportChannel::kDim;
2838 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2839 CAM_STREAM_TYPE_CALLBACK;
2840 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2841 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2842 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2843 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2844 gCamCapability[mCameraId]->color_arrangement);
2845 mStreamConfigInfo.num_streams++;
2846 }
2847
2848 if (mRawDumpChannel) {
2849 cam_dimension_t rawSize;
2850 rawSize = getMaxRawSize(mCameraId);
2851 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2852 rawSize;
2853 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2854 CAM_STREAM_TYPE_RAW;
2855 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2856 CAM_QCOM_FEATURE_NONE;
2857 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2858 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2859 gCamCapability[mCameraId]->color_arrangement);
2860 mStreamConfigInfo.num_streams++;
2861 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002862
2863 if (mHdrPlusRawSrcChannel) {
2864 cam_dimension_t rawSize;
2865 rawSize = getMaxRawSize(mCameraId);
2866 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2867 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2868 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2869 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2870 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2871 gCamCapability[mCameraId]->color_arrangement);
2872 mStreamConfigInfo.num_streams++;
2873 }
2874
Thierry Strudel3d639192016-09-09 11:52:26 -07002875 /* In HFR mode, if video stream is not added, create a dummy channel so that
2876 * ISP can create a batch mode even for preview only case. This channel is
2877 * never 'start'ed (no stream-on), it is only 'initialized' */
2878 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2879 !m_bIsVideo) {
2880 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2881 setPAAFSupport(dummyFeatureMask,
2882 CAM_STREAM_TYPE_VIDEO,
2883 gCamCapability[mCameraId]->color_arrangement);
2884 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2885 mChannelHandle,
2886 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002887 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002888 this,
2889 &mDummyBatchStream,
2890 CAM_STREAM_TYPE_VIDEO,
2891 dummyFeatureMask,
2892 mMetadataChannel);
2893 if (NULL == mDummyBatchChannel) {
2894 LOGE("creation of mDummyBatchChannel failed."
2895 "Preview will use non-hfr sensor mode ");
2896 }
2897 }
2898 if (mDummyBatchChannel) {
2899 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2900 mDummyBatchStream.width;
2901 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2902 mDummyBatchStream.height;
2903 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2904 CAM_STREAM_TYPE_VIDEO;
2905 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2906 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2907 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2908 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2909 gCamCapability[mCameraId]->color_arrangement);
2910 mStreamConfigInfo.num_streams++;
2911 }
2912
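    // Record the in-flight buffer bounds for this configuration: 4K video
    // leaves max_buffers at 0, EIS 3.0 video uses MAX_VIDEO_BUFFERS, and
    // everything else uses MAX_INFLIGHT_REQUESTS.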
2913 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2914 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002915 m_bIs4KVideo ? 0 :
2916 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002917
2918 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2919 for (pendingRequestIterator i = mPendingRequestsList.begin();
2920 i != mPendingRequestsList.end();) {
2921 i = erasePendingRequest(i);
2922 }
2923 mPendingFrameDropList.clear();
2924 // Initialize/Reset the pending buffers list
2925 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2926 req.mPendingBufferList.clear();
2927 }
2928 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2929
Thierry Strudel3d639192016-09-09 11:52:26 -07002930 mCurJpegMeta.clear();
2931 //Get min frame duration for this streams configuration
2932 deriveMinFrameDuration();
2933
Chien-Yu Chenee335912017-02-09 17:53:20 -08002934 mFirstPreviewIntentSeen = false;
2935
2936 // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002937 {
2938 Mutex::Autolock l(gHdrPlusClientLock);
2939 disableHdrPlusModeLocked();
2940 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002941
Thierry Strudel3d639192016-09-09 11:52:26 -07002942 // Update state
2943 mState = CONFIGURED;
2944
Shuzhen Wang3c077d72017-04-20 22:48:59 -07002945 mFirstMetadataCallback = true;
2946
Thierry Strudel3d639192016-09-09 11:52:26 -07002947 pthread_mutex_unlock(&mMutex);
2948
2949 return rc;
2950}
2951
2952/*===========================================================================
2953 * FUNCTION : validateCaptureRequest
2954 *
2955 * DESCRIPTION: validate a capture request from camera service
2956 *
2957 * PARAMETERS :
2958 * @request : request from framework to process
2959 *
2960 * RETURN :
2961 *
2962 *==========================================================================*/
2963int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002964 camera3_capture_request_t *request,
2965 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002966{
2967 ssize_t idx = 0;
2968 const camera3_stream_buffer_t *b;
2969 CameraMetadata meta;
2970
2971 /* Sanity check the request */
2972 if (request == NULL) {
2973 LOGE("NULL capture request");
2974 return BAD_VALUE;
2975 }
2976
2977 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2978 /*settings cannot be null for the first request*/
2979 return BAD_VALUE;
2980 }
2981
2982 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002983 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2984 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002985 LOGE("%s: Request %d: No output buffers provided!",
2986 __FUNCTION__, frameNumber);
2987 return BAD_VALUE;
2988 }
2989 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2990 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2991 request->num_output_buffers, MAX_NUM_STREAMS);
2992 return BAD_VALUE;
2993 }
2994 if (request->input_buffer != NULL) {
2995 b = request->input_buffer;
2996 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2997 LOGE("Request %d: Buffer %ld: Status not OK!",
2998 frameNumber, (long)idx);
2999 return BAD_VALUE;
3000 }
3001 if (b->release_fence != -1) {
3002 LOGE("Request %d: Buffer %ld: Has a release fence!",
3003 frameNumber, (long)idx);
3004 return BAD_VALUE;
3005 }
3006 if (b->buffer == NULL) {
3007 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3008 frameNumber, (long)idx);
3009 return BAD_VALUE;
3010 }
3011 }
3012
3013 // Validate all buffers
3014 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003015 if (b == NULL) {
3016 return BAD_VALUE;
3017 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003018 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003019 QCamera3ProcessingChannel *channel =
3020 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3021 if (channel == NULL) {
3022 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3023 frameNumber, (long)idx);
3024 return BAD_VALUE;
3025 }
3026 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3027 LOGE("Request %d: Buffer %ld: Status not OK!",
3028 frameNumber, (long)idx);
3029 return BAD_VALUE;
3030 }
3031 if (b->release_fence != -1) {
3032 LOGE("Request %d: Buffer %ld: Has a release fence!",
3033 frameNumber, (long)idx);
3034 return BAD_VALUE;
3035 }
3036 if (b->buffer == NULL) {
3037 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3038 frameNumber, (long)idx);
3039 return BAD_VALUE;
3040 }
3041 if (*(b->buffer) == NULL) {
3042 LOGE("Request %d: Buffer %ld: NULL private handle!",
3043 frameNumber, (long)idx);
3044 return BAD_VALUE;
3045 }
3046 idx++;
3047 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003048 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003049 return NO_ERROR;
3050}
3051
3052/*===========================================================================
3053 * FUNCTION : deriveMinFrameDuration
3054 *
3055 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3056 * on currently configured streams.
3057 *
3058 * PARAMETERS : NONE
3059 *
3060 * RETURN : NONE
3061 *
3062 *==========================================================================*/
3063void QCamera3HardwareInterface::deriveMinFrameDuration()
3064{
3065 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3066
3067 maxJpegDim = 0;
3068 maxProcessedDim = 0;
3069 maxRawDim = 0;
3070
3071 // Figure out maximum jpeg, processed, and raw dimensions
3072 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3073 it != mStreamInfo.end(); it++) {
3074
3075 // Input stream doesn't have valid stream_type
3076 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3077 continue;
3078
3079 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3080 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3081 if (dimension > maxJpegDim)
3082 maxJpegDim = dimension;
3083 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3084 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3085 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3086 if (dimension > maxRawDim)
3087 maxRawDim = dimension;
3088 } else {
3089 if (dimension > maxProcessedDim)
3090 maxProcessedDim = dimension;
3091 }
3092 }
3093
3094 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3095 MAX_SIZES_CNT);
3096
3097 //Assume all jpeg dimensions are in processed dimensions.
3098 if (maxJpegDim > maxProcessedDim)
3099 maxProcessedDim = maxJpegDim;
3100 //Find the smallest raw dimension that is greater than or equal to the max processed dimension
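    // e.g. if the largest processed stream is 12MP and the sensor offers 10MP
    // and 16MP raw modes, pick the 16MP raw mode.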
3101 if (maxProcessedDim > maxRawDim) {
3102 maxRawDim = INT32_MAX;
3103
3104 for (size_t i = 0; i < count; i++) {
3105 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3106 gCamCapability[mCameraId]->raw_dim[i].height;
3107 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3108 maxRawDim = dimension;
3109 }
3110 }
3111
3112 //Find minimum durations for processed, jpeg, and raw
3113 for (size_t i = 0; i < count; i++) {
3114 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3115 gCamCapability[mCameraId]->raw_dim[i].height) {
3116 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3117 break;
3118 }
3119 }
3120 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3121 for (size_t i = 0; i < count; i++) {
3122 if (maxProcessedDim ==
3123 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3124 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3125 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3126 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3127 break;
3128 }
3129 }
3130}
3131
3132/*===========================================================================
3133 * FUNCTION : getMinFrameDuration
3134 *
3135 * DESCRIPTION: get minimum frame duration based on the currently derived minimum
3136 * frame durations and the current request configuration.
3137 *
3138 * PARAMETERS : @request: request sent by the frameworks
3139 *
3140 * RETURN : min frame duration for a particular request
3141 *
3142 *==========================================================================*/
3143int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3144{
3145 bool hasJpegStream = false;
3146 bool hasRawStream = false;
3147 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3148 const camera3_stream_t *stream = request->output_buffers[i].stream;
3149 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3150 hasJpegStream = true;
3151 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3152 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3153 stream->format == HAL_PIXEL_FORMAT_RAW16)
3154 hasRawStream = true;
3155 }
3156
3157 if (!hasJpegStream)
3158 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3159 else
3160 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3161}
3162
3163/*===========================================================================
3164 * FUNCTION : handleBuffersDuringFlushLock
3165 *
3166 * DESCRIPTION: Account for buffers returned from back-end during flush
3167 * This function is executed while mMutex is held by the caller.
3168 *
3169 * PARAMETERS :
3170 * @buffer: image buffer for the callback
3171 *
3172 * RETURN :
3173 *==========================================================================*/
3174void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3175{
3176 bool buffer_found = false;
3177 for (List<PendingBuffersInRequest>::iterator req =
3178 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3179 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3180 for (List<PendingBufferInfo>::iterator i =
3181 req->mPendingBufferList.begin();
3182 i != req->mPendingBufferList.end(); i++) {
3183 if (i->buffer == buffer->buffer) {
3184 mPendingBuffersMap.numPendingBufsAtFlush--;
3185 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3186 buffer->buffer, req->frame_number,
3187 mPendingBuffersMap.numPendingBufsAtFlush);
3188 buffer_found = true;
3189 break;
3190 }
3191 }
3192 if (buffer_found) {
3193 break;
3194 }
3195 }
3196 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3197 //signal the flush()
3198 LOGD("All buffers returned to HAL. Continue flush");
3199 pthread_cond_signal(&mBuffersCond);
3200 }
3201}
3202
Thierry Strudel3d639192016-09-09 11:52:26 -07003203/*===========================================================================
3204 * FUNCTION : handleBatchMetadata
3205 *
3206 * DESCRIPTION: Handles metadata buffer callback in batch mode
3207 *
3208 * PARAMETERS : @metadata_buf: metadata buffer
3209 * @free_and_bufdone_meta_buf: Whether to buf-done and free
3210 * the meta buf in this method
3211 *
3212 * RETURN :
3213 *
3214 *==========================================================================*/
3215void QCamera3HardwareInterface::handleBatchMetadata(
3216 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3217{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003218 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003219
3220 if (NULL == metadata_buf) {
3221 LOGE("metadata_buf is NULL");
3222 return;
3223 }
3224 /* In batch mode, the metadata will contain the frame number and timestamp of
3225 * the last frame in the batch. Eg: a batch containing buffers from request
3226 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3227 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3228 * multiple process_capture_results */
3229 metadata_buffer_t *metadata =
3230 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3231 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3232 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3233 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3234 uint32_t frame_number = 0, urgent_frame_number = 0;
3235 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3236 bool invalid_metadata = false;
3237 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3238 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003239 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003240
3241 int32_t *p_frame_number_valid =
3242 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3243 uint32_t *p_frame_number =
3244 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3245 int64_t *p_capture_time =
3246 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3247 int32_t *p_urgent_frame_number_valid =
3248 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3249 uint32_t *p_urgent_frame_number =
3250 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3251
3252 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3253 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3254 (NULL == p_urgent_frame_number)) {
3255 LOGE("Invalid metadata");
3256 invalid_metadata = true;
3257 } else {
3258 frame_number_valid = *p_frame_number_valid;
3259 last_frame_number = *p_frame_number;
3260 last_frame_capture_time = *p_capture_time;
3261 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3262 last_urgent_frame_number = *p_urgent_frame_number;
3263 }
3264
3265 /* In batch mode, when no video buffers are requested, set_parms are sent
3266 * for every capture_request. The difference between consecutive urgent
3267 * frame numbers and frame numbers should be used to interpolate the
3268 * corresponding frame numbers and time stamps */
3269 pthread_mutex_lock(&mMutex);
3270 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003271 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3272 if(idx < 0) {
3273 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3274 last_urgent_frame_number);
3275 mState = ERROR;
3276 pthread_mutex_unlock(&mMutex);
3277 return;
3278 }
3279 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003280 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3281 first_urgent_frame_number;
3282
3283 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3284 urgent_frame_number_valid,
3285 first_urgent_frame_number, last_urgent_frame_number);
3286 }
3287
3288 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003289 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3290 if(idx < 0) {
3291 LOGE("Invalid frame number received: %d. Irrecoverable error",
3292 last_frame_number);
3293 mState = ERROR;
3294 pthread_mutex_unlock(&mMutex);
3295 return;
3296 }
3297 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003298 frameNumDiff = last_frame_number + 1 -
3299 first_frame_number;
3300 mPendingBatchMap.removeItem(last_frame_number);
3301
3302 LOGD("frm: valid: %d frm_num: %d - %d",
3303 frame_number_valid,
3304 first_frame_number, last_frame_number);
3305
3306 }
3307 pthread_mutex_unlock(&mMutex);
3308
3309 if (urgent_frame_number_valid || frame_number_valid) {
3310 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3311 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3312 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3313 urgentFrameNumDiff, last_urgent_frame_number);
3314 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3315 LOGE("frameNumDiff: %d frameNum: %d",
3316 frameNumDiff, last_frame_number);
3317 }
3318
3319 for (size_t i = 0; i < loopCount; i++) {
3320 /* handleMetadataWithLock is called even for invalid_metadata for
3321 * pipeline depth calculation */
3322 if (!invalid_metadata) {
3323 /* Infer frame number. Batch metadata contains frame number of the
3324 * last frame */
3325 if (urgent_frame_number_valid) {
3326 if (i < urgentFrameNumDiff) {
3327 urgent_frame_number =
3328 first_urgent_frame_number + i;
3329 LOGD("inferred urgent frame_number: %d",
3330 urgent_frame_number);
3331 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3332 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3333 } else {
3334 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3335 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3336 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3337 }
3338 }
3339
3340 /* Infer frame number. Batch metadata contains frame number of the
3341 * last frame */
3342 if (frame_number_valid) {
3343 if (i < frameNumDiff) {
3344 frame_number = first_frame_number + i;
3345 LOGD("inferred frame_number: %d", frame_number);
3346 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3347 CAM_INTF_META_FRAME_NUMBER, frame_number);
3348 } else {
3349 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3350 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3351 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3352 }
3353 }
3354
3355 if (last_frame_capture_time) {
3356 //Infer timestamp
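                // Assume frames are evenly spaced at mHFRVideoFps: step back
                // (loopCount - 1) frame intervals from the last timestamp, then
                // advance i intervals for the i-th frame in the batch.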
3357 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003358 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003359 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003360 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003361 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3362 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3363 LOGD("batch capture_time: %lld, capture_time: %lld",
3364 last_frame_capture_time, capture_time);
3365 }
3366 }
3367 pthread_mutex_lock(&mMutex);
3368 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003369 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003370 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3371 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003372 &is_metabuf_queued /* whether the meta buf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003373 pthread_mutex_unlock(&mMutex);
3374 }
3375
3376 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003377 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003378 mMetadataChannel->bufDone(metadata_buf);
3379 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003380 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003381 }
3382}
3383
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003384void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3385 camera3_error_msg_code_t errorCode)
3386{
3387 camera3_notify_msg_t notify_msg;
3388 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3389 notify_msg.type = CAMERA3_MSG_ERROR;
3390 notify_msg.message.error.error_code = errorCode;
3391 notify_msg.message.error.error_stream = NULL;
3392 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003393 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003394
3395 return;
3396}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003397
3398/*===========================================================================
3399 * FUNCTION : sendPartialMetadataWithLock
3400 *
3401 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3402 *
3403 * PARAMETERS : @metadata: metadata buffer
3404 * @requestIter: The iterator for the pending capture request for
3405 * which the partial result is being sen
3406 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3407 * last urgent metadata in a batch. Always true for non-batch mode
3408 *
3409 * RETURN :
3410 *
3411 *==========================================================================*/
3412
3413void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3414 metadata_buffer_t *metadata,
3415 const pendingRequestIterator requestIter,
3416 bool lastUrgentMetadataInBatch)
3417{
3418 camera3_capture_result_t result;
3419 memset(&result, 0, sizeof(camera3_capture_result_t));
3420
3421 requestIter->partial_result_cnt++;
3422
3423 // Extract 3A metadata
3424 result.result = translateCbUrgentMetadataToResultMetadata(
3425 metadata, lastUrgentMetadataInBatch);
3426 // Populate metadata result
3427 result.frame_number = requestIter->frame_number;
3428 result.num_output_buffers = 0;
3429 result.output_buffers = NULL;
3430 result.partial_result = requestIter->partial_result_cnt;
3431
3432 {
3433 Mutex::Autolock l(gHdrPlusClientLock);
3434 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3435 // Notify HDR+ client about the partial metadata.
3436 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3437 result.partial_result == PARTIAL_RESULT_COUNT);
3438 }
3439 }
3440
3441 orchestrateResult(&result);
3442 LOGD("urgent frame_number = %u", result.frame_number);
3443 free_camera_metadata((camera_metadata_t *)result.result);
3444}
3445
Thierry Strudel3d639192016-09-09 11:52:26 -07003446/*===========================================================================
3447 * FUNCTION : handleMetadataWithLock
3448 *
3449 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3450 *
3451 * PARAMETERS : @metadata_buf: metadata buffer
3452 * @free_and_bufdone_meta_buf: Whether to buf-done and free
3453 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003454 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3455 * last urgent metadata in a batch. Always true for non-batch mode
3456 * @lastMetadataInBatch: Boolean to indicate whether this is the
3457 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003458 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3459 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003460 *
3461 * RETURN :
3462 *
3463 *==========================================================================*/
3464void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003465 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003466 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3467 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003468{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003469 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003470 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3471 //during flush do not send metadata from this thread
3472 LOGD("not sending metadata during flush or when mState is error");
3473 if (free_and_bufdone_meta_buf) {
3474 mMetadataChannel->bufDone(metadata_buf);
3475 free(metadata_buf);
3476 }
3477 return;
3478 }
3479
3480 //not in flush
3481 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3482 int32_t frame_number_valid, urgent_frame_number_valid;
3483 uint32_t frame_number, urgent_frame_number;
3484 int64_t capture_time;
3485 nsecs_t currentSysTime;
3486
3487 int32_t *p_frame_number_valid =
3488 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3489 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3490 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3491 int32_t *p_urgent_frame_number_valid =
3492 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3493 uint32_t *p_urgent_frame_number =
3494 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3495 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3496 metadata) {
3497 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3498 *p_frame_number_valid, *p_frame_number);
3499 }
3500
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003501 camera_metadata_t *resultMetadata = nullptr;
3502
Thierry Strudel3d639192016-09-09 11:52:26 -07003503 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3504 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3505 LOGE("Invalid metadata");
3506 if (free_and_bufdone_meta_buf) {
3507 mMetadataChannel->bufDone(metadata_buf);
3508 free(metadata_buf);
3509 }
3510 goto done_metadata;
3511 }
3512 frame_number_valid = *p_frame_number_valid;
3513 frame_number = *p_frame_number;
3514 capture_time = *p_capture_time;
3515 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3516 urgent_frame_number = *p_urgent_frame_number;
3517 currentSysTime = systemTime(CLOCK_MONOTONIC);
3518
3519 // Detect if buffers from any requests are overdue
3520 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003521 int64_t timeout;
3522 {
3523 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3524 // If there is a pending HDR+ request, the following requests may be blocked until the
3525 // HDR+ request is done. So allow a longer timeout.
3526 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3527 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3528 }
3529
3530 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003531 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003532 assert(missed.stream->priv);
3533 if (missed.stream->priv) {
3534 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3535 assert(ch->mStreams[0]);
3536 if (ch->mStreams[0]) {
3537 LOGE("Cancel missing frame = %d, buffer = %p,"
3538 "stream type = %d, stream format = %d",
3539 req.frame_number, missed.buffer,
3540 ch->mStreams[0]->getMyType(), missed.stream->format);
3541 ch->timeoutFrame(req.frame_number);
3542 }
3543 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003544 }
3545 }
3546 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003547 //For the very first metadata callback, regardless of whether it contains a valid
3548 //frame number, send the partial metadata for the jumpstarting requests.
3549 //Note that this has to be done even if the metadata doesn't contain a valid
3550 //urgent frame number, because when only one request is ever submitted to the
3551 //HAL, there won't be a subsequent valid urgent frame number.
3552 if (mFirstMetadataCallback) {
3553 for (pendingRequestIterator i =
3554 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3555 if (i->bUseFirstPartial) {
3556 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3557 }
3558 }
3559 mFirstMetadataCallback = false;
3560 }
3561
Thierry Strudel3d639192016-09-09 11:52:26 -07003562 //Partial result on process_capture_result for timestamp
3563 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003564 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003565
3566 //Received an urgent frame number, handle it
3567 //using partial results
3568 for (pendingRequestIterator i =
3569 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3570 LOGD("Iterator Frame = %d urgent frame = %d",
3571 i->frame_number, urgent_frame_number);
3572
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003573 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003574 (i->partial_result_cnt == 0)) {
3575 LOGE("Error: HAL missed urgent metadata for frame number %d",
3576 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003577 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003578 }
3579
3580 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003581 i->partial_result_cnt == 0) {
3582 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003583 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3584 // Instant AEC settled for this frame.
3585 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3586 mInstantAECSettledFrameNumber = urgent_frame_number;
3587 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003588 break;
3589 }
3590 }
3591 }
3592
3593 if (!frame_number_valid) {
3594 LOGD("Not a valid normal frame number, used as SOF only");
3595 if (free_and_bufdone_meta_buf) {
3596 mMetadataChannel->bufDone(metadata_buf);
3597 free(metadata_buf);
3598 }
3599 goto done_metadata;
3600 }
3601 LOGH("valid frame_number = %u, capture_time = %lld",
3602 frame_number, capture_time);
3603
Emilian Peev7650c122017-01-19 08:24:33 -08003604 if (metadata->is_depth_data_valid) {
3605 handleDepthDataLocked(metadata->depth_data, frame_number);
3606 }
3607
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003608 // Check whether any stream buffer corresponding to this is dropped or not
3609 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3610 // OR, if instant AEC is enabled, drop frames until AEC has settled.
3611 for (auto & pendingRequest : mPendingRequestsList) {
3612 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3613 mInstantAECSettledFrameNumber)) {
3614 camera3_notify_msg_t notify_msg = {};
3615 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003616 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003617 QCamera3ProcessingChannel *channel =
3618 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003619 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003620 if (p_cam_frame_drop) {
3621 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003622 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003623 // Got the stream ID for drop frame.
3624 dropFrame = true;
3625 break;
3626 }
3627 }
3628 } else {
3629 // This is the instant AEC case.
3630 // For instant AEC, drop the stream until AEC has settled.
3631 dropFrame = true;
3632 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003633
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003634 if (dropFrame) {
3635 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3636 if (p_cam_frame_drop) {
3637 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003638 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003639 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003640 } else {
3641 // For instant AEC, inform frame drop and frame number
3642 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3643 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003644 pendingRequest.frame_number, streamID,
3645 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003646 }
3647 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003648 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003649 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003650 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003651 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003652 if (p_cam_frame_drop) {
3653 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003654 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003655 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003656 } else {
3657 // For instant AEC, inform frame drop and frame number
3658 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3659 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003660 pendingRequest.frame_number, streamID,
3661 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003662 }
3663 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003664 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003665 PendingFrameDrop.stream_ID = streamID;
3666 // Add the Frame drop info to mPendingFrameDropList
3667 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003668 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003669 }
3670 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003671 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003672
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003673 for (auto & pendingRequest : mPendingRequestsList) {
3674 // Find the pending request with the frame number.
3675 if (pendingRequest.frame_number == frame_number) {
3676 // Update the sensor timestamp.
3677 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003678
Thierry Strudel3d639192016-09-09 11:52:26 -07003679
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003680 /* Set the timestamp in the display metadata so that clients aware of
3681 private_handle, such as VT, can use these unmodified timestamps.
3682 The camera framework is unaware of this timestamp and cannot change it */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003683 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003684
Thierry Strudel3d639192016-09-09 11:52:26 -07003685 // Find channel requiring metadata, meaning internal offline postprocess
3686 // is needed.
3687 //TODO: for now, we don't support two streams requiring metadata at the same time.
3688 // (because we are not making copies, and the metadata buffer is not reference counted.)
3689 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003690 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3691 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003692 if (iter->need_metadata) {
3693 internalPproc = true;
3694 QCamera3ProcessingChannel *channel =
3695 (QCamera3ProcessingChannel *)iter->stream->priv;
3696 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003697 if(p_is_metabuf_queued != NULL) {
3698 *p_is_metabuf_queued = true;
3699 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003700 break;
3701 }
3702 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003703 for (auto itr = pendingRequest.internalRequestList.begin();
3704 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003705 if (itr->need_metadata) {
3706 internalPproc = true;
3707 QCamera3ProcessingChannel *channel =
3708 (QCamera3ProcessingChannel *)itr->stream->priv;
3709 channel->queueReprocMetadata(metadata_buf);
3710 break;
3711 }
3712 }
3713
Thierry Strudel54dc9782017-02-15 12:12:10 -08003714 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003715
3716 bool *enableZsl = nullptr;
3717 if (gExposeEnableZslKey) {
3718 enableZsl = &pendingRequest.enableZsl;
3719 }
3720
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003721 resultMetadata = translateFromHalMetadata(metadata,
3722 pendingRequest.timestamp, pendingRequest.request_id,
3723 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3724 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003725 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003726 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003727 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003728 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003729 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003730 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003731
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003732 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003733
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003734 if (pendingRequest.blob_request) {
3735 //Dump tuning metadata if enabled and available
3736 char prop[PROPERTY_VALUE_MAX];
3737 memset(prop, 0, sizeof(prop));
3738 property_get("persist.camera.dumpmetadata", prop, "0");
3739 int32_t enabled = atoi(prop);
3740 if (enabled && metadata->is_tuning_params_valid) {
3741 dumpMetadataToFile(metadata->tuning_params,
3742 mMetaFrameCount,
3743 enabled,
3744 "Snapshot",
3745 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003746 }
3747 }
3748
3749 if (!internalPproc) {
3750 LOGD("couldn't find need_metadata for this metadata");
3751 // Return metadata buffer
3752 if (free_and_bufdone_meta_buf) {
3753 mMetadataChannel->bufDone(metadata_buf);
3754 free(metadata_buf);
3755 }
3756 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003757
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003758 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003759 }
3760 }
3761
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003762 // Try to send out shutter callbacks and capture results.
3763 handlePendingResultsWithLock(frame_number, resultMetadata);
3764 return;
3765
Thierry Strudel3d639192016-09-09 11:52:26 -07003766done_metadata:
3767 for (pendingRequestIterator i = mPendingRequestsList.begin();
3768 i != mPendingRequestsList.end() ;i++) {
3769 i->pipeline_depth++;
3770 }
3771 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3772 unblockRequestIfNecessary();
3773}
3774
3775/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003776 * FUNCTION : handleDepthDataLocked
3777 *
3778 * DESCRIPTION: Handles incoming depth data
3779 *
3780 * PARAMETERS : @depthData : Depth data
3781 * @frameNumber: Frame number of the incoming depth data
3782 *
3783 * RETURN :
3784 *
3785 *==========================================================================*/
3786void QCamera3HardwareInterface::handleDepthDataLocked(
3787 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3788 uint32_t currentFrameNumber;
3789 buffer_handle_t *depthBuffer;
3790
3791 if (nullptr == mDepthChannel) {
3792 LOGE("Depth channel not present!");
3793 return;
3794 }
3795
3796 camera3_stream_buffer_t resultBuffer =
3797 {.acquire_fence = -1,
3798 .release_fence = -1,
3799 .status = CAMERA3_BUFFER_STATUS_OK,
3800 .buffer = nullptr,
3801 .stream = mDepthChannel->getStream()};
3802 camera3_capture_result_t result =
3803 {.result = nullptr,
3804 .num_output_buffers = 1,
3805 .output_buffers = &resultBuffer,
3806 .partial_result = 0,
3807 .frame_number = 0};
3808
3809 do {
3810 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3811 if (nullptr == depthBuffer) {
3812 break;
3813 }
3814
3815 result.frame_number = currentFrameNumber;
3816 resultBuffer.buffer = depthBuffer;
3817 if (currentFrameNumber == frameNumber) {
3818 int32_t rc = mDepthChannel->populateDepthData(depthData,
3819 frameNumber);
3820 if (NO_ERROR != rc) {
3821 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3822 } else {
3823 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3824 }
3825 } else if (currentFrameNumber > frameNumber) {
3826 break;
3827 } else {
3828 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3829 {{currentFrameNumber, mDepthChannel->getStream(),
3830 CAMERA3_MSG_ERROR_BUFFER}}};
3831 orchestrateNotify(&notify_msg);
3832
3833 LOGE("Depth buffer for frame number: %d is missing "
3834 "returning back!", currentFrameNumber);
3835 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3836 }
3837 mDepthChannel->unmapBuffer(currentFrameNumber);
3838
3839 orchestrateResult(&result);
3840 } while (currentFrameNumber < frameNumber);
3841}
3842
3843/*===========================================================================
3844 * FUNCTION : notifyErrorFoPendingDepthData
3845 *
3846 * DESCRIPTION: Returns error for any pending depth buffers
3847 *
3848 * PARAMETERS : depthCh - depth channel that needs to get flushed
3849 *
3850 * RETURN :
3851 *
3852 *==========================================================================*/
3853void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3854 QCamera3DepthChannel *depthCh) {
3855 uint32_t currentFrameNumber;
3856 buffer_handle_t *depthBuffer;
3857
3858 if (nullptr == depthCh) {
3859 return;
3860 }
3861
3862 camera3_notify_msg_t notify_msg =
3863 {.type = CAMERA3_MSG_ERROR,
3864 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3865 camera3_stream_buffer_t resultBuffer =
3866 {.acquire_fence = -1,
3867 .release_fence = -1,
3868 .buffer = nullptr,
3869 .stream = depthCh->getStream(),
3870 .status = CAMERA3_BUFFER_STATUS_ERROR};
3871 camera3_capture_result_t result =
3872 {.result = nullptr,
3873 .frame_number = 0,
3874 .num_output_buffers = 1,
3875 .partial_result = 0,
3876 .output_buffers = &resultBuffer};
3877
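    // Return every queued depth buffer to the framework as an error, oldest first.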
3878 while (nullptr !=
3879 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3880 depthCh->unmapBuffer(currentFrameNumber);
3881
3882 notify_msg.message.error.frame_number = currentFrameNumber;
3883 orchestrateNotify(&notify_msg);
3884
3885 resultBuffer.buffer = depthBuffer;
3886 result.frame_number = currentFrameNumber;
3887 orchestrateResult(&result);
3888 };
3889}
3890
3891/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003892 * FUNCTION : hdrPlusPerfLock
3893 *
3894 * DESCRIPTION: perf lock for HDR+ using custom intent
3895 *
3896 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3897 *
3898 * RETURN : None
3899 *
3900 *==========================================================================*/
3901void QCamera3HardwareInterface::hdrPlusPerfLock(
3902 mm_camera_super_buf_t *metadata_buf)
3903{
3904 if (NULL == metadata_buf) {
3905 LOGE("metadata_buf is NULL");
3906 return;
3907 }
3908 metadata_buffer_t *metadata =
3909 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3910 int32_t *p_frame_number_valid =
3911 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3912 uint32_t *p_frame_number =
3913 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3914
3915 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3916 LOGE("%s: Invalid metadata", __func__);
3917 return;
3918 }
3919
3920 //acquire perf lock for 5 sec after the last HDR frame is captured
3921 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3922 if ((p_frame_number != NULL) &&
3923 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003924 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003925 }
3926 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003927}
3928
3929/*===========================================================================
3930 * FUNCTION : handleInputBufferWithLock
3931 *
3932 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3933 *
3934 * PARAMETERS : @frame_number: frame number of the input buffer
3935 *
3936 * RETURN :
3937 *
3938 *==========================================================================*/
3939void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3940{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003941 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003942 pendingRequestIterator i = mPendingRequestsList.begin();
3943 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3944 i++;
3945 }
3946 if (i != mPendingRequestsList.end() && i->input_buffer) {
3947 //found the right request
3948 if (!i->shutter_notified) {
3949 CameraMetadata settings;
3950 camera3_notify_msg_t notify_msg;
3951 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3952 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3953 if(i->settings) {
3954 settings = i->settings;
3955 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3956 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3957 } else {
3958 LOGE("No timestamp in input settings! Using current one.");
3959 }
3960 } else {
3961 LOGE("Input settings missing!");
3962 }
3963
3964 notify_msg.type = CAMERA3_MSG_SHUTTER;
3965 notify_msg.message.shutter.frame_number = frame_number;
3966 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003967 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003968 i->shutter_notified = true;
3969 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3970 i->frame_number, notify_msg.message.shutter.timestamp);
3971 }
3972
3973 if (i->input_buffer->release_fence != -1) {
3974 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3975 close(i->input_buffer->release_fence);
3976 if (rc != OK) {
3977 LOGE("input buffer sync wait failed %d", rc);
3978 }
3979 }
3980
3981 camera3_capture_result result;
3982 memset(&result, 0, sizeof(camera3_capture_result));
3983 result.frame_number = frame_number;
3984 result.result = i->settings;
3985 result.input_buffer = i->input_buffer;
3986 result.partial_result = PARTIAL_RESULT_COUNT;
3987
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003988 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003989 LOGD("Input request metadata and input buffer frame_number = %u",
3990 i->frame_number);
3991 i = erasePendingRequest(i);
3992 } else {
3993 LOGE("Could not find input request for frame number %d", frame_number);
3994 }
3995}
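// Note: for a reprocess request the shutter timestamp is taken from
// ANDROID_SENSOR_TIMESTAMP in the input settings (falling back to the current
// monotonic time if it is missing), and the capture result simply echoes the
// request settings as result metadata together with the input buffer, marked
// as the final partial result (PARTIAL_RESULT_COUNT).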
3996
3997/*===========================================================================
3998 * FUNCTION : handleBufferWithLock
3999 *
4000 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4001 *
4002 * PARAMETERS : @buffer: image buffer for the callback
4003 * @frame_number: frame number of the image buffer
4004 *
4005 * RETURN :
4006 *
4007 *==========================================================================*/
4008void QCamera3HardwareInterface::handleBufferWithLock(
4009 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4010{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004011 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004012
4013 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4014 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4015 }
4016
Thierry Strudel3d639192016-09-09 11:52:26 -07004017 /* Nothing to be done during error state */
4018 if ((ERROR == mState) || (DEINIT == mState)) {
4019 return;
4020 }
4021 if (mFlushPerf) {
4022 handleBuffersDuringFlushLock(buffer);
4023 return;
4024 }
4025 //not in flush
4026 // If the frame number doesn't exist in the pending request list,
4027 // directly send the buffer to the frameworks, and update pending buffers map
4028 // Otherwise, book-keep the buffer.
4029 pendingRequestIterator i = mPendingRequestsList.begin();
4030 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4031 i++;
4032 }
4033 if (i == mPendingRequestsList.end()) {
4034 // Verify all pending requests frame_numbers are greater
4035 for (pendingRequestIterator j = mPendingRequestsList.begin();
4036 j != mPendingRequestsList.end(); j++) {
4037 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
4038 LOGW("Error: pending live frame number %d is smaller than %d",
4039 j->frame_number, frame_number);
4040 }
4041 }
4042 camera3_capture_result_t result;
4043 memset(&result, 0, sizeof(camera3_capture_result_t));
4044 result.result = NULL;
4045 result.frame_number = frame_number;
4046 result.num_output_buffers = 1;
4047 result.partial_result = 0;
4048 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4049 m != mPendingFrameDropList.end(); m++) {
4050 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4051 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4052 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4053 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4054 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4055 frame_number, streamID);
4056 m = mPendingFrameDropList.erase(m);
4057 break;
4058 }
4059 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004060 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07004061 result.output_buffers = buffer;
4062 LOGH("result frame_number = %d, buffer = %p",
4063 frame_number, buffer->buffer);
4064
4065 mPendingBuffersMap.removeBuf(buffer->buffer);
4066
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004067 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004068 } else {
4069 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004070 if (i->input_buffer->release_fence != -1) {
4071 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
4072 close(i->input_buffer->release_fence);
4073 if (rc != OK) {
4074 LOGE("input buffer sync wait failed %d", rc);
4075 }
4076 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004077 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004078
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004079 // Put buffer into the pending request
4080 for (auto &requestedBuffer : i->buffers) {
4081 if (requestedBuffer.stream == buffer->stream) {
4082 if (requestedBuffer.buffer != nullptr) {
4083 LOGE("Error: buffer is already set");
4084 } else {
4085 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
4086 sizeof(camera3_stream_buffer_t));
4087 *(requestedBuffer.buffer) = *buffer;
4088 LOGH("cache buffer %p at result frame_number %u",
4089 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07004090 }
4091 }
4092 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004093
4094 if (i->input_buffer) {
4095 // For a reprocessing request, try to send out shutter callback and result metadata.
4096 handlePendingResultsWithLock(frame_number, nullptr);
4097 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004098 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004099
4100 if (mPreviewStarted == false) {
4101 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4102 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004103 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4104
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004105 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4106 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4107 mPreviewStarted = true;
4108
4109 // Set power hint for preview
4110 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4111 }
4112 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004113}
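// Note: two delivery paths exist above. If the frame number is no longer in
// mPendingRequestsList, the buffer is returned to the framework right away
// through orchestrateResult(). Otherwise a heap copy of the stream buffer is
// cached in the pending request and is sent out (and freed) later by
// handlePendingResultsWithLock() once the matching result metadata arrives.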
4114
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004115void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
4116 const camera_metadata_t *resultMetadata)
4117{
4118 // Find the pending request for this result metadata.
4119 auto requestIter = mPendingRequestsList.begin();
4120 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4121 requestIter++;
4122 }
4123
4124 if (requestIter == mPendingRequestsList.end()) {
4125 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4126 return;
4127 }
4128
4129 // Update the result metadata
4130 requestIter->resultMetadata = resultMetadata;
4131
4132 // Check what type of request this is.
4133 bool liveRequest = false;
4134 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004135 // HDR+ request doesn't have partial results.
4136 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004137 } else if (requestIter->input_buffer != nullptr) {
4138 // Reprocessing request result is the same as settings.
4139 requestIter->resultMetadata = requestIter->settings;
4140 // Reprocessing request doesn't have partial results.
4141 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4142 } else {
4143 liveRequest = true;
4144 requestIter->partial_result_cnt++;
4145 mPendingLiveRequest--;
4146
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004147 {
4148 Mutex::Autolock l(gHdrPlusClientLock);
4149 // For a live request, send the metadata to HDR+ client.
4150 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4151 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4152 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4153 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004154 }
4155 }
4156
4157 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4158 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
4159 bool readyToSend = true;
4160
4161 // Iterate through the pending requests to send out shutter callbacks and results that are
4162 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4163 // live requests that don't have result metadata yet.
4164 auto iter = mPendingRequestsList.begin();
4165 while (iter != mPendingRequestsList.end()) {
4166 // Check if current pending request is ready. If it's not ready, the following pending
4167 // requests are also not ready.
4168 if (readyToSend && iter->resultMetadata == nullptr) {
4169 readyToSend = false;
4170 }
4171
4172 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4173
4174 std::vector<camera3_stream_buffer_t> outputBuffers;
4175
4176 camera3_capture_result_t result = {};
4177 result.frame_number = iter->frame_number;
4178 result.result = iter->resultMetadata;
4179 result.partial_result = iter->partial_result_cnt;
4180
4181 // If this pending buffer has result metadata, we may be able to send out shutter callback
4182 // and result metadata.
4183 if (iter->resultMetadata != nullptr) {
4184 if (!readyToSend) {
4185 // If any of the previous pending request is not ready, this pending request is
4186 // also not ready to send in order to keep shutter callbacks and result metadata
4187 // in order.
4188 iter++;
4189 continue;
4190 }
4191
4192 // Invoke shutter callback if not yet.
4193 if (!iter->shutter_notified) {
4194 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4195
4196 // Find the timestamp in HDR+ result metadata
4197 camera_metadata_ro_entry_t entry;
4198 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4199 ANDROID_SENSOR_TIMESTAMP, &entry);
4200 if (res != OK) {
4201 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4202 __FUNCTION__, iter->frame_number, strerror(-res), res);
4203 } else {
4204 timestamp = entry.data.i64[0];
4205 }
4206
4207 camera3_notify_msg_t notify_msg = {};
4208 notify_msg.type = CAMERA3_MSG_SHUTTER;
4209 notify_msg.message.shutter.frame_number = iter->frame_number;
4210 notify_msg.message.shutter.timestamp = timestamp;
4211 orchestrateNotify(&notify_msg);
4212 iter->shutter_notified = true;
4213 }
4214
4215 result.input_buffer = iter->input_buffer;
4216
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004217 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4218 // If the result metadata belongs to a live request, notify errors for previous pending
4219 // live requests.
4220 mPendingLiveRequest--;
4221
4222 CameraMetadata dummyMetadata;
4223 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4224 result.result = dummyMetadata.release();
4225
4226 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004227
4228 // partial_result should be PARTIAL_RESULT_COUNT in case of
4229 // ERROR_RESULT.
4230 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4231 result.partial_result = PARTIAL_RESULT_COUNT;
4232
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004233 } else {
4234 iter++;
4235 continue;
4236 }
4237
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004238 // Prepare output buffer array
4239 for (auto bufferInfoIter = iter->buffers.begin();
4240 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4241 if (bufferInfoIter->buffer != nullptr) {
4242
4243 QCamera3Channel *channel =
4244 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4245 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4246
4247 // Check if this buffer is a dropped frame.
4248 auto frameDropIter = mPendingFrameDropList.begin();
4249 while (frameDropIter != mPendingFrameDropList.end()) {
4250 if((frameDropIter->stream_ID == streamID) &&
4251 (frameDropIter->frame_number == frameNumber)) {
4252 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4253 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4254 streamID);
4255 mPendingFrameDropList.erase(frameDropIter);
4256 break;
4257 } else {
4258 frameDropIter++;
4259 }
4260 }
4261
4262 // Check buffer error status
4263 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4264 bufferInfoIter->buffer->buffer);
4265 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4266
4267 outputBuffers.push_back(*(bufferInfoIter->buffer));
4268 free(bufferInfoIter->buffer);
4269 bufferInfoIter->buffer = NULL;
4270 }
4271 }
4272
4273 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4274 result.num_output_buffers = outputBuffers.size();
4275
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004276 orchestrateResult(&result);
4277
4278 // For reprocessing, result metadata is the same as settings so do not free it here to
4279 // avoid double free.
4280 if (result.result != iter->settings) {
4281 free_camera_metadata((camera_metadata_t *)result.result);
4282 }
4283 iter->resultMetadata = nullptr;
4284 iter = erasePendingRequest(iter);
4285 }
4286
4287 if (liveRequest) {
4288 for (auto &iter : mPendingRequestsList) {
4289 // Increment pipeline depth for the following pending requests.
4290 if (iter.frame_number > frameNumber) {
4291 iter.pipeline_depth++;
4292 }
4293 }
4294 }
4295
4296 unblockRequestIfNecessary();
4297}
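// Note: mPendingRequestsList is ordered by increasing frame number, so the
// loop above only emits a shutter callback and capture result once every
// older pending request has either been sent or flagged with
// CAMERA3_MSG_ERROR_RESULT, keeping shutter and result delivery in order as
// the inline comments describe.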
4298
Thierry Strudel3d639192016-09-09 11:52:26 -07004299/*===========================================================================
4300 * FUNCTION : unblockRequestIfNecessary
4301 *
4302 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4303 * that mMutex is held when this function is called.
4304 *
4305 * PARAMETERS :
4306 *
4307 * RETURN :
4308 *
4309 *==========================================================================*/
4310void QCamera3HardwareInterface::unblockRequestIfNecessary()
4311{
4312 // Unblock process_capture_request
4313 pthread_cond_signal(&mRequestCond);
4314}
4315
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004316/*===========================================================================
4317 * FUNCTION : isHdrSnapshotRequest
4318 *
4319 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4320 *
4321 * PARAMETERS : camera3 request structure
4322 *
4323 * RETURN : boolean decision variable
4324 *
4325 *==========================================================================*/
4326bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4327{
4328 if (request == NULL) {
4329 LOGE("Invalid request handle");
4330 assert(0);
4331 return false;
4332 }
4333
4334 if (!mForceHdrSnapshot) {
4335 CameraMetadata frame_settings;
4336 frame_settings = request->settings;
4337
4338 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4339 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4340 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4341 return false;
4342 }
4343 } else {
4344 return false;
4345 }
4346
4347 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4348 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4349 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4350 return false;
4351 }
4352 } else {
4353 return false;
4354 }
4355 }
4356
4357 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4358 if (request->output_buffers[i].stream->format
4359 == HAL_PIXEL_FORMAT_BLOB) {
4360 return true;
4361 }
4362 }
4363
4364 return false;
4365}
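// Illustrative summary (comments only, not executed): unless mForceHdrSnapshot
// overrides the settings checks, the request is treated as an HDR snapshot
// only when all of the following hold:
//   ANDROID_CONTROL_MODE       == ANDROID_CONTROL_MODE_USE_SCENE_MODE
//   ANDROID_CONTROL_SCENE_MODE == ANDROID_CONTROL_SCENE_MODE_HDR
//   at least one output buffer targets a BLOB (JPEG) stream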
4366/*===========================================================================
4367 * FUNCTION : orchestrateRequest
4368 *
4369 * DESCRIPTION: Orchestrates a capture request from camera service
4370 *
4371 * PARAMETERS :
4372 * @request : request from framework to process
4373 *
4374 * RETURN : Error status codes
4375 *
4376 *==========================================================================*/
4377int32_t QCamera3HardwareInterface::orchestrateRequest(
4378 camera3_capture_request_t *request)
4379{
4380
4381 uint32_t originalFrameNumber = request->frame_number;
4382 uint32_t originalOutputCount = request->num_output_buffers;
4383 const camera_metadata_t *original_settings = request->settings;
4384 List<InternalRequest> internallyRequestedStreams;
4385 List<InternalRequest> emptyInternalList;
4386
4387 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4388 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4389 uint32_t internalFrameNumber;
4390 CameraMetadata modified_meta;
4391
4392
4393 /* Add Blob channel to list of internally requested streams */
4394 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4395 if (request->output_buffers[i].stream->format
4396 == HAL_PIXEL_FORMAT_BLOB) {
4397 InternalRequest streamRequested;
4398 streamRequested.meteringOnly = 1;
4399 streamRequested.need_metadata = 0;
4400 streamRequested.stream = request->output_buffers[i].stream;
4401 internallyRequestedStreams.push_back(streamRequested);
4402 }
4403 }
4404 request->num_output_buffers = 0;
4405 auto itr = internallyRequestedStreams.begin();
4406
4407 /* Modify setting to set compensation */
4408 modified_meta = request->settings;
4409 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4410 uint8_t aeLock = 1;
4411 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4412 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4413 camera_metadata_t *modified_settings = modified_meta.release();
4414 request->settings = modified_settings;
4415
4416 /* Capture Settling & -2x frame */
4417 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4418 request->frame_number = internalFrameNumber;
4419 processCaptureRequest(request, internallyRequestedStreams);
4420
4421 request->num_output_buffers = originalOutputCount;
4422 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4423 request->frame_number = internalFrameNumber;
4424 processCaptureRequest(request, emptyInternalList);
4425 request->num_output_buffers = 0;
4426
4427 modified_meta = modified_settings;
4428 expCompensation = 0;
4429 aeLock = 1;
4430 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4431 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4432 modified_settings = modified_meta.release();
4433 request->settings = modified_settings;
4434
4435 /* Capture Settling & 0X frame */
4436
4437 itr = internallyRequestedStreams.begin();
4438 if (itr == internallyRequestedStreams.end()) {
4439 LOGE("Error Internally Requested Stream list is empty");
4440 assert(0);
4441 } else {
4442 itr->need_metadata = 0;
4443 itr->meteringOnly = 1;
4444 }
4445
4446 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4447 request->frame_number = internalFrameNumber;
4448 processCaptureRequest(request, internallyRequestedStreams);
4449
4450 itr = internallyRequestedStreams.begin();
4451 if (itr == internallyRequestedStreams.end()) {
4452 ALOGE("Error Internally Requested Stream list is empty");
4453 assert(0);
4454 } else {
4455 itr->need_metadata = 1;
4456 itr->meteringOnly = 0;
4457 }
4458
4459 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4460 request->frame_number = internalFrameNumber;
4461 processCaptureRequest(request, internallyRequestedStreams);
4462
4463 /* Capture 2X frame*/
4464 modified_meta = modified_settings;
4465 expCompensation = GB_HDR_2X_STEP_EV;
4466 aeLock = 1;
4467 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4468 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4469 modified_settings = modified_meta.release();
4470 request->settings = modified_settings;
4471
4472 itr = internallyRequestedStreams.begin();
4473 if (itr == internallyRequestedStreams.end()) {
4474 ALOGE("Error Internally Requested Stream list is empty");
4475 assert(0);
4476 } else {
4477 itr->need_metadata = 0;
4478 itr->meteringOnly = 1;
4479 }
4480 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4481 request->frame_number = internalFrameNumber;
4482 processCaptureRequest(request, internallyRequestedStreams);
4483
4484 itr = internallyRequestedStreams.begin();
4485 if (itr == internallyRequestedStreams.end()) {
4486 ALOGE("Error Internally Requested Stream list is empty");
4487 assert(0);
4488 } else {
4489 itr->need_metadata = 1;
4490 itr->meteringOnly = 0;
4491 }
4492
4493 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4494 request->frame_number = internalFrameNumber;
4495 processCaptureRequest(request, internallyRequestedStreams);
4496
4497
4498 /* Capture 2X on original streaming config*/
4499 internallyRequestedStreams.clear();
4500
4501 /* Restore original settings pointer */
4502 request->settings = original_settings;
4503 } else {
4504 uint32_t internalFrameNumber;
4505 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4506 request->frame_number = internalFrameNumber;
4507 return processCaptureRequest(request, internallyRequestedStreams);
4508 }
4509
4510 return NO_ERROR;
4511}
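// Sketch of the HDR snapshot orchestration above (ordering only; frame numbers
// come from _orchestrationDb at run time):
//
//   framework request N (HDR scene mode, BLOB output)
//     -> internal metering-only request at AE comp = GB_HDR_HALF_STEP_EV, AE locked
//     -> the framework request itself, re-submitted with its original output
//        buffers and mapped back to frame N via allocStoreInternalFrameNumber()
//     -> internal metering-only, then metadata-capturing request at AE comp = 0
//     -> internal metering-only, then metadata-capturing request at AE comp = GB_HDR_2X_STEP_EV
//
// Internal-only requests are registered with generateStoreInternalFrameNumber(),
// which maps them to EMPTY_FRAMEWORK_FRAME_NUMBER so that orchestrateResult()
// and orchestrateNotify() silently drop their callbacks.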
4512
4513/*===========================================================================
4514 * FUNCTION : orchestrateResult
4515 *
4516 * DESCRIPTION: Orchestrates a capture result to camera service
4517 *
4518 * PARAMETERS :
4519 * @result : capture result to be sent to camera service
4520 *
4521 * RETURN :
4522 *
4523 *==========================================================================*/
4524void QCamera3HardwareInterface::orchestrateResult(
4525 camera3_capture_result_t *result)
4526{
4527 uint32_t frameworkFrameNumber;
4528 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4529 frameworkFrameNumber);
4530 if (rc != NO_ERROR) {
4531 LOGE("Cannot find translated frameworkFrameNumber");
4532 assert(0);
4533 } else {
4534 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004535 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004536 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004537 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004538 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4539 camera_metadata_entry_t entry;
4540 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4541 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004542 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004543 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4544 if (ret != OK)
4545 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004546 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004547 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004548 result->frame_number = frameworkFrameNumber;
4549 mCallbackOps->process_capture_result(mCallbackOps, result);
4550 }
4551 }
4552}
4553
4554/*===========================================================================
4555 * FUNCTION : orchestrateNotify
4556 *
4557 * DESCRIPTION: Orchestrates a notify to camera service
4558 *
4559 * PARAMETERS :
4560 * @notify_msg : notify message to be sent to camera service
4561 *
4562 * RETURN :
4563 *
4564 *==========================================================================*/
4565void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4566{
4567 uint32_t frameworkFrameNumber;
4568 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004569 int32_t rc = NO_ERROR;
4570
4571 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004572 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004573
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004574 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004575 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4576 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4577 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004578 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004579 LOGE("Cannot find translated frameworkFrameNumber");
4580 assert(0);
4581 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004582 }
4583 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004584
4585 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4586 LOGD("Internal Request drop the notifyCb");
4587 } else {
4588 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4589 mCallbackOps->notify(mCallbackOps, notify_msg);
4590 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004591}
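// Note: a failed frame-number translation is tolerated only for
// CAMERA3_MSG_ERROR_DEVICE, which is forwarded with frame number 0; any other
// untranslatable notify is treated as a programming error. Notifies that
// belong to internal-only requests (EMPTY_FRAMEWORK_FRAME_NUMBER) are dropped
// here, mirroring what orchestrateResult() does for capture results.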
4592
4593/*===========================================================================
4594 * FUNCTION : FrameNumberRegistry
4595 *
4596 * DESCRIPTION: Constructor
4597 *
4598 * PARAMETERS :
4599 *
4600 * RETURN :
4601 *
4602 *==========================================================================*/
4603FrameNumberRegistry::FrameNumberRegistry()
4604{
4605 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4606}
4607
4608/*===========================================================================
4609 * FUNCTION : ~FrameNumberRegistry
4610 *
4611 * DESCRIPTION: Destructor
4612 *
4613 * PARAMETERS :
4614 *
4615 * RETURN :
4616 *
4617 *==========================================================================*/
4618FrameNumberRegistry::~FrameNumberRegistry()
4619{
4620}
4621
4622/*===========================================================================
4623 * FUNCTION : PurgeOldEntriesLocked
4624 *
4625 * DESCRIPTION: Maintenance function to trigger LRU cleanup mechanism
4626 *
4627 * PARAMETERS :
4628 *
4629 * RETURN : NONE
4630 *
4631 *==========================================================================*/
4632void FrameNumberRegistry::purgeOldEntriesLocked()
4633{
4634 while (_register.begin() != _register.end()) {
4635 auto itr = _register.begin();
4636 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4637 _register.erase(itr);
4638 } else {
4639 return;
4640 }
4641 }
4642}
4643
4644/*===========================================================================
4645 * FUNCTION : allocStoreInternalFrameNumber
4646 *
4647 * DESCRIPTION: Method to note down a framework request and associate a new
4648 * internal request number against it
4649 *
4650 * PARAMETERS :
4651 * @frameworkFrameNumber: Identifier given by framework
4652 * @internalFrameNumber : Output parameter which will have the newly generated
4653 * internal frame number
4654 *
4655 * RETURN : Error code
4656 *
4657 *==========================================================================*/
4658int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4659 uint32_t &internalFrameNumber)
4660{
4661 Mutex::Autolock lock(mRegistryLock);
4662 internalFrameNumber = _nextFreeInternalNumber++;
4663 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4664 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4665 purgeOldEntriesLocked();
4666 return NO_ERROR;
4667}
4668
4669/*===========================================================================
4670 * FUNCTION : generateStoreInternalFrameNumber
4671 *
4672 * DESCRIPTION: Method to associate a new internal request number independent
4673 * of any association with framework requests
4674 *
4675 * PARAMETERS :
4676 * @internalFrameNumber: Output parameter which will have the newly generated
4677 * internal frame number
4678 *
4679 * RETURN : Error code
4680 *
4681 *==========================================================================*/
4682int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4683{
4684 Mutex::Autolock lock(mRegistryLock);
4685 internalFrameNumber = _nextFreeInternalNumber++;
4686 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4687 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4688 purgeOldEntriesLocked();
4689 return NO_ERROR;
4690}
4691
4692/*===========================================================================
4693 * FUNCTION : getFrameworkFrameNumber
4694 *
4695 * DESCRIPTION: Method to query the framework frame number given an internal frame number
4696 *
4697 * PARAMETERS :
4698 * @internalFrameNumber: Internal frame number reference
4699 * @frameworkFrameNumber: Output parameter holding the framework frame number
4700 *
4701 * RETURN : Error code
4702 *
4703 *==========================================================================*/
4704int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4705 uint32_t &frameworkFrameNumber)
4706{
4707 Mutex::Autolock lock(mRegistryLock);
4708 auto itr = _register.find(internalFrameNumber);
4709 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004710 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004711 return -ENOENT;
4712 }
4713
4714 frameworkFrameNumber = itr->second;
4715 purgeOldEntriesLocked();
4716 return NO_ERROR;
4717}
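// Usage sketch for FrameNumberRegistry (illustrative only; "registry" stands
// for an instance such as _orchestrationDb, values start at
// INTERNAL_FRAME_STARTING_NUMBER and old entries are purged past
// FRAME_REGISTER_LRU_SIZE):
//
//   uint32_t internal, fwk;
//   registry.allocStoreInternalFrameNumber(12, internal);  // internal maps to framework frame 12
//   registry.generateStoreInternalFrameNumber(internal);   // maps to EMPTY_FRAMEWORK_FRAME_NUMBER
//   registry.getFrameworkFrameNumber(internal, fwk);       // translate back on the result path;
//                                                          // EMPTY_FRAMEWORK_FRAME_NUMBER means
//                                                          // "internal request, drop the callback"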
Thierry Strudel3d639192016-09-09 11:52:26 -07004718
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004719status_t QCamera3HardwareInterface::fillPbStreamConfig(
4720 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4721 QCamera3Channel *channel, uint32_t streamIndex) {
4722 if (config == nullptr) {
4723 LOGE("%s: config is null", __FUNCTION__);
4724 return BAD_VALUE;
4725 }
4726
4727 if (channel == nullptr) {
4728 LOGE("%s: channel is null", __FUNCTION__);
4729 return BAD_VALUE;
4730 }
4731
4732 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4733 if (stream == nullptr) {
4734 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4735 return NAME_NOT_FOUND;
4736 }
4737
4738 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4739 if (streamInfo == nullptr) {
4740 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4741 return NAME_NOT_FOUND;
4742 }
4743
4744 config->id = pbStreamId;
4745 config->image.width = streamInfo->dim.width;
4746 config->image.height = streamInfo->dim.height;
4747 config->image.padding = 0;
4748 config->image.format = pbStreamFormat;
4749
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004750 uint32_t totalPlaneSize = 0;
4751
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004752 // Fill plane information.
4753 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4754 pbcamera::PlaneConfiguration plane;
4755 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4756 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4757 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004758
4759 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004760 }
4761
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004762 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004763 return OK;
4764}
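// Note: the padding reported in the pbcamera stream configuration is whatever
// the backend allocates beyond the visible planes:
//
//   padding = frame_len - sum(stride_i * scanline_i)
//
// For example (illustrative numbers only), a two-plane buffer with
// frame_len = 3145728 and plane sizes 1920x1088 and 1920x544 bytes reports
// padding = 3145728 - (2088960 + 1044480) = 12288.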
4765
Thierry Strudel3d639192016-09-09 11:52:26 -07004766/*===========================================================================
4767 * FUNCTION : processCaptureRequest
4768 *
4769 * DESCRIPTION: process a capture request from camera service
4770 *
4771 * PARAMETERS :
4772 * @request : request from framework to process
4773 *
4774 * RETURN :
4775 *
4776 *==========================================================================*/
4777int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004778 camera3_capture_request_t *request,
4779 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004780{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004781 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004782 int rc = NO_ERROR;
4783 int32_t request_id;
4784 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004785 bool isVidBufRequested = false;
4786 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004787 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004788
4789 pthread_mutex_lock(&mMutex);
4790
4791 // Validate current state
4792 switch (mState) {
4793 case CONFIGURED:
4794 case STARTED:
4795 /* valid state */
4796 break;
4797
4798 case ERROR:
4799 pthread_mutex_unlock(&mMutex);
4800 handleCameraDeviceError();
4801 return -ENODEV;
4802
4803 default:
4804 LOGE("Invalid state %d", mState);
4805 pthread_mutex_unlock(&mMutex);
4806 return -ENODEV;
4807 }
4808
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004809 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004810 if (rc != NO_ERROR) {
4811 LOGE("incoming request is not valid");
4812 pthread_mutex_unlock(&mMutex);
4813 return rc;
4814 }
4815
4816 meta = request->settings;
4817
4818 // For first capture request, send capture intent, and
4819 // stream on all streams
4820 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004821 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004822 // send an unconfigure to the backend so that the isp
4823 // resources are deallocated
4824 if (!mFirstConfiguration) {
4825 cam_stream_size_info_t stream_config_info;
4826 int32_t hal_version = CAM_HAL_V3;
4827 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4828 stream_config_info.buffer_info.min_buffers =
4829 MIN_INFLIGHT_REQUESTS;
4830 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004831 m_bIs4KVideo ? 0 :
4832 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004833 clear_metadata_buffer(mParameters);
4834 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4835 CAM_INTF_PARM_HAL_VERSION, hal_version);
4836 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4837 CAM_INTF_META_STREAM_INFO, stream_config_info);
4838 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4839 mParameters);
4840 if (rc < 0) {
4841 LOGE("set_parms for unconfigure failed");
4842 pthread_mutex_unlock(&mMutex);
4843 return rc;
4844 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004845
Thierry Strudel3d639192016-09-09 11:52:26 -07004846 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004847 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004848 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004849 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004850 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004851 property_get("persist.camera.is_type", is_type_value, "4");
4852 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4853 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4854 property_get("persist.camera.is_type_preview", is_type_value, "4");
4855 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4856 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004857
4858 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4859 int32_t hal_version = CAM_HAL_V3;
4860 uint8_t captureIntent =
4861 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4862 mCaptureIntent = captureIntent;
4863 clear_metadata_buffer(mParameters);
4864 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4865 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4866 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004867 if (mFirstConfiguration) {
4868 // configure instant AEC
4869 // Instant AEC is a session based parameter and it is needed only
4870 // once per complete session after open camera.
4871 // i.e. This is set only once for the first capture request, after open camera.
4872 setInstantAEC(meta);
4873 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004874 uint8_t fwkVideoStabMode=0;
4875 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4876 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4877 }
4878
Xue Tuecac74e2017-04-17 13:58:15 -07004879 // Turn on EIS only for video/preview streams, and only if the EIS setprop is enabled
4880 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004881 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004882 int32_t vsMode;
4883 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4884 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4885 rc = BAD_VALUE;
4886 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004887 LOGD("setEis %d", setEis);
4888 bool eis3Supported = false;
4889 size_t count = IS_TYPE_MAX;
4890 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4891 for (size_t i = 0; i < count; i++) {
4892 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4893 eis3Supported = true;
4894 break;
4895 }
4896 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004897
4898 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004899 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004900 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4901 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004902 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4903 is_type = isTypePreview;
4904 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4905 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4906 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004907 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004908 } else {
4909 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004910 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004911 } else {
4912 is_type = IS_TYPE_NONE;
4913 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004914 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004915 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004916 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4917 }
4918 }
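// Note on the loop above: with EIS enabled, preview streams get isTypePreview
// and video streams get isTypeVideo (downgraded to IS_TYPE_EIS_2_0 when EIS
// 3.0 is requested but not advertised in the sensor capabilities); all other
// streams, or every stream when setEis is false, stay at IS_TYPE_NONE.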
4919
4920 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4921 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4922
Thierry Strudel54dc9782017-02-15 12:12:10 -08004923 //Disable tintless only if the property is set to 0
4924 memset(prop, 0, sizeof(prop));
4925 property_get("persist.camera.tintless.enable", prop, "1");
4926 int32_t tintless_value = atoi(prop);
4927
Thierry Strudel3d639192016-09-09 11:52:26 -07004928 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4929 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004930
Thierry Strudel3d639192016-09-09 11:52:26 -07004931 //Disable CDS for HFR mode or if DIS/EIS is on.
4932 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4933 //after every configure_stream
4934 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4935 (m_bIsVideo)) {
4936 int32_t cds = CAM_CDS_MODE_OFF;
4937 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4938 CAM_INTF_PARM_CDS_MODE, cds))
4939 LOGE("Failed to disable CDS for HFR mode");
4940
4941 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004942
4943 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4944 uint8_t* use_av_timer = NULL;
4945
4946 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004947 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004948 use_av_timer = &m_debug_avtimer;
4949 }
4950 else{
4951 use_av_timer =
4952 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004953 if (use_av_timer) {
4954 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4955 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004956 }
4957
4958 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4959 rc = BAD_VALUE;
4960 }
4961 }
4962
Thierry Strudel3d639192016-09-09 11:52:26 -07004963 setMobicat();
4964
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004965 uint8_t nrMode = 0;
4966 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4967 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4968 }
4969
Thierry Strudel3d639192016-09-09 11:52:26 -07004970 /* Set fps and hfr mode while sending meta stream info so that sensor
4971 * can configure appropriate streaming mode */
4972 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004973 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4974 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004975 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4976 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004977 if (rc == NO_ERROR) {
4978 int32_t max_fps =
4979 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004980 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004981 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4982 }
4983 /* For HFR, more buffers are dequeued upfront to improve the performance */
4984 if (mBatchSize) {
4985 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4986 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4987 }
4988 }
4989 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004990 LOGE("setHalFpsRange failed");
4991 }
4992 }
4993 if (meta.exists(ANDROID_CONTROL_MODE)) {
4994 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4995 rc = extractSceneMode(meta, metaMode, mParameters);
4996 if (rc != NO_ERROR) {
4997 LOGE("extractSceneMode failed");
4998 }
4999 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005000 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005001
Thierry Strudel04e026f2016-10-10 11:27:36 -07005002 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5003 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5004 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5005 rc = setVideoHdrMode(mParameters, vhdr);
5006 if (rc != NO_ERROR) {
5007 LOGE("setVideoHDR is failed");
5008 }
5009 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005010
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005011 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005012 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005013 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005014 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5015 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5016 sensorModeFullFov)) {
5017 rc = BAD_VALUE;
5018 }
5019 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005020 //TODO: validate the arguments, HSV scenemode should have only the
5021 //advertised fps ranges
5022
5023 /*set the capture intent, hal version, tintless, stream info,
5024 *and DIS enable parameters to the backend*/
5025 LOGD("set_parms META_STREAM_INFO " );
5026 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005027 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5028 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005029 mStreamConfigInfo.type[i],
5030 mStreamConfigInfo.stream_sizes[i].width,
5031 mStreamConfigInfo.stream_sizes[i].height,
5032 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005033 mStreamConfigInfo.format[i],
5034 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005035 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005036
Thierry Strudel3d639192016-09-09 11:52:26 -07005037 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5038 mParameters);
5039 if (rc < 0) {
5040 LOGE("set_parms failed for hal version, stream info");
5041 }
5042
Chien-Yu Chenee335912017-02-09 17:53:20 -08005043 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5044 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005045 if (rc != NO_ERROR) {
5046 LOGE("Failed to get sensor output size");
5047 pthread_mutex_unlock(&mMutex);
5048 goto error_exit;
5049 }
5050
5051 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5052 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08005053 mSensorModeInfo.active_array_size.width,
5054 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005055
5056 /* Set batchmode before initializing channel. Since registerBuffer
5057 * internally initializes some of the channels, better set batchmode
5058 * even before first register buffer */
5059 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5060 it != mStreamInfo.end(); it++) {
5061 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5062 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5063 && mBatchSize) {
5064 rc = channel->setBatchSize(mBatchSize);
5065 //Disable per frame map unmap for HFR/batchmode case
5066 rc |= channel->setPerFrameMapUnmap(false);
5067 if (NO_ERROR != rc) {
5068 LOGE("Channel init failed %d", rc);
5069 pthread_mutex_unlock(&mMutex);
5070 goto error_exit;
5071 }
5072 }
5073 }
5074
5075 //First initialize all streams
5076 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5077 it != mStreamInfo.end(); it++) {
5078 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005079
5080 /* Initial value of NR mode is needed before stream on */
5081 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005082 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5083 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005084 setEis) {
5085 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5086 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5087 is_type = mStreamConfigInfo.is_type[i];
5088 break;
5089 }
5090 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005091 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005092 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005093 rc = channel->initialize(IS_TYPE_NONE);
5094 }
5095 if (NO_ERROR != rc) {
5096 LOGE("Channel initialization failed %d", rc);
5097 pthread_mutex_unlock(&mMutex);
5098 goto error_exit;
5099 }
5100 }
5101
5102 if (mRawDumpChannel) {
5103 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5104 if (rc != NO_ERROR) {
5105 LOGE("Error: Raw Dump Channel init failed");
5106 pthread_mutex_unlock(&mMutex);
5107 goto error_exit;
5108 }
5109 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005110 if (mHdrPlusRawSrcChannel) {
5111 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5112 if (rc != NO_ERROR) {
5113 LOGE("Error: HDR+ RAW Source Channel init failed");
5114 pthread_mutex_unlock(&mMutex);
5115 goto error_exit;
5116 }
5117 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005118 if (mSupportChannel) {
5119 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5120 if (rc < 0) {
5121 LOGE("Support channel initialization failed");
5122 pthread_mutex_unlock(&mMutex);
5123 goto error_exit;
5124 }
5125 }
5126 if (mAnalysisChannel) {
5127 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5128 if (rc < 0) {
5129 LOGE("Analysis channel initialization failed");
5130 pthread_mutex_unlock(&mMutex);
5131 goto error_exit;
5132 }
5133 }
5134 if (mDummyBatchChannel) {
5135 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5136 if (rc < 0) {
5137 LOGE("mDummyBatchChannel setBatchSize failed");
5138 pthread_mutex_unlock(&mMutex);
5139 goto error_exit;
5140 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005141 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005142 if (rc < 0) {
5143 LOGE("mDummyBatchChannel initialization failed");
5144 pthread_mutex_unlock(&mMutex);
5145 goto error_exit;
5146 }
5147 }
5148
5149 // Set bundle info
5150 rc = setBundleInfo();
5151 if (rc < 0) {
5152 LOGE("setBundleInfo failed %d", rc);
5153 pthread_mutex_unlock(&mMutex);
5154 goto error_exit;
5155 }
5156
5157 //update settings from app here
5158 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5159 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5160 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5161 }
5162 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5163 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5164 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5165 }
5166 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5167 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5168 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5169
5170 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5171 (mLinkedCameraId != mCameraId) ) {
5172 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5173 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005174 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005175 goto error_exit;
5176 }
5177 }
5178
5179 // add bundle related cameras
5180 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5181 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005182 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5183 &m_pDualCamCmdPtr->bundle_info;
5184 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005185 if (mIsDeviceLinked)
5186 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5187 else
5188 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5189
5190 pthread_mutex_lock(&gCamLock);
5191
5192 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5193 LOGE("Dualcam: Invalid Session Id ");
5194 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005195 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005196 goto error_exit;
5197 }
5198
5199 if (mIsMainCamera == 1) {
5200 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5201 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005202 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005203 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005204 // related session id should be session id of linked session
5205 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5206 } else {
5207 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5208 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005209 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005210 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005211 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5212 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005213 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005214 pthread_mutex_unlock(&gCamLock);
5215
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005216 rc = mCameraHandle->ops->set_dual_cam_cmd(
5217 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005218 if (rc < 0) {
5219 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005220 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005221 goto error_exit;
5222 }
5223 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005224 goto no_error;
5225error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005226 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005227 return rc;
5228no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005229 mWokenUpByDaemon = false;
5230 mPendingLiveRequest = 0;
5231 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005232 }
5233
5234 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005235 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005236
5237 if (mFlushPerf) {
5238 //we cannot accept any requests during flush
5239 LOGE("process_capture_request cannot proceed during flush");
5240 pthread_mutex_unlock(&mMutex);
5241 return NO_ERROR; //should return an error
5242 }
5243
5244 if (meta.exists(ANDROID_REQUEST_ID)) {
5245 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5246 mCurrentRequestId = request_id;
5247 LOGD("Received request with id: %d", request_id);
5248 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5249 LOGE("Unable to find request id field, \
5250 & no previous id available");
5251 pthread_mutex_unlock(&mMutex);
5252 return NAME_NOT_FOUND;
5253 } else {
5254 LOGD("Re-using old request id");
5255 request_id = mCurrentRequestId;
5256 }
5257
5258 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5259 request->num_output_buffers,
5260 request->input_buffer,
5261 frameNumber);
5262 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005263 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005264 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005265 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005266 uint32_t snapshotStreamId = 0;
5267 for (size_t i = 0; i < request->num_output_buffers; i++) {
5268 const camera3_stream_buffer_t& output = request->output_buffers[i];
5269 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5270
Emilian Peev7650c122017-01-19 08:24:33 -08005271 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5272 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005273 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005274 blob_request = 1;
5275 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5276 }
5277
5278 if (output.acquire_fence != -1) {
5279 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5280 close(output.acquire_fence);
5281 if (rc != OK) {
5282 LOGE("sync wait failed %d", rc);
5283 pthread_mutex_unlock(&mMutex);
5284 return rc;
5285 }
5286 }
5287
Emilian Peev0f3c3162017-03-15 12:57:46 +00005288 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5289 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005290 depthRequestPresent = true;
5291 continue;
5292 }
5293
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005294 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005295 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005296
5297 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5298 isVidBufRequested = true;
5299 }
5300 }
5301
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005302 //FIXME: Add checks to ensure no dups in validateCaptureRequest
5303 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5304 itr++) {
5305 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5306 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5307 channel->getStreamID(channel->getStreamTypeMask());
5308
5309 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5310 isVidBufRequested = true;
5311 }
5312 }
5313
Thierry Strudel3d639192016-09-09 11:52:26 -07005314 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005315 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005316 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005317 }
5318 if (blob_request && mRawDumpChannel) {
5319 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005320 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005321 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005322 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005323 }
5324
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005325 {
5326 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5327 // Request a RAW buffer if
5328 // 1. mHdrPlusRawSrcChannel is valid.
5329 // 2. frameNumber is a multiple of kHdrPlusRawPeriod (in order to limit the RAW capture rate).
5330 // 3. There is no pending HDR+ request.
5331 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5332 mHdrPlusPendingRequests.size() == 0) {
5333 streamsArray.stream_request[streamsArray.num_streams].streamID =
5334 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5335 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5336 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005337 }
5338
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005339 //extract capture intent
5340 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5341 mCaptureIntent =
5342 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5343 }
5344
5345 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5346 mCacMode =
5347 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5348 }
5349
5350 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005351 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005352
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005353 {
5354 Mutex::Autolock l(gHdrPlusClientLock);
5355 // If this request has a still capture intent, try to submit an HDR+ request.
5356 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5357 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5358 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5359 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005360 }
5361
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005362 if (hdrPlusRequest) {
5363 // For a HDR+ request, just set the frame parameters.
5364 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5365 if (rc < 0) {
5366 LOGE("fail to set frame parameters");
5367 pthread_mutex_unlock(&mMutex);
5368 return rc;
5369 }
5370 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005371 /* Parse the settings:
5372 * - For every request in NORMAL MODE
5373 * - For every request in HFR mode during preview only case
5374 * - For first request of every batch in HFR mode during video
5375 * recording. In batch mode the same settings, except the frame number, are
5376 * repeated in each request of the batch.
5377 */
5378 if (!mBatchSize ||
5379 (mBatchSize && !isVidBufRequested) ||
5380 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005381 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005382 if (rc < 0) {
5383 LOGE("fail to set frame parameters");
5384 pthread_mutex_unlock(&mMutex);
5385 return rc;
5386 }
5387 }
5388 /* For batch mode HFR, setFrameParameters is not called for every
5389 * request; only the frame number of the latest request is parsed.
5390 * Keep track of the first and last frame numbers in a batch so that
5391 * metadata for all frame numbers of the batch can be duplicated in
5392 * handleBatchMetadata */
5393 if (mBatchSize) {
5394 if (!mToBeQueuedVidBufs) {
5395 //start of the batch
5396 mFirstFrameNumberInBatch = request->frame_number;
5397 }
5398 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5399 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5400 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005401 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005402 return BAD_VALUE;
5403 }
5404 }
5405 if (mNeedSensorRestart) {
5406 /* Unlock the mutex as restartSensor waits on the channels to be
5407 * stopped, which in turn calls stream callback functions -
5408 * handleBufferWithLock and handleMetadataWithLock */
5409 pthread_mutex_unlock(&mMutex);
5410 rc = dynamicUpdateMetaStreamInfo();
5411 if (rc != NO_ERROR) {
5412 LOGE("Restarting the sensor failed");
5413 return BAD_VALUE;
5414 }
5415 mNeedSensorRestart = false;
5416 pthread_mutex_lock(&mMutex);
5417 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005418 if(mResetInstantAEC) {
5419 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5420 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5421 mResetInstantAEC = false;
5422 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005423 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005424 if (request->input_buffer->acquire_fence != -1) {
5425 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5426 close(request->input_buffer->acquire_fence);
5427 if (rc != OK) {
5428 LOGE("input buffer sync wait failed %d", rc);
5429 pthread_mutex_unlock(&mMutex);
5430 return rc;
5431 }
5432 }
5433 }
5434
5435 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5436 mLastCustIntentFrmNum = frameNumber;
5437 }
5438 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005439 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005440 pendingRequestIterator latestRequest;
5441 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005442 pendingRequest.num_buffers = depthRequestPresent ?
5443 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005444 pendingRequest.request_id = request_id;
5445 pendingRequest.blob_request = blob_request;
5446 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005447 if (request->input_buffer) {
5448 pendingRequest.input_buffer =
5449 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5450 *(pendingRequest.input_buffer) = *(request->input_buffer);
5451 pInputBuffer = pendingRequest.input_buffer;
5452 } else {
5453 pendingRequest.input_buffer = NULL;
5454 pInputBuffer = NULL;
5455 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005456 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005457
5458 pendingRequest.pipeline_depth = 0;
5459 pendingRequest.partial_result_cnt = 0;
5460 extractJpegMetadata(mCurJpegMeta, request);
5461 pendingRequest.jpegMetadata = mCurJpegMeta;
5462 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5463 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005464 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005465 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5466 mHybridAeEnable =
5467 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5468 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005469
5470 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5471 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005472 /* DevCamDebug metadata processCaptureRequest */
5473 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5474 mDevCamDebugMetaEnable =
5475 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5476 }
5477 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5478 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005479
5480 //extract CAC info
5481 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5482 mCacMode =
5483 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5484 }
5485 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005486 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005487
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005488 // extract enableZsl info
5489 if (gExposeEnableZslKey) {
5490 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5491 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5492 mZslEnabled = pendingRequest.enableZsl;
5493 } else {
5494 pendingRequest.enableZsl = mZslEnabled;
5495 }
5496 }
5497
Thierry Strudel3d639192016-09-09 11:52:26 -07005498 PendingBuffersInRequest bufsForCurRequest;
5499 bufsForCurRequest.frame_number = frameNumber;
5500 // Mark current timestamp for the new request
5501 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005502 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005503
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005504 if (hdrPlusRequest) {
5505 // Save settings for this request.
5506 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5507 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5508
5509 // Add to pending HDR+ request queue.
5510 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5511 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5512
5513 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5514 }
5515
Thierry Strudel3d639192016-09-09 11:52:26 -07005516 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005517 if ((request->output_buffers[i].stream->data_space ==
5518 HAL_DATASPACE_DEPTH) &&
5519 (HAL_PIXEL_FORMAT_BLOB ==
5520 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005521 continue;
5522 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005523 RequestedBufferInfo requestedBuf;
5524 memset(&requestedBuf, 0, sizeof(requestedBuf));
5525 requestedBuf.stream = request->output_buffers[i].stream;
5526 requestedBuf.buffer = NULL;
5527 pendingRequest.buffers.push_back(requestedBuf);
5528
5529 // Add the buffer handle to the pending buffers list
5530 PendingBufferInfo bufferInfo;
5531 bufferInfo.buffer = request->output_buffers[i].buffer;
5532 bufferInfo.stream = request->output_buffers[i].stream;
5533 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5534 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5535 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5536 frameNumber, bufferInfo.buffer,
5537 channel->getStreamTypeMask(), bufferInfo.stream->format);
5538 }
5539 // Add this request packet into mPendingBuffersMap
5540 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5541 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5542 mPendingBuffersMap.get_num_overall_buffers());
5543
5544 latestRequest = mPendingRequestsList.insert(
5545 mPendingRequestsList.end(), pendingRequest);
5546 if(mFlush) {
5547 LOGI("mFlush is true");
5548 pthread_mutex_unlock(&mMutex);
5549 return NO_ERROR;
5550 }
5551
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005552 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5553 // channel.
5554 if (!hdrPlusRequest) {
5555 int indexUsed;
5556 // Notify metadata channel we receive a request
5557 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005558
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005559 if(request->input_buffer != NULL){
5560 LOGD("Input request, frame_number %d", frameNumber);
5561 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5562 if (NO_ERROR != rc) {
5563 LOGE("fail to set reproc parameters");
5564 pthread_mutex_unlock(&mMutex);
5565 return rc;
5566 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005567 }
5568
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005569 // Call request on other streams
5570 uint32_t streams_need_metadata = 0;
5571 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5572 for (size_t i = 0; i < request->num_output_buffers; i++) {
5573 const camera3_stream_buffer_t& output = request->output_buffers[i];
5574 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5575
5576 if (channel == NULL) {
5577 LOGW("invalid channel pointer for stream");
5578 continue;
5579 }
5580
5581 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5582 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5583 output.buffer, request->input_buffer, frameNumber);
5584 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005585 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005586 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5587 if (rc < 0) {
5588 LOGE("Fail to request on picture channel");
5589 pthread_mutex_unlock(&mMutex);
5590 return rc;
5591 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005592 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005593 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5594 assert(NULL != mDepthChannel);
5595 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005596
Emilian Peev7650c122017-01-19 08:24:33 -08005597 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5598 if (rc < 0) {
5599 LOGE("Fail to map on depth buffer");
5600 pthread_mutex_unlock(&mMutex);
5601 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005602 }
Emilian Peev7650c122017-01-19 08:24:33 -08005603 } else {
5604 LOGD("snapshot request with buffer %p, frame_number %d",
5605 output.buffer, frameNumber);
5606 if (!request->settings) {
5607 rc = channel->request(output.buffer, frameNumber,
5608 NULL, mPrevParameters, indexUsed);
5609 } else {
5610 rc = channel->request(output.buffer, frameNumber,
5611 NULL, mParameters, indexUsed);
5612 }
5613 if (rc < 0) {
5614 LOGE("Fail to request on picture channel");
5615 pthread_mutex_unlock(&mMutex);
5616 return rc;
5617 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005618
Emilian Peev7650c122017-01-19 08:24:33 -08005619 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5620 uint32_t j = 0;
5621 for (j = 0; j < streamsArray.num_streams; j++) {
5622 if (streamsArray.stream_request[j].streamID == streamId) {
5623 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5624 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5625 else
5626 streamsArray.stream_request[j].buf_index = indexUsed;
5627 break;
5628 }
5629 }
5630 if (j == streamsArray.num_streams) {
5631 LOGE("Did not find matching stream to update index");
5632 assert(0);
5633 }
5634
5635 pendingBufferIter->need_metadata = true;
5636 streams_need_metadata++;
5637 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005638 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005639 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5640 bool needMetadata = false;
5641 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5642 rc = yuvChannel->request(output.buffer, frameNumber,
5643 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5644 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005645 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005646 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005647 pthread_mutex_unlock(&mMutex);
5648 return rc;
5649 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005650
5651 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5652 uint32_t j = 0;
5653 for (j = 0; j < streamsArray.num_streams; j++) {
5654 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005655 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5656 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5657 else
5658 streamsArray.stream_request[j].buf_index = indexUsed;
5659 break;
5660 }
5661 }
5662 if (j == streamsArray.num_streams) {
5663 LOGE("Did not find matching stream to update index");
5664 assert(0);
5665 }
5666
5667 pendingBufferIter->need_metadata = needMetadata;
5668 if (needMetadata)
5669 streams_need_metadata += 1;
5670 LOGD("calling YUV channel request, need_metadata is %d",
5671 needMetadata);
5672 } else {
5673 LOGD("request with buffer %p, frame_number %d",
5674 output.buffer, frameNumber);
5675
5676 rc = channel->request(output.buffer, frameNumber, indexUsed);
5677
5678 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5679 uint32_t j = 0;
5680 for (j = 0; j < streamsArray.num_streams; j++) {
5681 if (streamsArray.stream_request[j].streamID == streamId) {
5682 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5683 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5684 else
5685 streamsArray.stream_request[j].buf_index = indexUsed;
5686 break;
5687 }
5688 }
5689 if (j == streamsArray.num_streams) {
5690 LOGE("Did not find matching stream to update index");
5691 assert(0);
5692 }
5693
5694 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5695 && mBatchSize) {
5696 mToBeQueuedVidBufs++;
5697 if (mToBeQueuedVidBufs == mBatchSize) {
5698 channel->queueBatchBuf();
5699 }
5700 }
5701 if (rc < 0) {
5702 LOGE("request failed");
5703 pthread_mutex_unlock(&mMutex);
5704 return rc;
5705 }
5706 }
5707 pendingBufferIter++;
5708 }
5709
5710 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5711 itr++) {
5712 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5713
5714 if (channel == NULL) {
5715 LOGE("invalid channel pointer for stream");
5716 assert(0);
5717 return BAD_VALUE;
5718 }
5719
5720 InternalRequest requestedStream;
5721 requestedStream = (*itr);
5722
5723
5724 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5725 LOGD("snapshot request internally input buffer %p, frame_number %d",
5726 request->input_buffer, frameNumber);
5727 if(request->input_buffer != NULL){
5728 rc = channel->request(NULL, frameNumber,
5729 pInputBuffer, &mReprocMeta, indexUsed, true,
5730 requestedStream.meteringOnly);
5731 if (rc < 0) {
5732 LOGE("Fail to request on picture channel");
5733 pthread_mutex_unlock(&mMutex);
5734 return rc;
5735 }
5736 } else {
5737 LOGD("snapshot request with frame_number %d", frameNumber);
5738 if (!request->settings) {
5739 rc = channel->request(NULL, frameNumber,
5740 NULL, mPrevParameters, indexUsed, true,
5741 requestedStream.meteringOnly);
5742 } else {
5743 rc = channel->request(NULL, frameNumber,
5744 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5745 }
5746 if (rc < 0) {
5747 LOGE("Fail to request on picture channel");
5748 pthread_mutex_unlock(&mMutex);
5749 return rc;
5750 }
5751
5752 if ((*itr).meteringOnly != 1) {
5753 requestedStream.need_metadata = 1;
5754 streams_need_metadata++;
5755 }
5756 }
5757
5758 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5759 uint32_t j = 0;
5760 for (j = 0; j < streamsArray.num_streams; j++) {
5761 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005762 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5763 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5764 else
5765 streamsArray.stream_request[j].buf_index = indexUsed;
5766 break;
5767 }
5768 }
5769 if (j == streamsArray.num_streams) {
5770 LOGE("Did not find matching stream to update index");
5771 assert(0);
5772 }
5773
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005774 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005775 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005776 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005777 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005778 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005779 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005780 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005781
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005782 //If 2 streams have need_metadata set to true, fail the request, unless
5783 //we copy/reference count the metadata buffer
5784 if (streams_need_metadata > 1) {
5785 LOGE("not supporting request in which two streams requires"
5786 " 2 HAL metadata for reprocessing");
5787 pthread_mutex_unlock(&mMutex);
5788 return -EINVAL;
5789 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005790
Emilian Peev7650c122017-01-19 08:24:33 -08005791 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5792 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5793 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5794 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5795 pthread_mutex_unlock(&mMutex);
5796 return BAD_VALUE;
5797 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005798 if (request->input_buffer == NULL) {
5799 /* Set the parameters to backend:
5800 * - For every request in NORMAL MODE
5801 * - For every request in HFR mode during preview only case
5802 * - Once every batch in HFR mode during video recording
5803 */
5804 if (!mBatchSize ||
5805 (mBatchSize && !isVidBufRequested) ||
5806 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5807 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5808 mBatchSize, isVidBufRequested,
5809 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005810
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005811 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
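                // Merge this request's stream IDs into mBatchedStreamsArray (skipping
                // duplicates) so that the single set_parms issued for the completed batch
                // covers every stream requested anywhere in the batch.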
5812 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5813 uint32_t m = 0;
5814 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5815 if (streamsArray.stream_request[k].streamID ==
5816 mBatchedStreamsArray.stream_request[m].streamID)
5817 break;
5818 }
5819 if (m == mBatchedStreamsArray.num_streams) {
5820 mBatchedStreamsArray.stream_request\
5821 [mBatchedStreamsArray.num_streams].streamID =
5822 streamsArray.stream_request[k].streamID;
5823 mBatchedStreamsArray.stream_request\
5824 [mBatchedStreamsArray.num_streams].buf_index =
5825 streamsArray.stream_request[k].buf_index;
5826 mBatchedStreamsArray.num_streams =
5827 mBatchedStreamsArray.num_streams + 1;
5828 }
5829 }
5830 streamsArray = mBatchedStreamsArray;
5831 }
5832 /* Update stream id of all the requested buffers */
5833 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5834 streamsArray)) {
5835 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005836 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005837 return BAD_VALUE;
5838 }
5839
5840 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5841 mParameters);
5842 if (rc < 0) {
5843 LOGE("set_parms failed");
5844 }
5845 /* reset to zero because the batch is queued */
5846 mToBeQueuedVidBufs = 0;
5847 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5848 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5849 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
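            // Batch is not yet full: accumulate this request's stream IDs into
            // mBatchedStreamsArray so they are included when the batched set_parms
            // is eventually sent.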
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005850 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5851 uint32_t m = 0;
5852 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5853 if (streamsArray.stream_request[k].streamID ==
5854 mBatchedStreamsArray.stream_request[m].streamID)
5855 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005856 }
5857 if (m == mBatchedStreamsArray.num_streams) {
5858 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5859 streamID = streamsArray.stream_request[k].streamID;
5860 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5861 buf_index = streamsArray.stream_request[k].buf_index;
5862 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5863 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005864 }
5865 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005866 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005867
5868 // Start all streams after the first setting is sent, so that the
5869 // setting can be applied sooner: (0 + apply_delay)th frame.
5870 if (mState == CONFIGURED && mChannelHandle) {
5871 //Then start them.
5872 LOGH("Start META Channel");
5873 rc = mMetadataChannel->start();
5874 if (rc < 0) {
5875 LOGE("META channel start failed");
5876 pthread_mutex_unlock(&mMutex);
5877 return rc;
5878 }
5879
5880 if (mAnalysisChannel) {
5881 rc = mAnalysisChannel->start();
5882 if (rc < 0) {
5883 LOGE("Analysis channel start failed");
5884 mMetadataChannel->stop();
5885 pthread_mutex_unlock(&mMutex);
5886 return rc;
5887 }
5888 }
5889
5890 if (mSupportChannel) {
5891 rc = mSupportChannel->start();
5892 if (rc < 0) {
5893 LOGE("Support channel start failed");
5894 mMetadataChannel->stop();
5895 /* Although support and analysis are mutually exclusive today,
5896 adding it in any case for future proofing */
5897 if (mAnalysisChannel) {
5898 mAnalysisChannel->stop();
5899 }
5900 pthread_mutex_unlock(&mMutex);
5901 return rc;
5902 }
5903 }
5904 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5905 it != mStreamInfo.end(); it++) {
5906 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5907 LOGH("Start Processing Channel mask=%d",
5908 channel->getStreamTypeMask());
5909 rc = channel->start();
5910 if (rc < 0) {
5911 LOGE("channel start failed");
5912 pthread_mutex_unlock(&mMutex);
5913 return rc;
5914 }
5915 }
5916
5917 if (mRawDumpChannel) {
5918 LOGD("Starting raw dump stream");
5919 rc = mRawDumpChannel->start();
5920 if (rc != NO_ERROR) {
5921 LOGE("Error Starting Raw Dump Channel");
5922 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5923 it != mStreamInfo.end(); it++) {
5924 QCamera3Channel *channel =
5925 (QCamera3Channel *)(*it)->stream->priv;
5926 LOGH("Stopping Processing Channel mask=%d",
5927 channel->getStreamTypeMask());
5928 channel->stop();
5929 }
5930 if (mSupportChannel)
5931 mSupportChannel->stop();
5932 if (mAnalysisChannel) {
5933 mAnalysisChannel->stop();
5934 }
5935 mMetadataChannel->stop();
5936 pthread_mutex_unlock(&mMutex);
5937 return rc;
5938 }
5939 }
5940
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005941 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005942 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005943 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005944 if (rc != NO_ERROR) {
5945 LOGE("start_channel failed %d", rc);
5946 pthread_mutex_unlock(&mMutex);
5947 return rc;
5948 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005949
5950 {
5951 // Configure Easel for stream on.
5952 Mutex::Autolock l(gHdrPlusClientLock);
5953 if (EaselManagerClientOpened) {
5954 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
5955 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk);
5956 if (rc != OK) {
5957 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5958 mCameraId, mSensorModeInfo.op_pixel_clk);
5959 pthread_mutex_unlock(&mMutex);
5960 return rc;
5961 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005962 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005963 }
5964 }
5965
5966 // Start sensor streaming.
5967 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5968 mChannelHandle);
5969 if (rc != NO_ERROR) {
5970 LOGE("start_sensor_stream_on failed %d", rc);
5971 pthread_mutex_unlock(&mMutex);
5972 return rc;
5973 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005974 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005975 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005976 }
5977
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005978 // Enable HDR+ mode for the first PREVIEW_INTENT request.
5979 {
5980 Mutex::Autolock l(gHdrPlusClientLock);
5981 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5982 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5983 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5984 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5985 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5986 rc = enableHdrPlusModeLocked();
5987 if (rc != OK) {
5988 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
5989 pthread_mutex_unlock(&mMutex);
5990 return rc;
5991 }
5992
5993 mFirstPreviewIntentSeen = true;
5994 }
5995 }
5996
Thierry Strudel3d639192016-09-09 11:52:26 -07005997 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5998
5999 mState = STARTED;
6000 // Added a timed condition wait
6001 struct timespec ts;
6002 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006003 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006004 if (rc < 0) {
6005 isValidTimeout = 0;
6006 LOGE("Error reading the real time clock!!");
6007 }
6008 else {
6009 // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006010 int64_t timeout = 5;
6011 {
6012 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6013 // If there is a pending HDR+ request, the following requests may be blocked until the
6014 // HDR+ request is done. So allow a longer timeout.
6015 if (mHdrPlusPendingRequests.size() > 0) {
6016 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6017 }
6018 }
6019 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006020 }
6021 //Block on conditional variable
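    // Throttle the caller: block while the number of in-flight requests is at or
    // above mMinInFlightRequests, except for reprocess requests (pInputBuffer) and
    // when the HAL is in an ERROR/DEINIT state.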
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006022 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006023 (mState != ERROR) && (mState != DEINIT)) {
6024 if (!isValidTimeout) {
6025 LOGD("Blocking on conditional wait");
6026 pthread_cond_wait(&mRequestCond, &mMutex);
6027 }
6028 else {
6029 LOGD("Blocking on timed conditional wait");
6030 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6031 if (rc == ETIMEDOUT) {
6032 rc = -ENODEV;
6033 LOGE("Unblocked on timeout!!!!");
6034 break;
6035 }
6036 }
6037 LOGD("Unblocked");
6038 if (mWokenUpByDaemon) {
6039 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006040 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006041 break;
6042 }
6043 }
6044 pthread_mutex_unlock(&mMutex);
6045
6046 return rc;
6047}
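/*
 * Illustrative sketch (not part of the HAL): a minimal framework-side capture
 * request as consumed by processCaptureRequest() above. Field names follow
 * camera3.h; the stream/buffer variables are hypothetical placeholders.
 *
 *   camera3_capture_request_t req = {};
 *   req.frame_number = 1;
 *   req.settings = defaultSettings;   // e.g. from construct_default_request_settings()
 *   req.input_buffer = NULL;          // no reprocess input
 *   camera3_stream_buffer_t out = {};
 *   out.stream = previewStream;       // one of the streams passed to configure_streams()
 *   out.buffer = &previewBufferHandle;
 *   out.acquire_fence = -1;           // no fence to wait on
 *   out.release_fence = -1;
 *   req.num_output_buffers = 1;
 *   req.output_buffers = &out;
 *   device->ops->process_capture_request(device, &req);
 */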
6048
6049/*===========================================================================
6050 * FUNCTION : dump
6051 *
6052 * DESCRIPTION: Dump HAL3 state (pending requests, pending buffers, frame drop list) to the given fd
6053 *
6054 * PARAMETERS :
6055 *   @fd : file descriptor to write the dump to
6056 *
6057 * RETURN : None
6058 *==========================================================================*/
6059void QCamera3HardwareInterface::dump(int fd)
6060{
6061 pthread_mutex_lock(&mMutex);
6062 dprintf(fd, "\n Camera HAL3 information Begin \n");
6063
6064 dprintf(fd, "\nNumber of pending requests: %zu \n",
6065 mPendingRequestsList.size());
6066 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6067 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6068 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6069 for(pendingRequestIterator i = mPendingRequestsList.begin();
6070 i != mPendingRequestsList.end(); i++) {
6071 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6072 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6073 i->input_buffer);
6074 }
6075 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6076 mPendingBuffersMap.get_num_overall_buffers());
6077 dprintf(fd, "-------+------------------\n");
6078 dprintf(fd, " Frame | Stream type mask \n");
6079 dprintf(fd, "-------+------------------\n");
6080 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6081 for(auto &j : req.mPendingBufferList) {
6082 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6083 dprintf(fd, " %5d | %11d \n",
6084 req.frame_number, channel->getStreamTypeMask());
6085 }
6086 }
6087 dprintf(fd, "-------+------------------\n");
6088
6089 dprintf(fd, "\nPending frame drop list: %zu\n",
6090 mPendingFrameDropList.size());
6091 dprintf(fd, "-------+-----------\n");
6092 dprintf(fd, " Frame | Stream ID \n");
6093 dprintf(fd, "-------+-----------\n");
6094 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6095 i != mPendingFrameDropList.end(); i++) {
6096 dprintf(fd, " %5d | %9d \n",
6097 i->frame_number, i->stream_ID);
6098 }
6099 dprintf(fd, "-------+-----------\n");
6100
6101 dprintf(fd, "\n Camera HAL3 information End \n");
6102
6103 /* use dumpsys media.camera as trigger to send update debug level event */
6104 mUpdateDebugLevel = true;
6105 pthread_mutex_unlock(&mMutex);
6106 return;
6107}
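// Note: this dump is reached through the camera3_device dump hook; as the comment
// above suggests, it can typically be triggered from the host with
// "adb shell dumpsys media.camera".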
6108
6109/*===========================================================================
6110 * FUNCTION : flush
6111 *
6112 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6113 * conditionally restarts channels
6114 *
6115 * PARAMETERS :
6116 * @ restartChannels: re-start all channels
6117 *
6118 *
6119 * RETURN :
6120 * 0 on success
6121 * Error code on failure
6122 *==========================================================================*/
6123int QCamera3HardwareInterface::flush(bool restartChannels)
6124{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006125 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006126 int32_t rc = NO_ERROR;
6127
6128 LOGD("Unblocking Process Capture Request");
6129 pthread_mutex_lock(&mMutex);
6130 mFlush = true;
6131 pthread_mutex_unlock(&mMutex);
6132
6133 rc = stopAllChannels();
6134 // unlink of dualcam
6135 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006136 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6137 &m_pDualCamCmdPtr->bundle_info;
6138 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006139 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6140 pthread_mutex_lock(&gCamLock);
6141
6142 if (mIsMainCamera == 1) {
6143 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6144 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006145 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006146 // related session id should be session id of linked session
6147 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6148 } else {
6149 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6150 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006151 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006152 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6153 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006154 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006155 pthread_mutex_unlock(&gCamLock);
6156
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006157 rc = mCameraHandle->ops->set_dual_cam_cmd(
6158 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006159 if (rc < 0) {
6160 LOGE("Dualcam: Unlink failed, but still proceed to close");
6161 }
6162 }
6163
6164 if (rc < 0) {
6165 LOGE("stopAllChannels failed");
6166 return rc;
6167 }
6168 if (mChannelHandle) {
6169 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6170 mChannelHandle);
6171 }
6172
6173 // Reset bundle info
6174 rc = setBundleInfo();
6175 if (rc < 0) {
6176 LOGE("setBundleInfo failed %d", rc);
6177 return rc;
6178 }
6179
6180 // Mutex Lock
6181 pthread_mutex_lock(&mMutex);
6182
6183 // Unblock process_capture_request
6184 mPendingLiveRequest = 0;
6185 pthread_cond_signal(&mRequestCond);
6186
6187 rc = notifyErrorForPendingRequests();
6188 if (rc < 0) {
6189 LOGE("notifyErrorForPendingRequests failed");
6190 pthread_mutex_unlock(&mMutex);
6191 return rc;
6192 }
6193
6194 mFlush = false;
6195
6196 // Start the Streams/Channels
6197 if (restartChannels) {
6198 rc = startAllChannels();
6199 if (rc < 0) {
6200 LOGE("startAllChannels failed");
6201 pthread_mutex_unlock(&mMutex);
6202 return rc;
6203 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006204 if (mChannelHandle) {
6205 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006206 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006207 if (rc < 0) {
6208 LOGE("start_channel failed");
6209 pthread_mutex_unlock(&mMutex);
6210 return rc;
6211 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006212 }
6213 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006214 pthread_mutex_unlock(&mMutex);
6215
6216 return 0;
6217}
6218
6219/*===========================================================================
6220 * FUNCTION : flushPerf
6221 *
6222 * DESCRIPTION: This is the performance optimization version of flush that does
6223 * not use stream off; instead it flushes the backend and waits for pending buffers to return
6224 *
6225 * PARAMETERS :
6226 *
6227 *
6228 * RETURN : 0 : success
6229 * -EINVAL: input is malformed (device is not valid)
6230 * -ENODEV: if the device has encountered a serious error
6231 *==========================================================================*/
6232int QCamera3HardwareInterface::flushPerf()
6233{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006234 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006235 int32_t rc = 0;
6236 struct timespec timeout;
6237 bool timed_wait = false;
6238
6239 pthread_mutex_lock(&mMutex);
6240 mFlushPerf = true;
6241 mPendingBuffersMap.numPendingBufsAtFlush =
6242 mPendingBuffersMap.get_num_overall_buffers();
6243 LOGD("Calling flush. Wait for %d buffers to return",
6244 mPendingBuffersMap.numPendingBufsAtFlush);
6245
6246 /* send the flush event to the backend */
6247 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6248 if (rc < 0) {
6249 LOGE("Error in flush: IOCTL failure");
6250 mFlushPerf = false;
6251 pthread_mutex_unlock(&mMutex);
6252 return -ENODEV;
6253 }
6254
6255 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6256 LOGD("No pending buffers in HAL, return flush");
6257 mFlushPerf = false;
6258 pthread_mutex_unlock(&mMutex);
6259 return rc;
6260 }
6261
6262 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006263 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006264 if (rc < 0) {
6265 LOGE("Error reading the real time clock, cannot use timed wait");
6266 } else {
6267 timeout.tv_sec += FLUSH_TIMEOUT;
6268 timed_wait = true;
6269 }
6270
6271 //Block on conditional variable
6272 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6273 LOGD("Waiting on mBuffersCond");
6274 if (!timed_wait) {
6275 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6276 if (rc != 0) {
6277 LOGE("pthread_cond_wait failed due to rc = %s",
6278 strerror(rc));
6279 break;
6280 }
6281 } else {
6282 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6283 if (rc != 0) {
6284 LOGE("pthread_cond_timedwait failed due to rc = %s",
6285 strerror(rc));
6286 break;
6287 }
6288 }
6289 }
6290 if (rc != 0) {
6291 mFlushPerf = false;
6292 pthread_mutex_unlock(&mMutex);
6293 return -ENODEV;
6294 }
6295
6296 LOGD("Received buffers, now safe to return them");
6297
6298 //make sure the channels handle flush
6299 //currently only required for the picture channel to release snapshot resources
6300 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6301 it != mStreamInfo.end(); it++) {
6302 QCamera3Channel *channel = (*it)->channel;
6303 if (channel) {
6304 rc = channel->flush();
6305 if (rc) {
6306 LOGE("Flushing the channels failed with error %d", rc);
6307 // even though the channel flush failed we need to continue and
6308 // return the buffers we have to the framework, however the return
6309 // value will be an error
6310 rc = -ENODEV;
6311 }
6312 }
6313 }
6314
6315 /* notify the frameworks and send errored results */
6316 rc = notifyErrorForPendingRequests();
6317 if (rc < 0) {
6318 LOGE("notifyErrorForPendingRequests failed");
6319 pthread_mutex_unlock(&mMutex);
6320 return rc;
6321 }
6322
6323 //unblock process_capture_request
6324 mPendingLiveRequest = 0;
6325 unblockRequestIfNecessary();
6326
6327 mFlushPerf = false;
6328 pthread_mutex_unlock(&mMutex);
6329 LOGD ("Flush Operation complete. rc = %d", rc);
6330 return rc;
6331}
6332
6333/*===========================================================================
6334 * FUNCTION : handleCameraDeviceError
6335 *
6336 * DESCRIPTION: This function calls internal flush and notifies the error to
6337 * framework and updates the state variable.
6338 *
6339 * PARAMETERS : None
6340 *
6341 * RETURN : NO_ERROR on Success
6342 * Error code on failure
6343 *==========================================================================*/
6344int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6345{
6346 int32_t rc = NO_ERROR;
6347
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006348 {
6349 Mutex::Autolock lock(mFlushLock);
6350 pthread_mutex_lock(&mMutex);
6351 if (mState != ERROR) {
6352 //if mState != ERROR, nothing to be done
6353 pthread_mutex_unlock(&mMutex);
6354 return NO_ERROR;
6355 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006356 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006357
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006358 rc = flush(false /* restart channels */);
6359 if (NO_ERROR != rc) {
6360 LOGE("internal flush to handle mState = ERROR failed");
6361 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006362
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006363 pthread_mutex_lock(&mMutex);
6364 mState = DEINIT;
6365 pthread_mutex_unlock(&mMutex);
6366 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006367
6368 camera3_notify_msg_t notify_msg;
6369 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6370 notify_msg.type = CAMERA3_MSG_ERROR;
6371 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6372 notify_msg.message.error.error_stream = NULL;
6373 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006374 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006375
6376 return rc;
6377}
6378
6379/*===========================================================================
6380 * FUNCTION : captureResultCb
6381 *
6382 * DESCRIPTION: Callback handler for all capture results
6383 * (streams, as well as metadata)
6384 *
6385 * PARAMETERS :
6386 * @metadata : metadata information
6387 * @buffer : actual gralloc buffer to be returned to frameworks.
6388 * NULL if metadata.
6389 *
6390 * RETURN : NONE
6391 *==========================================================================*/
6392void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6393 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6394{
6395 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006396 pthread_mutex_lock(&mMutex);
6397 uint8_t batchSize = mBatchSize;
6398 pthread_mutex_unlock(&mMutex);
6399 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006400 handleBatchMetadata(metadata_buf,
6401 true /* free_and_bufdone_meta_buf */);
6402 } else { /* mBatchSize = 0 */
6403 hdrPlusPerfLock(metadata_buf);
6404 pthread_mutex_lock(&mMutex);
6405 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006406 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006407 true /* last urgent frame of batch metadata */,
6408 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006409 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006410 pthread_mutex_unlock(&mMutex);
6411 }
6412 } else if (isInputBuffer) {
6413 pthread_mutex_lock(&mMutex);
6414 handleInputBufferWithLock(frame_number);
6415 pthread_mutex_unlock(&mMutex);
6416 } else {
6417 pthread_mutex_lock(&mMutex);
6418 handleBufferWithLock(buffer, frame_number);
6419 pthread_mutex_unlock(&mMutex);
6420 }
6421 return;
6422}
6423
6424/*===========================================================================
6425 * FUNCTION : getReprocessibleOutputStreamId
6426 *
6427 * DESCRIPTION: Get source output stream id for the input reprocess stream
6428 * based on size and format, which would be the largest
6429 * output stream if an input stream exists.
6430 *
6431 * PARAMETERS :
6432 * @id : return the stream id if found
6433 *
6434 * RETURN : int32_t type of status
6435 * NO_ERROR -- success
6436 * non-zero failure code
6437 *==========================================================================*/
6438int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6439{
6440 /* check if there is any output or bidirectional stream with the same size
6441 and format, and return that stream */
6442 if ((mInputStreamInfo.dim.width > 0) &&
6443 (mInputStreamInfo.dim.height > 0)) {
6444 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6445 it != mStreamInfo.end(); it++) {
6446
6447 camera3_stream_t *stream = (*it)->stream;
6448 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6449 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6450 (stream->format == mInputStreamInfo.format)) {
6451 // Usage flag for an input stream and the source output stream
6452 // may be different.
6453 LOGD("Found reprocessible output stream! %p", *it);
6454 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6455 stream->usage, mInputStreamInfo.usage);
6456
6457 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6458 if (channel != NULL && channel->mStreams[0]) {
6459 id = channel->mStreams[0]->getMyServerID();
6460 return NO_ERROR;
6461 }
6462 }
6463 }
6464 } else {
6465 LOGD("No input stream, so no reprocessible output stream");
6466 }
6467 return NAME_NOT_FOUND;
6468}
6469
6470/*===========================================================================
6471 * FUNCTION : lookupFwkName
6472 *
6473 * DESCRIPTION: In case the enum is not the same in fwk and backend,
6474 * make sure the parameter is correctly propagated
6475 *
6476 * PARAMETERS :
6477 * @arr : map between the two enums
6478 * @len : len of the map
6479 * @hal_name : name of the hal_parm to map
6480 *
6481 * RETURN : int type of status
6482 * fwk_name -- success
6483 * none-zero failure code
6484 *==========================================================================*/
6485template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6486 size_t len, halType hal_name)
6487{
6488
6489 for (size_t i = 0; i < len; i++) {
6490 if (arr[i].hal_name == hal_name) {
6491 return arr[i].fwk_name;
6492 }
6493 }
6494
6495 /* Not being able to find a matching framework type is not necessarily
6496 * an error case. This happens when mm-camera supports more attributes
6497 * than the framework does */
6498 LOGH("Cannot find matching framework type");
6499 return NAME_NOT_FOUND;
6500}
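/*
 * Illustrative usage sketch: lookupFwkName() is meant to be used with the
 * { hal_name, fwk_name } map tables defined elsewhere in this HAL. The table,
 * size macro and input value below (EFFECT_MODES_MAP, METADATA_MAP_SIZE,
 * halEffectMode) are assumptions for the example, not guaranteed names.
 *
 *   int fwkEffect = lookupFwkName(EFFECT_MODES_MAP,
 *           METADATA_MAP_SIZE(EFFECT_MODES_MAP), halEffectMode);
 *   if (fwkEffect != NAME_NOT_FOUND) {
 *       // publish fwkEffect to the framework metadata
 *   }
 */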
6501
6502/*===========================================================================
6503 * FUNCTION : lookupHalName
6504 *
6505 * DESCRIPTION: In case the enum is not the same in fwk and backend,
6506 * make sure the parameter is correctly propagated
6507 *
6508 * PARAMETERS :
6509 * @arr : map between the two enums
6510 * @len : len of the map
6511 * @fwk_name : name of the fwk parameter to map
6512 *
6513 * RETURN : int32_t type of status
6514 * hal_name -- success
6515 * none-zero failure code
6516 *==========================================================================*/
6517template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6518 size_t len, fwkType fwk_name)
6519{
6520 for (size_t i = 0; i < len; i++) {
6521 if (arr[i].fwk_name == fwk_name) {
6522 return arr[i].hal_name;
6523 }
6524 }
6525
6526 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6527 return NAME_NOT_FOUND;
6528}
6529
6530/*===========================================================================
6531 * FUNCTION : lookupProp
6532 *
6533 * DESCRIPTION: lookup a value by its name
6534 *
6535 * PARAMETERS :
6536 * @arr : map between the two enums
6537 * @len : size of the map
6538 * @name : name to be looked up
6539 *
6540 * RETURN : Value if found
6541 * CAM_CDS_MODE_MAX if not found
6542 *==========================================================================*/
6543template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6544 size_t len, const char *name)
6545{
6546 if (name) {
6547 for (size_t i = 0; i < len; i++) {
6548 if (!strcmp(arr[i].desc, name)) {
6549 return arr[i].val;
6550 }
6551 }
6552 }
6553 return CAM_CDS_MODE_MAX;
6554}
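/*
 * Illustrative usage sketch for lookupProp(): the property name and the CDS map
 * table below are assumptions for the example (a CDS map is defined elsewhere in
 * this HAL), not guaranteed names.
 *
 *   char prop[PROPERTY_VALUE_MAX];
 *   memset(prop, 0, sizeof(prop));
 *   property_get("persist.camera.CDS", prop, "Auto");
 *   cam_cds_mode_type_t cds_mode = lookupProp(CDS_MAP,
 *           METADATA_MAP_SIZE(CDS_MAP), prop);
 *   if (cds_mode == CAM_CDS_MODE_MAX) {
 *       // no match for the property string; fall back to a default
 *   }
 */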
6555
6556/*===========================================================================
6557 * FUNCTION : translateFromHalMetadata
 *
6558 * DESCRIPTION: Translate metadata from the backend into the camera_metadata
 *              format expected by the framework
6559 *
6560 * PARAMETERS :
6561 * @metadata : metadata information from callback
6562 * @timestamp: metadata buffer timestamp
6563 * @request_id: request id
6564 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006565 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006566 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6567 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006568 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006569 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6570 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006571 *
6572 * RETURN : camera_metadata_t*
6573 * metadata in a format specified by fwk
6574 *==========================================================================*/
6575camera_metadata_t*
6576QCamera3HardwareInterface::translateFromHalMetadata(
6577 metadata_buffer_t *metadata,
6578 nsecs_t timestamp,
6579 int32_t request_id,
6580 const CameraMetadata& jpegMetadata,
6581 uint8_t pipeline_depth,
6582 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006583 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006584 /* DevCamDebug metadata translateFromHalMetadata argument */
6585 uint8_t DevCamDebug_meta_enable,
6586 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006587 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006588 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006589 bool lastMetadataInBatch,
6590 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006591{
6592 CameraMetadata camMetadata;
6593 camera_metadata_t *resultMetadata;
6594
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006595 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006596 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6597 * Timestamp is needed because it's used for shutter notify calculation.
6598 */
6599 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6600 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006601 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006602 }
6603
Thierry Strudel3d639192016-09-09 11:52:26 -07006604 if (jpegMetadata.entryCount())
6605 camMetadata.append(jpegMetadata);
6606
6607 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6608 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6609 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6610 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006611 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006612 if (mBatchSize == 0) {
6613 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6614 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6615 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006616
Samuel Ha68ba5172016-12-15 18:41:12 -08006617 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6618 // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6619 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6620 // DevCamDebug metadata translateFromHalMetadata AF
6621 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6622 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6623 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6624 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6625 }
6626 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6627 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6628 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6629 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6630 }
6631 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6632 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6633 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6634 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6635 }
6636 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6637 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6638 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6639 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6640 }
6641 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6642 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6643 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6644 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6645 }
6646 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6647 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6648 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6649 *DevCamDebug_af_monitor_pdaf_target_pos;
6650 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6651 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6652 }
6653 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6654 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6655 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6656 *DevCamDebug_af_monitor_pdaf_confidence;
6657 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6658 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6659 }
6660 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6661 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6662 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6663 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6664 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6665 }
6666 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6667 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6668 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6669 *DevCamDebug_af_monitor_tof_target_pos;
6670 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6671 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6672 }
6673 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6674 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6675 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6676 *DevCamDebug_af_monitor_tof_confidence;
6677 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6678 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6679 }
6680 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6681 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6682 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6683 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6684 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6685 }
6686 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6687 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6688 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6689 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6690 &fwk_DevCamDebug_af_monitor_type_select, 1);
6691 }
6692 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6693 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6694 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6695 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6696 &fwk_DevCamDebug_af_monitor_refocus, 1);
6697 }
6698 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6699 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6700 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6701 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6702 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6703 }
6704 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6705 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6706 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6707 *DevCamDebug_af_search_pdaf_target_pos;
6708 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6709 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6710 }
6711 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6712 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6713 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6714 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6715 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6716 }
6717 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6718 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6719 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6720 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6721 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6722 }
6723 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6724 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6725 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6726 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6727 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6728 }
6729 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6730 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6731 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6732 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6733 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6734 }
6735 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6736 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6737 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6738 *DevCamDebug_af_search_tof_target_pos;
6739 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6740 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6741 }
6742 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6743 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6744 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6745 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6746 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6747 }
6748 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6749 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6750 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6751 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6752 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6753 }
6754 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6755 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6756 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6757 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6758 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6759 }
6760 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6761 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6762 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6763 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6764 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6765 }
6766 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6767 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6768 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6769 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6770 &fwk_DevCamDebug_af_search_type_select, 1);
6771 }
6772 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6773 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6774 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6775 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6776 &fwk_DevCamDebug_af_search_next_pos, 1);
6777 }
6778 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6779 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6780 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6781 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6782 &fwk_DevCamDebug_af_search_target_pos, 1);
6783 }
6784 // DevCamDebug metadata translateFromHalMetadata AEC
6785 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6786 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6787 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6788 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6789 }
6790 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6791 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6792 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6793 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6794 }
6795 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6796 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6797 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6798 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6799 }
6800 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6801 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6802 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6803 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6804 }
6805 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6806 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6807 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6808 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6809 }
6810 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6811 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6812 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6813 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6814 }
6815 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6816 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6817 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6818 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6819 }
6820 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6821 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6822 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6823 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6824 }
Samuel Ha34229982017-02-17 13:51:11 -08006825 // DevCamDebug metadata translateFromHalMetadata zzHDR
6826 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6827 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6828 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6829 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6830 }
6831 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6832 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006833 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006834 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6835 }
6836 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6837 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6838 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6839 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6840 }
6841 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6842 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006843 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006844 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6845 }
6846 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6847 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6848 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6849 *DevCamDebug_aec_hdr_sensitivity_ratio;
6850 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6851 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6852 }
6853 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6854 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6855 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6856 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6857 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6858 }
6859 // DevCamDebug metadata translateFromHalMetadata ADRC
6860 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6861 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6862 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6863 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6864 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6865 }
6866 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6867 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6868 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6869 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6870 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6871 }
6872 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6873 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6874 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6875 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6876 }
6877 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6878 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6879 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6880 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6881 }
6882 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6883 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6884 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6885 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6886 }
6887 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6888 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6889 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6890 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6891 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006892 // DevCamDebug metadata translateFromHalMetadata AWB
6893 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6894 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6895 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6896 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6897 }
6898 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6899 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6900 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6901 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6902 }
6903 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6904 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6905 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6906 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6907 }
6908 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6909 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6910 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6911 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6912 }
6913 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6914 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6915 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6916 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6917 }
6918 }
6919 // atrace_end(ATRACE_TAG_ALWAYS);
6920
Thierry Strudel3d639192016-09-09 11:52:26 -07006921 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6922 int64_t fwk_frame_number = *frame_number;
6923 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6924 }
6925
6926 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6927 int32_t fps_range[2];
6928 fps_range[0] = (int32_t)float_range->min_fps;
6929 fps_range[1] = (int32_t)float_range->max_fps;
6930 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6931 fps_range, 2);
6932 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6933 fps_range[0], fps_range[1]);
6934 }
6935
6936 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6937 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6938 }
6939
6940 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6941 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6942 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6943 *sceneMode);
6944 if (NAME_NOT_FOUND != val) {
6945 uint8_t fwkSceneMode = (uint8_t)val;
6946 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6947 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6948 fwkSceneMode);
6949 }
6950 }
6951
6952 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6953 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6954 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6955 }
6956
6957 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6958 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6959 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6960 }
6961
6962 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6963 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6964 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6965 }
6966
6967 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6968 CAM_INTF_META_EDGE_MODE, metadata) {
6969 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6970 }
6971
6972 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6973 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6974 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6975 }
6976
6977 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6978 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6979 }
6980
6981 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6982 if (0 <= *flashState) {
6983 uint8_t fwk_flashState = (uint8_t) *flashState;
6984 if (!gCamCapability[mCameraId]->flash_available) {
6985 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6986 }
6987 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6988 }
6989 }
6990
6991 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6992 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6993 if (NAME_NOT_FOUND != val) {
6994 uint8_t fwk_flashMode = (uint8_t)val;
6995 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6996 }
6997 }
6998
6999 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7000 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7001 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7002 }
7003
7004 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7005 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7006 }
7007
7008 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7009 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7010 }
7011
7012 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7013 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7014 }
7015
7016 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7017 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7018 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7019 }
7020
7021 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7022 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7023 LOGD("fwk_videoStab = %d", fwk_videoStab);
7024 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7025 } else {
7026        // Regardless of whether video stabilization is supported, CTS expects the EIS result
7027        // to be non-NULL, so hardcode the video stabilization result to OFF mode.
7028 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7029 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007030 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007031 }
7032
7033 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7034 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7035 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7036 }
7037
7038 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7039 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7040 }
7041
Thierry Strudel3d639192016-09-09 11:52:26 -07007042 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7043 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007044 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007045
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007046 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7047 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007048
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007049 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007050 blackLevelAppliedPattern->cam_black_level[0],
7051 blackLevelAppliedPattern->cam_black_level[1],
7052 blackLevelAppliedPattern->cam_black_level[2],
7053 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007054 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7055 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007056
7057#ifndef USE_HAL_3_3
7058 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307059        // Need to convert from the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007060 // depth space.
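        // The scale factor is 2^(14-10) = 16; e.g. an internal black level of 1024
        // becomes 64 in the 10-bit raw domain.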
Jason Lee4f3d96e2017-02-28 19:24:14 +05307061 fwk_blackLevelInd[0] /= 16.0;
7062 fwk_blackLevelInd[1] /= 16.0;
7063 fwk_blackLevelInd[2] /= 16.0;
7064 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007065 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7066 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007067#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007068 }
7069
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007070#ifndef USE_HAL_3_3
7071    // A fixed white level is used by the ISP/sensor
7072 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7073 &gCamCapability[mCameraId]->white_level, 1);
7074#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007075
7076 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7077 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7078 int32_t scalerCropRegion[4];
7079 scalerCropRegion[0] = hScalerCropRegion->left;
7080 scalerCropRegion[1] = hScalerCropRegion->top;
7081 scalerCropRegion[2] = hScalerCropRegion->width;
7082 scalerCropRegion[3] = hScalerCropRegion->height;
7083
7084 // Adjust crop region from sensor output coordinate system to active
7085 // array coordinate system.
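        // Illustrative example (hypothetical values): if the sensor output is 2000x1500
        // while the active pixel array is 4000x3000, mCropRegionMapper conceptually scales
        // left/top/width/height by 2 so the crop is reported in active array units.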
7086 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7087 scalerCropRegion[2], scalerCropRegion[3]);
7088
7089 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7090 }
7091
7092 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7093 LOGD("sensorExpTime = %lld", *sensorExpTime);
7094 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7095 }
7096
7097    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7098            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7099        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7100        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7101 }
7102
7103 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7104 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7105 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7106 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7107 sensorRollingShutterSkew, 1);
7108 }
7109
7110 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7111 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7112 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7113
7114 //calculate the noise profile based on sensitivity
7115 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7116 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7117 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7118 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7119 noise_profile[i] = noise_profile_S;
7120 noise_profile[i+1] = noise_profile_O;
7121 }
7122 LOGD("noise model entry (S, O) is (%f, %f)",
7123 noise_profile_S, noise_profile_O);
7124 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7125 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7126 }
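    // The noise profile interleaves one (S, O) pair per color channel, e.g. for a
    // 4-channel Bayer sensor: [S, O, S, O, S, O, S, O]. Per the Android metadata
    // definition, these coefficients approximate the noise variance of a normalized
    // pixel value x as S * x + O.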
7127
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007128#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007129 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007130 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007131 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007132 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007133 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7134 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7135 }
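    // The reported boost combines the ISP digital gain with the post-stats sensitivity
    // scale; a value of 100 means no boost beyond ANDROID_SENSOR_SENSITIVITY.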
7136 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007137#endif
7138
Thierry Strudel3d639192016-09-09 11:52:26 -07007139 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7140 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7141 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7142 }
7143
7144 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7145 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7146 *faceDetectMode);
7147 if (NAME_NOT_FOUND != val) {
7148 uint8_t fwk_faceDetectMode = (uint8_t)val;
7149 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7150
7151 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7152 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7153 CAM_INTF_META_FACE_DETECTION, metadata) {
7154 uint8_t numFaces = MIN(
7155 faceDetectionInfo->num_faces_detected, MAX_ROI);
7156 int32_t faceIds[MAX_ROI];
7157 uint8_t faceScores[MAX_ROI];
7158 int32_t faceRectangles[MAX_ROI * 4];
7159 int32_t faceLandmarks[MAX_ROI * 6];
7160 size_t j = 0, k = 0;
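                    // Per-face packing: faceRectangles holds 4 ints [left, top, right,
                    // bottom] indexed by j, and faceLandmarks holds 6 ints [leftEyeX,
                    // leftEyeY, rightEyeX, rightEyeY, mouthX, mouthY] indexed by k
                    // (TOTAL_LANDMARK_INDICES per face).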
7161
7162 for (size_t i = 0; i < numFaces; i++) {
7163 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7164                        // Adjust the face boundary from the sensor output coordinate
7165                        // system to the active array coordinate system.
7166 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7167 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7168 rect.width, rect.height);
7169
7170 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7171 faceRectangles+j, -1);
7172
Jason Lee8ce36fa2017-04-19 19:40:37 -07007173 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7174 "bottom-right (%d, %d)",
7175 faceDetectionInfo->frame_id, i,
7176 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7177 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7178
Thierry Strudel3d639192016-09-09 11:52:26 -07007179 j+= 4;
7180 }
7181 if (numFaces <= 0) {
7182 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7183 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7184 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7185 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7186 }
7187
7188 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7189 numFaces);
7190 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7191 faceRectangles, numFaces * 4U);
7192 if (fwk_faceDetectMode ==
7193 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7194 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7195 CAM_INTF_META_FACE_LANDMARK, metadata) {
7196
7197 for (size_t i = 0; i < numFaces; i++) {
7198                            // Map the landmark coordinates from the sensor output
7199                            // coordinate system to the active array coordinate system.
7200 mCropRegionMapper.toActiveArray(
7201 landmarks->face_landmarks[i].left_eye_center.x,
7202 landmarks->face_landmarks[i].left_eye_center.y);
7203 mCropRegionMapper.toActiveArray(
7204 landmarks->face_landmarks[i].right_eye_center.x,
7205 landmarks->face_landmarks[i].right_eye_center.y);
7206 mCropRegionMapper.toActiveArray(
7207 landmarks->face_landmarks[i].mouth_center.x,
7208 landmarks->face_landmarks[i].mouth_center.y);
7209
7210 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007211
7212 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7213 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7214 faceDetectionInfo->frame_id, i,
7215 faceLandmarks[k + LEFT_EYE_X],
7216 faceLandmarks[k + LEFT_EYE_Y],
7217 faceLandmarks[k + RIGHT_EYE_X],
7218 faceLandmarks[k + RIGHT_EYE_Y],
7219 faceLandmarks[k + MOUTH_X],
7220 faceLandmarks[k + MOUTH_Y]);
7221
Thierry Strudel04e026f2016-10-10 11:27:36 -07007222 k+= TOTAL_LANDMARK_INDICES;
7223 }
7224 } else {
7225 for (size_t i = 0; i < numFaces; i++) {
7226 setInvalidLandmarks(faceLandmarks+k);
7227 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007228 }
7229 }
7230
Jason Lee49619db2017-04-13 12:07:22 -07007231 for (size_t i = 0; i < numFaces; i++) {
7232 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7233
7234 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7235 faceDetectionInfo->frame_id, i, faceIds[i]);
7236 }
7237
Thierry Strudel3d639192016-09-09 11:52:26 -07007238 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7239 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7240 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007241 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007242 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7243 CAM_INTF_META_FACE_BLINK, metadata) {
7244 uint8_t detected[MAX_ROI];
7245 uint8_t degree[MAX_ROI * 2];
7246 for (size_t i = 0; i < numFaces; i++) {
7247 detected[i] = blinks->blink[i].blink_detected;
7248 degree[2 * i] = blinks->blink[i].left_blink;
7249 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007250
Jason Lee49619db2017-04-13 12:07:22 -07007251 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7252 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7253 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7254 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007255 }
7256 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7257 detected, numFaces);
7258 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7259 degree, numFaces * 2);
7260 }
7261 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7262 CAM_INTF_META_FACE_SMILE, metadata) {
7263 uint8_t degree[MAX_ROI];
7264 uint8_t confidence[MAX_ROI];
7265 for (size_t i = 0; i < numFaces; i++) {
7266 degree[i] = smiles->smile[i].smile_degree;
7267 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007268
Jason Lee49619db2017-04-13 12:07:22 -07007269 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7270 "smile_degree=%d, smile_score=%d",
7271 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007272 }
7273 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7274 degree, numFaces);
7275 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7276 confidence, numFaces);
7277 }
7278 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7279 CAM_INTF_META_FACE_GAZE, metadata) {
7280 int8_t angle[MAX_ROI];
7281 int32_t direction[MAX_ROI * 3];
7282 int8_t degree[MAX_ROI * 2];
7283 for (size_t i = 0; i < numFaces; i++) {
7284 angle[i] = gazes->gaze[i].gaze_angle;
7285 direction[3 * i] = gazes->gaze[i].updown_dir;
7286 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7287 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7288 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7289 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007290
7291 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7292 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7293 "left_right_gaze=%d, top_bottom_gaze=%d",
7294 faceDetectionInfo->frame_id, i, angle[i],
7295 direction[3 * i], direction[3 * i + 1],
7296 direction[3 * i + 2],
7297 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007298 }
7299 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7300 (uint8_t *)angle, numFaces);
7301 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7302 direction, numFaces * 3);
7303 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7304 (uint8_t *)degree, numFaces * 2);
7305 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007306 }
7307 }
7308 }
7309 }
7310
7311 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7312 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007313 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007314 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007315 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007316
Shuzhen Wang14415f52016-11-16 18:26:18 -08007317 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7318 histogramBins = *histBins;
7319 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7320 }
7321
7322 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007323 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7324 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007325 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007326
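                // Only one channel's histogram buffer is exported: for Bayer stats the
                // GR/GB/B buffer is used when that channel is reported, with the R-channel
                // buffer as the default; YUV stats carry a single histogram buffer.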
7327 switch (stats_data->type) {
7328 case CAM_HISTOGRAM_TYPE_BAYER:
7329 switch (stats_data->bayer_stats.data_type) {
7330 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007331 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7332 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007333 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007334 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7335 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007336 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007337 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7338 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007339 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007340 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007341 case CAM_STATS_CHANNEL_R:
7342 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007343 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7344 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007345 }
7346 break;
7347 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007348 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007349 break;
7350 }
7351
Shuzhen Wang14415f52016-11-16 18:26:18 -08007352 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007353 }
7354 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007355 }
7356
7357 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7358 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7359 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7360 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7361 }
7362
7363 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7364 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7365 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7366 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7367 }
7368
7369 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7370 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7371 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7372 CAM_MAX_SHADING_MAP_HEIGHT);
7373 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7374 CAM_MAX_SHADING_MAP_WIDTH);
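        // The shading map carries 4 gain samples (one per Bayer channel) per grid cell,
        // hence the 4U * map_width * map_height element count below.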
7375 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7376 lensShadingMap->lens_shading, 4U * map_width * map_height);
7377 }
7378
7379 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7380 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7381 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7382 }
7383
7384 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7385 //Populate CAM_INTF_META_TONEMAP_CURVES
7386 /* ch0 = G, ch 1 = B, ch 2 = R*/
7387 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7388 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7389 tonemap->tonemap_points_cnt,
7390 CAM_MAX_TONEMAP_CURVE_SIZE);
7391 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7392 }
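        // Each tonemap point is an (input, output) pair, so every curve contributes
        // tonemap_points_cnt * 2 floats.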
7393
7394 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7395 &tonemap->curves[0].tonemap_points[0][0],
7396 tonemap->tonemap_points_cnt * 2);
7397
7398 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7399 &tonemap->curves[1].tonemap_points[0][0],
7400 tonemap->tonemap_points_cnt * 2);
7401
7402 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7403 &tonemap->curves[2].tonemap_points[0][0],
7404 tonemap->tonemap_points_cnt * 2);
7405 }
7406
7407 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7408 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7409 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7410 CC_GAIN_MAX);
7411 }
7412
7413 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7414 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7415 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7416 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7417 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7418 }
7419
7420 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7421 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7422 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7423 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7424 toneCurve->tonemap_points_cnt,
7425 CAM_MAX_TONEMAP_CURVE_SIZE);
7426 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7427 }
7428 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7429 (float*)toneCurve->curve.tonemap_points,
7430 toneCurve->tonemap_points_cnt * 2);
7431 }
7432
7433 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7434 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7435 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7436 predColorCorrectionGains->gains, 4);
7437 }
7438
7439 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7440 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7441 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7442 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7443 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7444 }
7445
7446 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7447 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7448 }
7449
7450 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7451 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7452 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7453 }
7454
7455 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7456 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7457 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7458 }
7459
7460 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7461 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7462 *effectMode);
7463 if (NAME_NOT_FOUND != val) {
7464 uint8_t fwk_effectMode = (uint8_t)val;
7465 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7466 }
7467 }
7468
7469 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7470 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7471 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7472 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7473 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7474 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7475 }
7476 int32_t fwk_testPatternData[4];
7477 fwk_testPatternData[0] = testPatternData->r;
7478 fwk_testPatternData[3] = testPatternData->b;
7479 switch (gCamCapability[mCameraId]->color_arrangement) {
7480 case CAM_FILTER_ARRANGEMENT_RGGB:
7481 case CAM_FILTER_ARRANGEMENT_GRBG:
7482 fwk_testPatternData[1] = testPatternData->gr;
7483 fwk_testPatternData[2] = testPatternData->gb;
7484 break;
7485 case CAM_FILTER_ARRANGEMENT_GBRG:
7486 case CAM_FILTER_ARRANGEMENT_BGGR:
7487 fwk_testPatternData[2] = testPatternData->gr;
7488 fwk_testPatternData[1] = testPatternData->gb;
7489 break;
7490 default:
7491 LOGE("color arrangement %d is not supported",
7492 gCamCapability[mCameraId]->color_arrangement);
7493 break;
7494 }
7495 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7496 }
7497
7498 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7499 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7500 }
7501
7502 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7503 String8 str((const char *)gps_methods);
7504 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7505 }
7506
7507 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7508 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7509 }
7510
7511 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7512 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7513 }
7514
7515 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7516 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7517 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7518 }
7519
7520 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7521 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7522 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7523 }
7524
7525 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7526 int32_t fwk_thumb_size[2];
7527 fwk_thumb_size[0] = thumb_size->width;
7528 fwk_thumb_size[1] = thumb_size->height;
7529 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7530 }
7531
7532 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7533 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7534 privateData,
7535 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7536 }
7537
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007538 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007539 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007540 meteringMode, 1);
7541 }
7542
Thierry Strudel54dc9782017-02-15 12:12:10 -08007543 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7544 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7545 LOGD("hdr_scene_data: %d %f\n",
7546 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7547 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7548 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7549 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7550 &isHdr, 1);
7551 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7552 &isHdrConfidence, 1);
7553 }
7554
7555
7556
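    // The tuning blob below is serialized as a fixed header of six uint32 fields (the data
    // version followed by the sensor, VFE, CPP, CAC and mod3 section sizes) and then the
    // variable-length sensor, VFE, CPP and CAC sections copied back to back from
    // tuning_params.data at their respective offsets.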
Thierry Strudel3d639192016-09-09 11:52:26 -07007557 if (metadata->is_tuning_params_valid) {
7558 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7559 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7560 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7561
7562
7563 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7564 sizeof(uint32_t));
7565 data += sizeof(uint32_t);
7566
7567 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7568 sizeof(uint32_t));
7569 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7570 data += sizeof(uint32_t);
7571
7572 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7573 sizeof(uint32_t));
7574 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7575 data += sizeof(uint32_t);
7576
7577 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7578 sizeof(uint32_t));
7579 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7580 data += sizeof(uint32_t);
7581
7582 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7583 sizeof(uint32_t));
7584 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7585 data += sizeof(uint32_t);
7586
7587 metadata->tuning_params.tuning_mod3_data_size = 0;
7588 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7589 sizeof(uint32_t));
7590 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7591 data += sizeof(uint32_t);
7592
7593 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7594 TUNING_SENSOR_DATA_MAX);
7595 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7596 count);
7597 data += count;
7598
7599 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7600 TUNING_VFE_DATA_MAX);
7601 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7602 count);
7603 data += count;
7604
7605 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7606 TUNING_CPP_DATA_MAX);
7607 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7608 count);
7609 data += count;
7610
7611 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7612 TUNING_CAC_DATA_MAX);
7613 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7614 count);
7615 data += count;
7616
7617 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7618 (int32_t *)(void *)tuning_meta_data_blob,
7619 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7620 }
7621
7622 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7623 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7624 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7625 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7626 NEUTRAL_COL_POINTS);
7627 }
7628
7629 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7630 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7631 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7632 }
7633
7634 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7635 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7636        // Adjust the AE region from the sensor output coordinate system to the active
7637        // array coordinate system.
7638 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7639 hAeRegions->rect.width, hAeRegions->rect.height);
7640
7641 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7642 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7643 REGIONS_TUPLE_COUNT);
7644 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7645 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7646 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7647 hAeRegions->rect.height);
7648 }
7649
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007650 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7651 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7652 if (NAME_NOT_FOUND != val) {
7653 uint8_t fwkAfMode = (uint8_t)val;
7654 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7655 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7656 } else {
7657 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7658 val);
7659 }
7660 }
7661
Thierry Strudel3d639192016-09-09 11:52:26 -07007662 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7663 uint8_t fwk_afState = (uint8_t) *afState;
7664 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007665 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007666 }
7667
7668 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7669 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7670 }
7671
7672 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7673 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7674 }
7675
7676 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7677 uint8_t fwk_lensState = *lensState;
7678 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7679 }
7680
Thierry Strudel3d639192016-09-09 11:52:26 -07007681
7682 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007683 uint32_t ab_mode = *hal_ab_mode;
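        // Collapse the HAL-internal auto-50Hz/auto-60Hz variants to plain AUTO; the
        // framework ANDROID_CONTROL_AE_ANTIBANDING_MODE enum has no such values.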
7684 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7685 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7686 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7687 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007688 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007689 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007690 if (NAME_NOT_FOUND != val) {
7691 uint8_t fwk_ab_mode = (uint8_t)val;
7692 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7693 }
7694 }
7695
7696 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7697 int val = lookupFwkName(SCENE_MODES_MAP,
7698 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7699 if (NAME_NOT_FOUND != val) {
7700 uint8_t fwkBestshotMode = (uint8_t)val;
7701 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7702 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7703 } else {
7704 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7705 }
7706 }
7707
7708 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7709 uint8_t fwk_mode = (uint8_t) *mode;
7710 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7711 }
7712
7713    /* Constant metadata values to be updated */
7714 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7715 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7716
7717 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7718 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7719
7720 int32_t hotPixelMap[2];
7721 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7722
7723 // CDS
7724 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7725 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7726 }
7727
Thierry Strudel04e026f2016-10-10 11:27:36 -07007728 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7729 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007730 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007731 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7732 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7733 } else {
7734 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7735 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007736
7737 if(fwk_hdr != curr_hdr_state) {
7738 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7739 if(fwk_hdr)
7740 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7741 else
7742 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7743 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007744 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7745 }
7746
Thierry Strudel54dc9782017-02-15 12:12:10 -08007747 //binning correction
7748 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7749 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7750 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7751 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7752 }
7753
Thierry Strudel04e026f2016-10-10 11:27:36 -07007754 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007755 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007756 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7757 int8_t is_ir_on = 0;
7758
7759        is_ir_on = (fwk_ir > 0) ? 1 : 0;
7760 if(is_ir_on != curr_ir_state) {
7761 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7762 if(is_ir_on)
7763 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7764 else
7765 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7766 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007767 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007768 }
7769
Thierry Strudel269c81a2016-10-12 12:13:59 -07007770 // AEC SPEED
7771 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7772 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7773 }
7774
7775 // AWB SPEED
7776 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7777 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7778 }
7779
Thierry Strudel3d639192016-09-09 11:52:26 -07007780 // TNR
7781 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7782 uint8_t tnr_enable = tnr->denoise_enable;
7783 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007784 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7785 int8_t is_tnr_on = 0;
7786
7787        is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7788 if(is_tnr_on != curr_tnr_state) {
7789 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7790 if(is_tnr_on)
7791 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7792 else
7793 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7794 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007795
7796 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7797 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7798 }
7799
7800 // Reprocess crop data
7801 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7802 uint8_t cnt = crop_data->num_of_streams;
7803 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7804 // mm-qcamera-daemon only posts crop_data for streams
7805            // not linked to pproc, so the absence of valid crop metadata is not
7806 // necessarily an error case.
7807 LOGD("No valid crop metadata entries");
7808 } else {
7809 uint32_t reproc_stream_id;
7810 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7811 LOGD("No reprocessible stream found, ignore crop data");
7812 } else {
7813 int rc = NO_ERROR;
7814 Vector<int32_t> roi_map;
7815 int32_t *crop = new int32_t[cnt*4];
7816 if (NULL == crop) {
7817 rc = NO_MEMORY;
7818 }
7819 if (NO_ERROR == rc) {
7820 int32_t streams_found = 0;
7821 for (size_t i = 0; i < cnt; i++) {
7822 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7823 if (pprocDone) {
7824 // HAL already does internal reprocessing,
7825 // either via reprocessing before JPEG encoding,
7826 // or offline postprocessing for pproc bypass case.
7827 crop[0] = 0;
7828 crop[1] = 0;
7829 crop[2] = mInputStreamInfo.dim.width;
7830 crop[3] = mInputStreamInfo.dim.height;
7831 } else {
7832 crop[0] = crop_data->crop_info[i].crop.left;
7833 crop[1] = crop_data->crop_info[i].crop.top;
7834 crop[2] = crop_data->crop_info[i].crop.width;
7835 crop[3] = crop_data->crop_info[i].crop.height;
7836 }
7837 roi_map.add(crop_data->crop_info[i].roi_map.left);
7838 roi_map.add(crop_data->crop_info[i].roi_map.top);
7839 roi_map.add(crop_data->crop_info[i].roi_map.width);
7840 roi_map.add(crop_data->crop_info[i].roi_map.height);
7841 streams_found++;
7842 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7843 crop[0], crop[1], crop[2], crop[3]);
7844 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7845 crop_data->crop_info[i].roi_map.left,
7846 crop_data->crop_info[i].roi_map.top,
7847 crop_data->crop_info[i].roi_map.width,
7848 crop_data->crop_info[i].roi_map.height);
7849 break;
7850
7851 }
7852 }
7853 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7854 &streams_found, 1);
7855 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7856 crop, (size_t)(streams_found * 4));
7857 if (roi_map.array()) {
7858 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7859 roi_map.array(), roi_map.size());
7860 }
7861 }
7862 if (crop) {
7863 delete [] crop;
7864 }
7865 }
7866 }
7867 }
7868
7869 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7870        // Regardless of whether CAC is supported, CTS expects the CAC result to be
7871        // non-NULL, so hardcode the CAC result to OFF mode.
7872 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7873 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7874 } else {
7875 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7876 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7877 *cacMode);
7878 if (NAME_NOT_FOUND != val) {
7879 uint8_t resultCacMode = (uint8_t)val;
7880                // Check whether the CAC result from the callback matches the framework-set
7881                // CAC mode; if not, report the CAC mode from the corresponding request.
7882 if (fwk_cacMode != resultCacMode) {
7883 resultCacMode = fwk_cacMode;
7884 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007885 //Check if CAC is disabled by property
7886 if (m_cacModeDisabled) {
7887 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7888 }
7889
Thierry Strudel3d639192016-09-09 11:52:26 -07007890 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7891 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7892 } else {
7893 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7894 }
7895 }
7896 }
7897
7898 // Post blob of cam_cds_data through vendor tag.
7899 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7900 uint8_t cnt = cdsInfo->num_of_streams;
7901 cam_cds_data_t cdsDataOverride;
7902 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7903 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7904 cdsDataOverride.num_of_streams = 1;
7905 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7906 uint32_t reproc_stream_id;
7907 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7908 LOGD("No reprocessible stream found, ignore cds data");
7909 } else {
7910 for (size_t i = 0; i < cnt; i++) {
7911 if (cdsInfo->cds_info[i].stream_id ==
7912 reproc_stream_id) {
7913 cdsDataOverride.cds_info[0].cds_enable =
7914 cdsInfo->cds_info[i].cds_enable;
7915 break;
7916 }
7917 }
7918 }
7919 } else {
7920 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7921 }
7922 camMetadata.update(QCAMERA3_CDS_INFO,
7923 (uint8_t *)&cdsDataOverride,
7924 sizeof(cam_cds_data_t));
7925 }
7926
7927 // Ldaf calibration data
7928 if (!mLdafCalibExist) {
7929 IF_META_AVAILABLE(uint32_t, ldafCalib,
7930 CAM_INTF_META_LDAF_EXIF, metadata) {
7931 mLdafCalibExist = true;
7932 mLdafCalib[0] = ldafCalib[0];
7933 mLdafCalib[1] = ldafCalib[1];
7934 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7935 ldafCalib[0], ldafCalib[1]);
7936 }
7937 }
7938
Thierry Strudel54dc9782017-02-15 12:12:10 -08007939 // EXIF debug data through vendor tag
7940 /*
7941 * Mobicat Mask can assume 3 values:
7942 * 1 refers to Mobicat data,
7943     * 2 refers to Stats Debug and Exif Debug Data,
7944     * 3 refers to Mobicat and Stats Debug Data.
7945 * We want to make sure that we are sending Exif debug data
7946 * only when Mobicat Mask is 2.
7947 */
7948 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7949 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7950 (uint8_t *)(void *)mExifParams.debug_params,
7951 sizeof(mm_jpeg_debug_exif_params_t));
7952 }
7953
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007954 // Reprocess and DDM debug data through vendor tag
7955 cam_reprocess_info_t repro_info;
7956 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007957 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7958 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007959 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007960 }
7961 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7962 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007963 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007964 }
7965 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7966 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007967 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007968 }
7969 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7970 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007971 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007972 }
7973 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7974 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007975 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007976 }
7977 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007978 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007979 }
7980 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7981 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007982 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007983 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007984 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7985 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7986 }
7987 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7988 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7989 }
7990 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7991 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007992
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007993 // INSTANT AEC MODE
7994 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7995 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7996 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7997 }
7998
Shuzhen Wange763e802016-03-31 10:24:29 -07007999 // AF scene change
8000 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8001 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8002 }
8003
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008004 // Enable ZSL
8005 if (enableZsl != nullptr) {
8006 uint8_t value = *enableZsl ?
8007 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8008 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8009 }
8010
Xu Han821ea9c2017-05-23 09:00:40 -07008011 // OIS Data
8012 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8013 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8014 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8015 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8016 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8017 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8018 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8019 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8020 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8021 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8022 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
8023 }
8024
Thierry Strudel3d639192016-09-09 11:52:26 -07008025 resultMetadata = camMetadata.release();
8026 return resultMetadata;
8027}
8028
8029/*===========================================================================
8030 * FUNCTION : saveExifParams
8031 *
8032 * DESCRIPTION: save 3A/stats EXIF debug params from the metadata callback into mExifParams
8033 *
8034 * PARAMETERS :
8035 * @metadata : metadata information from callback
8036 *
8037 * RETURN : none
8038 *
8039 *==========================================================================*/
8040void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8041{
8042 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8043 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8044 if (mExifParams.debug_params) {
8045 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8046 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8047 }
8048 }
8049 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8050 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8051 if (mExifParams.debug_params) {
8052 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8053 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8054 }
8055 }
8056 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8057 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8058 if (mExifParams.debug_params) {
8059 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8060 mExifParams.debug_params->af_debug_params_valid = TRUE;
8061 }
8062 }
8063 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8064 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8065 if (mExifParams.debug_params) {
8066 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8067 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8068 }
8069 }
8070 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8071 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8072 if (mExifParams.debug_params) {
8073 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8074 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8075 }
8076 }
8077 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8078 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8079 if (mExifParams.debug_params) {
8080 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8081 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8082 }
8083 }
8084 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8085 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8086 if (mExifParams.debug_params) {
8087 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8088 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8089 }
8090 }
8091 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8092 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8093 if (mExifParams.debug_params) {
8094 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8095 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8096 }
8097 }
8098}
8099
8100/*===========================================================================
8101 * FUNCTION : get3AExifParams
8102 *
8103 * DESCRIPTION: return the cached 3A EXIF parameters (mExifParams)
8104 *
8105 * PARAMETERS : none
8106 *
8107 *
8108 * RETURN : mm_jpeg_exif_params_t
8109 *
8110 *==========================================================================*/
8111mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8112{
8113 return mExifParams;
8114}
8115
8116/*===========================================================================
8117 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8118 *
8119 * DESCRIPTION: translate urgent (partial result) metadata from the backend into framework result metadata
8120 *
8121 * PARAMETERS :
8122 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008123 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8124 * urgent metadata in a batch. Always true for
8125 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008126 *
8127 * RETURN : camera_metadata_t*
8128 * metadata in a format specified by fwk
8129 *==========================================================================*/
8130camera_metadata_t*
8131QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008132 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008133{
8134 CameraMetadata camMetadata;
8135 camera_metadata_t *resultMetadata;
8136
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008137 if (!lastUrgentMetadataInBatch) {
8138 /* In batch mode, use empty metadata if this is not the last in batch
8139 */
8140 resultMetadata = allocate_camera_metadata(0, 0);
8141 return resultMetadata;
8142 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008143
8144 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8145 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8146 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8147 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8148 }
8149
8150 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8151 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8152 &aecTrigger->trigger, 1);
8153 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8154 &aecTrigger->trigger_id, 1);
8155 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8156 aecTrigger->trigger);
8157 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8158 aecTrigger->trigger_id);
8159 }
8160
8161 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8162 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8163 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8164 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8165 }
8166
Thierry Strudel3d639192016-09-09 11:52:26 -07008167 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8168 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8169 &af_trigger->trigger, 1);
8170 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8171 af_trigger->trigger);
8172 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8173 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8174 af_trigger->trigger_id);
8175 }
8176
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008177 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8178 /*af regions*/
8179 int32_t afRegions[REGIONS_TUPLE_COUNT];
8180 // Adjust crop region from sensor output coordinate system to active
8181 // array coordinate system.
8182 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8183 hAfRegions->rect.width, hAfRegions->rect.height);
8184
8185 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8186 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8187 REGIONS_TUPLE_COUNT);
8188 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8189 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8190 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8191 hAfRegions->rect.height);
8192 }
8193
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008194 // AF region confidence
8195 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8196 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8197 }
8198
Thierry Strudel3d639192016-09-09 11:52:26 -07008199 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8200 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8201 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8202 if (NAME_NOT_FOUND != val) {
8203 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8204 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8205 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8206 } else {
8207 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8208 }
8209 }
8210
8211 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8212 uint32_t aeMode = CAM_AE_MODE_MAX;
8213 int32_t flashMode = CAM_FLASH_MODE_MAX;
8214 int32_t redeye = -1;
8215 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8216 aeMode = *pAeMode;
8217 }
8218 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8219 flashMode = *pFlashMode;
8220 }
8221 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8222 redeye = *pRedeye;
8223 }
8224
8225 if (1 == redeye) {
8226 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8227 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8228 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8229 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8230 flashMode);
8231 if (NAME_NOT_FOUND != val) {
8232 fwk_aeMode = (uint8_t)val;
8233 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8234 } else {
8235 LOGE("Unsupported flash mode %d", flashMode);
8236 }
8237 } else if (aeMode == CAM_AE_MODE_ON) {
8238 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8239 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8240 } else if (aeMode == CAM_AE_MODE_OFF) {
8241 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8242 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008243 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8244 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8245 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008246 } else {
8247 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8248 "flashMode:%d, aeMode:%u!!!",
8249 redeye, flashMode, aeMode);
8250 }
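    // Illustrative note (not from the original source): the chain above resolves
    // ANDROID_CONTROL_AE_MODE with the red-eye flag taking priority, then the
    // LED/flash mode, then the plain AE mode. For example, assuming the backend
    // reports redeye = 0, flashMode = CAM_FLASH_MODE_AUTO and aeMode =
    // CAM_AE_MODE_ON, the flash branch wins and the reported mode comes from
    // AE_FLASH_MODE_MAP (auto-flash), not plain AE_MODE_ON.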
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008251 if (mInstantAEC) {
8252        // Increment frame index count until the bound is reached for instant AEC.
8253 mInstantAecFrameIdxCount++;
8254 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8255 CAM_INTF_META_AEC_INFO, metadata) {
8256 LOGH("ae_params->settled = %d",ae_params->settled);
8257 // If AEC settled, or if number of frames reached bound value,
8258 // should reset instant AEC.
8259 if (ae_params->settled ||
8260 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8261 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8262 mInstantAEC = false;
8263 mResetInstantAEC = true;
8264 mInstantAecFrameIdxCount = 0;
8265 }
8266 }
8267 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008268 resultMetadata = camMetadata.release();
8269 return resultMetadata;
8270}
8271
8272/*===========================================================================
8273 * FUNCTION : dumpMetadataToFile
8274 *
8275 * DESCRIPTION: Dumps tuning metadata to file system
8276 *
8277 * PARAMETERS :
8278 * @meta : tuning metadata
8279 * @dumpFrameCount : current dump frame count
8280 * @enabled : Enable mask
 * @type : stream type string used in the dump file name
 * @frameNumber : frame number associated with the dumped metadata
8281 *
8282 *==========================================================================*/
8283void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8284 uint32_t &dumpFrameCount,
8285 bool enabled,
8286 const char *type,
8287 uint32_t frameNumber)
8288{
8289 //Some sanity checks
8290 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8291 LOGE("Tuning sensor data size bigger than expected %d: %d",
8292 meta.tuning_sensor_data_size,
8293 TUNING_SENSOR_DATA_MAX);
8294 return;
8295 }
8296
8297 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8298 LOGE("Tuning VFE data size bigger than expected %d: %d",
8299 meta.tuning_vfe_data_size,
8300 TUNING_VFE_DATA_MAX);
8301 return;
8302 }
8303
8304 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8305 LOGE("Tuning CPP data size bigger than expected %d: %d",
8306 meta.tuning_cpp_data_size,
8307 TUNING_CPP_DATA_MAX);
8308 return;
8309 }
8310
8311 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8312 LOGE("Tuning CAC data size bigger than expected %d: %d",
8313 meta.tuning_cac_data_size,
8314 TUNING_CAC_DATA_MAX);
8315 return;
8316 }
8317 //
8318
8319 if(enabled){
8320 char timeBuf[FILENAME_MAX];
8321 char buf[FILENAME_MAX];
8322 memset(buf, 0, sizeof(buf));
8323 memset(timeBuf, 0, sizeof(timeBuf));
8324 time_t current_time;
8325 struct tm * timeinfo;
8326 time (&current_time);
8327 timeinfo = localtime (&current_time);
8328 if (timeinfo != NULL) {
8329 strftime (timeBuf, sizeof(timeBuf),
8330 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8331 }
8332 String8 filePath(timeBuf);
8333 snprintf(buf,
8334 sizeof(buf),
8335 "%dm_%s_%d.bin",
8336 dumpFrameCount,
8337 type,
8338 frameNumber);
8339 filePath.append(buf);
8340 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8341 if (file_fd >= 0) {
8342 ssize_t written_len = 0;
8343 meta.tuning_data_version = TUNING_DATA_VERSION;
8344 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8345 written_len += write(file_fd, data, sizeof(uint32_t));
8346 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8347 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8348 written_len += write(file_fd, data, sizeof(uint32_t));
8349 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8350 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8351 written_len += write(file_fd, data, sizeof(uint32_t));
8352 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8353 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8354 written_len += write(file_fd, data, sizeof(uint32_t));
8355 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8356 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8357 written_len += write(file_fd, data, sizeof(uint32_t));
8358 meta.tuning_mod3_data_size = 0;
8359 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8360 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8361 written_len += write(file_fd, data, sizeof(uint32_t));
8362 size_t total_size = meta.tuning_sensor_data_size;
8363 data = (void *)((uint8_t *)&meta.data);
8364 written_len += write(file_fd, data, total_size);
8365 total_size = meta.tuning_vfe_data_size;
8366 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8367 written_len += write(file_fd, data, total_size);
8368 total_size = meta.tuning_cpp_data_size;
8369 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8370 written_len += write(file_fd, data, total_size);
8371 total_size = meta.tuning_cac_data_size;
8372 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8373 written_len += write(file_fd, data, total_size);
8374 close(file_fd);
8375        } else {
8376            LOGE("failed to open file for metadata dumping");
8377 }
8378 }
8379}
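/* Illustrative sketch (not part of the original source): the tuning dump written
 * above is a flat binary file laid out as a fixed header followed by the
 * variable-size sections, in this order:
 *
 *   uint32_t tuning_data_version;    // TUNING_DATA_VERSION
 *   uint32_t tuning_sensor_data_size;
 *   uint32_t tuning_vfe_data_size;
 *   uint32_t tuning_cpp_data_size;
 *   uint32_t tuning_cac_data_size;
 *   uint32_t tuning_mod3_data_size;  // always written as 0 here
 *   uint8_t  sensor_data[tuning_sensor_data_size];  // from meta.data[0]
 *   uint8_t  vfe_data[tuning_vfe_data_size];        // from meta.data[TUNING_VFE_DATA_OFFSET]
 *   uint8_t  cpp_data[tuning_cpp_data_size];        // from meta.data[TUNING_CPP_DATA_OFFSET]
 *   uint8_t  cac_data[tuning_cac_data_size];        // from meta.data[TUNING_CAC_DATA_OFFSET]
 *
 * A host-side parser would read the six uint32 header fields first and then each
 * section using the size recorded in the header.
 */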
8380
8381/*===========================================================================
8382 * FUNCTION : cleanAndSortStreamInfo
8383 *
8384 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8385 * and sort them such that raw streams are at the end of the list.
8386 * This is a workaround for a camera daemon constraint.
8387 *
8388 * PARAMETERS : None
8389 *
8390 *==========================================================================*/
8391void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8392{
8393 List<stream_info_t *> newStreamInfo;
8394
8395 /*clean up invalid streams*/
8396 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8397 it != mStreamInfo.end();) {
8398 if(((*it)->status) == INVALID){
8399 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8400 delete channel;
8401 free(*it);
8402 it = mStreamInfo.erase(it);
8403 } else {
8404 it++;
8405 }
8406 }
8407
8408 // Move preview/video/callback/snapshot streams into newList
8409 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8410 it != mStreamInfo.end();) {
8411 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8412 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8413 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8414 newStreamInfo.push_back(*it);
8415 it = mStreamInfo.erase(it);
8416 } else
8417 it++;
8418 }
8419 // Move raw streams into newList
8420 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8421 it != mStreamInfo.end();) {
8422 newStreamInfo.push_back(*it);
8423 it = mStreamInfo.erase(it);
8424 }
8425
8426 mStreamInfo = newStreamInfo;
8427}
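/* Illustrative example (not from the original source): given a stream list of
 *   [RAW16, PREVIEW (IMPLEMENTATION_DEFINED), INVALID (YCbCr), SNAPSHOT (BLOB)]
 * the method above first drops the INVALID entry (deleting its channel), then
 * rebuilds the list as
 *   [PREVIEW, SNAPSHOT, RAW16]
 * i.e. processed streams keep their relative order and raw streams are moved to
 * the tail, which is the ordering the camera daemon expects.
 */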
8428
8429/*===========================================================================
8430 * FUNCTION : extractJpegMetadata
8431 *
8432 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8433 * JPEG metadata is cached in HAL, and return as part of capture
8434 * JPEG metadata is cached in the HAL and returned as part of the capture
8435 * result when metadata is received from the camera daemon.
8436 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8437 * @request: capture request
8438 *
8439 *==========================================================================*/
8440void QCamera3HardwareInterface::extractJpegMetadata(
8441 CameraMetadata& jpegMetadata,
8442 const camera3_capture_request_t *request)
8443{
8444 CameraMetadata frame_settings;
8445 frame_settings = request->settings;
8446
8447 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8448 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8449 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8450 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8451
8452 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8453 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8454 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8455 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8456
8457 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8458 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8459 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8460 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8461
8462 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8463 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8464 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8465 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8466
8467 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8468 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8469 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8470 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8471
8472 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8473 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8474 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8475 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8476
8477 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8478 int32_t thumbnail_size[2];
8479 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8480 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8481 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8482 int32_t orientation =
8483 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008484 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008485 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8486 int32_t temp;
8487 temp = thumbnail_size[0];
8488 thumbnail_size[0] = thumbnail_size[1];
8489 thumbnail_size[1] = temp;
8490 }
8491 }
8492 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8493 thumbnail_size,
8494 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8495 }
8496
8497}
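/* Illustrative example (not from the original source): for a request carrying
 * ANDROID_JPEG_THUMBNAIL_SIZE = {320, 240} and ANDROID_JPEG_ORIENTATION = 90,
 * with needJpegExifRotation() returning false (the HAL rotates the image itself),
 * the cached jpegMetadata ends up with a thumbnail size of {240, 320} so the
 * thumbnail aspect ratio matches the rotated main image.
 */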
8498
8499/*===========================================================================
8500 * FUNCTION : convertToRegions
8501 *
8502 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8503 *
8504 * PARAMETERS :
8505 * @rect : cam_rect_t struct to convert
8506 * @region : int32_t destination array
8507 * @weight : if we are converting from cam_area_t, weight is valid
8508 * else weight = -1
8509 *
8510 *==========================================================================*/
8511void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8512 int32_t *region, int weight)
8513{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008514 region[FACE_LEFT] = rect.left;
8515 region[FACE_TOP] = rect.top;
8516 region[FACE_RIGHT] = rect.left + rect.width;
8517 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008518 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008519 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008520 }
8521}
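/* Illustrative example (not from the original source): a cam_rect_t of
 * {left = 100, top = 200, width = 300, height = 400} with weight = 1 is written
 * to the destination array as
 *   region[FACE_LEFT]   = 100
 *   region[FACE_TOP]    = 200
 *   region[FACE_RIGHT]  = 400   // left + width
 *   region[FACE_BOTTOM] = 600   // top + height
 *   region[FACE_WEIGHT] = 1
 * i.e. the framework receives (xmin, ymin, xmax, ymax, weight) rather than
 * (x, y, width, height, weight).
 */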
8522
8523/*===========================================================================
8524 * FUNCTION : convertFromRegions
8525 *
8526 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8527 *
8528 * PARAMETERS :
8529 * @roi : cam_area_t destination struct
8530 * @frame_settings : capture request settings to read the region from
8531 * @tag : metadata tag holding the region array
8532 * ([x_min, y_min, x_max, y_max, weight])
8533 *
8534 *==========================================================================*/
8535void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008536 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008537{
Thierry Strudel3d639192016-09-09 11:52:26 -07008538 int32_t x_min = frame_settings.find(tag).data.i32[0];
8539 int32_t y_min = frame_settings.find(tag).data.i32[1];
8540 int32_t x_max = frame_settings.find(tag).data.i32[2];
8541 int32_t y_max = frame_settings.find(tag).data.i32[3];
8542 roi.weight = frame_settings.find(tag).data.i32[4];
8543 roi.rect.left = x_min;
8544 roi.rect.top = y_min;
8545 roi.rect.width = x_max - x_min;
8546 roi.rect.height = y_max - y_min;
8547}
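/* Illustrative example (not from the original source): for a request with
 * ANDROID_CONTROL_AF_REGIONS = {100, 200, 400, 600, 1}, i.e.
 * (xmin, ymin, xmax, ymax, weight), calling
 *   convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
 * fills roi.rect = {left = 100, top = 200, width = 300, height = 400} and
 * roi.weight = 1, the inverse of convertToRegions() above.
 */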
8548
8549/*===========================================================================
8550 * FUNCTION : resetIfNeededROI
8551 *
8552 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8553 * crop region
8554 *
8555 * PARAMETERS :
8556 * @roi : cam_area_t struct to resize
8557 * @scalerCropRegion : cam_crop_region_t region to compare against
8558 *
8559 *
8560 *==========================================================================*/
8561bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8562 const cam_crop_region_t* scalerCropRegion)
8563{
8564 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8565 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8566 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8567 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8568
8569    /* According to the spec, weight = 0 indicates that the roi should be disabled.
8570     * Without this check, the validation below (whether the roi lies inside the
8571     * scaler crop region) would fail, the roi would not be reset, and the
8572     * algorithm would keep using a stale roi window.
8573 */
8574 if (roi->weight == 0) {
8575 return true;
8576 }
8577
8578 if ((roi_x_max < scalerCropRegion->left) ||
8579        // right edge of roi window is left of scaler crop's left edge
8580        (roi_y_max < scalerCropRegion->top) ||
8581        // bottom edge of roi window is above scaler crop's top edge
8582        (roi->rect.left > crop_x_max) ||
8583        // left edge of roi window is beyond (right of) scaler crop's right edge
8584        (roi->rect.top > crop_y_max)){
8585        // top edge of roi window is below scaler crop's bottom edge
8586 return false;
8587 }
8588 if (roi->rect.left < scalerCropRegion->left) {
8589 roi->rect.left = scalerCropRegion->left;
8590 }
8591 if (roi->rect.top < scalerCropRegion->top) {
8592 roi->rect.top = scalerCropRegion->top;
8593 }
8594 if (roi_x_max > crop_x_max) {
8595 roi_x_max = crop_x_max;
8596 }
8597 if (roi_y_max > crop_y_max) {
8598 roi_y_max = crop_y_max;
8599 }
8600 roi->rect.width = roi_x_max - roi->rect.left;
8601 roi->rect.height = roi_y_max - roi->rect.top;
8602 return true;
8603}
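/* Illustrative example (not from the original source): with a scaler crop region
 * of {left = 0, top = 0, width = 2000, height = 1500} and an roi of
 * {left = 1800, top = 1400, width = 400, height = 300, weight = 1}, the roi
 * overlaps the crop region, so it is clamped to
 * {left = 1800, top = 1400, width = 200, height = 100} and the function returns
 * true. If the roi were entirely outside the crop region (e.g. left = 2100), the
 * function would return false and the caller would ignore the stale roi.
 */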
8604
8605/*===========================================================================
8606 * FUNCTION : convertLandmarks
8607 *
8608 * DESCRIPTION: helper method to extract the landmarks from face detection info
8609 *
8610 * PARAMETERS :
8611 * @landmark_data : input landmark data to be converted
8612 * @landmarks : int32_t destination array
8613 *
8614 *
8615 *==========================================================================*/
8616void QCamera3HardwareInterface::convertLandmarks(
8617 cam_face_landmarks_info_t landmark_data,
8618 int32_t *landmarks)
8619{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008620 if (landmark_data.is_left_eye_valid) {
8621 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8622 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8623 } else {
8624 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8625 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8626 }
8627
8628 if (landmark_data.is_right_eye_valid) {
8629 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8630 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8631 } else {
8632 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8633 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8634 }
8635
8636 if (landmark_data.is_mouth_valid) {
8637 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8638 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8639 } else {
8640 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8641 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8642 }
8643}
8644
8645/*===========================================================================
8646 * FUNCTION : setInvalidLandmarks
8647 *
8648 * DESCRIPTION: helper method to set invalid landmarks
8649 *
8650 * PARAMETERS :
8651 * @landmarks : int32_t destination array
8652 *
8653 *
8654 *==========================================================================*/
8655void QCamera3HardwareInterface::setInvalidLandmarks(
8656 int32_t *landmarks)
8657{
8658 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8659 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8660 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8661 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8662 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8663 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008664}
8665
8666#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008667
8668/*===========================================================================
8669 * FUNCTION : getCapabilities
8670 *
8671 * DESCRIPTION: query camera capability from back-end
8672 *
8673 * PARAMETERS :
8674 * @ops : mm-interface ops structure
8675 * @cam_handle : camera handle for which we need capability
8676 *
8677 * RETURN : ptr type of capability structure
8678 * capability for success
8679 * NULL for failure
8680 *==========================================================================*/
8681cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8682 uint32_t cam_handle)
8683{
8684 int rc = NO_ERROR;
8685 QCamera3HeapMemory *capabilityHeap = NULL;
8686 cam_capability_t *cap_ptr = NULL;
8687
8688 if (ops == NULL) {
8689 LOGE("Invalid arguments");
8690 return NULL;
8691 }
8692
8693 capabilityHeap = new QCamera3HeapMemory(1);
8694 if (capabilityHeap == NULL) {
8695 LOGE("creation of capabilityHeap failed");
8696 return NULL;
8697 }
8698
8699 /* Allocate memory for capability buffer */
8700 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8701 if(rc != OK) {
8702        LOGE("No memory for capability");
8703 goto allocate_failed;
8704 }
8705
8706 /* Map memory for capability buffer */
8707 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8708
8709 rc = ops->map_buf(cam_handle,
8710 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8711 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8712 if(rc < 0) {
8713 LOGE("failed to map capability buffer");
8714 rc = FAILED_TRANSACTION;
8715 goto map_failed;
8716 }
8717
8718 /* Query Capability */
8719 rc = ops->query_capability(cam_handle);
8720 if(rc < 0) {
8721 LOGE("failed to query capability");
8722 rc = FAILED_TRANSACTION;
8723 goto query_failed;
8724 }
8725
8726 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8727 if (cap_ptr == NULL) {
8728 LOGE("out of memory");
8729 rc = NO_MEMORY;
8730 goto query_failed;
8731 }
8732
8733 memset(cap_ptr, 0, sizeof(cam_capability_t));
8734 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8735
8736 int index;
8737 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8738 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8739 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8740 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8741 }
8742
8743query_failed:
8744 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8745map_failed:
8746 capabilityHeap->deallocate();
8747allocate_failed:
8748 delete capabilityHeap;
8749
8750 if (rc != NO_ERROR) {
8751 return NULL;
8752 } else {
8753 return cap_ptr;
8754 }
8755}
8756
Thierry Strudel3d639192016-09-09 11:52:26 -07008757/*===========================================================================
8758 * FUNCTION : initCapabilities
8759 *
8760 * DESCRIPTION: initialize camera capabilities in static data struct
8761 *
8762 * PARAMETERS :
8763 * @cameraId : camera Id
8764 *
8765 * RETURN : int32_t type of status
8766 * NO_ERROR -- success
8767 * none-zero failure code
8768 *==========================================================================*/
8769int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8770{
8771 int rc = 0;
8772 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008773 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008774
8775 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8776 if (rc) {
8777 LOGE("camera_open failed. rc = %d", rc);
8778 goto open_failed;
8779 }
8780 if (!cameraHandle) {
8781 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8782 goto open_failed;
8783 }
8784
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008785 handle = get_main_camera_handle(cameraHandle->camera_handle);
8786 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8787 if (gCamCapability[cameraId] == NULL) {
8788 rc = FAILED_TRANSACTION;
8789 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008790 }
8791
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008792 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008793 if (is_dual_camera_by_idx(cameraId)) {
8794 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8795 gCamCapability[cameraId]->aux_cam_cap =
8796 getCapabilities(cameraHandle->ops, handle);
8797 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8798 rc = FAILED_TRANSACTION;
8799 free(gCamCapability[cameraId]);
8800 goto failed_op;
8801 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008802
8803 // Copy the main camera capability to main_cam_cap struct
8804 gCamCapability[cameraId]->main_cam_cap =
8805 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8806 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8807 LOGE("out of memory");
8808 rc = NO_MEMORY;
8809 goto failed_op;
8810 }
8811 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8812 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008813 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008814failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008815 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8816 cameraHandle = NULL;
8817open_failed:
8818 return rc;
8819}
8820
8821/*==========================================================================
8822 * FUNCTION : get3Aversion
8823 *
8824 * DESCRIPTION: get the Q3A S/W version
8825 *
8826 * PARAMETERS :
8827 * @sw_version: Reference of Q3A structure which will hold version info upon
8828 * return
8829 *
8830 * RETURN : None
8831 *
8832 *==========================================================================*/
8833void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8834{
8835 if(gCamCapability[mCameraId])
8836 sw_version = gCamCapability[mCameraId]->q3a_version;
8837 else
8838 LOGE("Capability structure NULL!");
8839}
8840
8841
8842/*===========================================================================
8843 * FUNCTION : initParameters
8844 *
8845 * DESCRIPTION: initialize camera parameters
8846 *
8847 * PARAMETERS :
8848 *
8849 * RETURN : int32_t type of status
8850 * NO_ERROR -- success
8851 * none-zero failure code
8852 *==========================================================================*/
8853int QCamera3HardwareInterface::initParameters()
8854{
8855 int rc = 0;
8856
8857 //Allocate Set Param Buffer
8858 mParamHeap = new QCamera3HeapMemory(1);
8859 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8860 if(rc != OK) {
8861 rc = NO_MEMORY;
8862 LOGE("Failed to allocate SETPARM Heap memory");
8863 delete mParamHeap;
8864 mParamHeap = NULL;
8865 return rc;
8866 }
8867
8868 //Map memory for parameters buffer
8869 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8870 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8871 mParamHeap->getFd(0),
8872 sizeof(metadata_buffer_t),
8873 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8874 if(rc < 0) {
8875 LOGE("failed to map SETPARM buffer");
8876 rc = FAILED_TRANSACTION;
8877 mParamHeap->deallocate();
8878 delete mParamHeap;
8879 mParamHeap = NULL;
8880 return rc;
8881 }
8882
8883 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8884
8885 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8886 return rc;
8887}
8888
8889/*===========================================================================
8890 * FUNCTION : deinitParameters
8891 *
8892 * DESCRIPTION: de-initialize camera parameters
8893 *
8894 * PARAMETERS :
8895 *
8896 * RETURN : NONE
8897 *==========================================================================*/
8898void QCamera3HardwareInterface::deinitParameters()
8899{
8900 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8901 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8902
8903 mParamHeap->deallocate();
8904 delete mParamHeap;
8905 mParamHeap = NULL;
8906
8907 mParameters = NULL;
8908
8909 free(mPrevParameters);
8910 mPrevParameters = NULL;
8911}
8912
8913/*===========================================================================
8914 * FUNCTION : calcMaxJpegSize
8915 *
8916 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8917 *
8918 * PARAMETERS :
8919 *
8920 * RETURN : max_jpeg_size
8921 *==========================================================================*/
8922size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8923{
8924 size_t max_jpeg_size = 0;
8925 size_t temp_width, temp_height;
8926 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8927 MAX_SIZES_CNT);
8928 for (size_t i = 0; i < count; i++) {
8929 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8930 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8931 if (temp_width * temp_height > max_jpeg_size ) {
8932 max_jpeg_size = temp_width * temp_height;
8933 }
8934 }
8935 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8936 return max_jpeg_size;
8937}
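/* Illustrative example (not from the original source): for a sensor whose largest
 * picture size is 4032x3024, the worst-case JPEG buffer is sized as
 *   4032 * 3024 * 3 / 2 + sizeof(camera3_jpeg_blob_t)
 *   = 18289152 bytes plus the blob marker,
 * i.e. 1.5 bytes per pixel of the largest resolution plus room for the
 * camera3_jpeg_blob_t marker that the framework expects at the end of the buffer.
 */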
8938
8939/*===========================================================================
8940 * FUNCTION : getMaxRawSize
8941 *
8942 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8943 *
8944 * PARAMETERS :
8945 *
8946 * RETURN : Largest supported Raw Dimension
8947 *==========================================================================*/
8948cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8949{
8950 int max_width = 0;
8951 cam_dimension_t maxRawSize;
8952
8953 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8954 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8955 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8956 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8957 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8958 }
8959 }
8960 return maxRawSize;
8961}
8962
8963
8964/*===========================================================================
8965 * FUNCTION : calcMaxJpegDim
8966 *
8967 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8968 *
8969 * PARAMETERS :
8970 *
8971 * RETURN : max_jpeg_dim
8972 *==========================================================================*/
8973cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8974{
8975 cam_dimension_t max_jpeg_dim;
8976 cam_dimension_t curr_jpeg_dim;
8977 max_jpeg_dim.width = 0;
8978 max_jpeg_dim.height = 0;
8979 curr_jpeg_dim.width = 0;
8980 curr_jpeg_dim.height = 0;
8981 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8982 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8983 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8984 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8985 max_jpeg_dim.width * max_jpeg_dim.height ) {
8986 max_jpeg_dim.width = curr_jpeg_dim.width;
8987 max_jpeg_dim.height = curr_jpeg_dim.height;
8988 }
8989 }
8990 return max_jpeg_dim;
8991}
8992
8993/*===========================================================================
8994 * FUNCTION : addStreamConfig
8995 *
8996 * DESCRIPTION: adds the stream configuration to the array
8997 *
8998 * PARAMETERS :
8999 * @available_stream_configs : pointer to stream configuration array
9000 * @scalar_format : scalar format
9001 * @dim : configuration dimension
9002 * @config_type : input or output configuration type
9003 *
9004 * RETURN : NONE
9005 *==========================================================================*/
9006void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9007 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9008{
9009 available_stream_configs.add(scalar_format);
9010 available_stream_configs.add(dim.width);
9011 available_stream_configs.add(dim.height);
9012 available_stream_configs.add(config_type);
9013}
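/* Illustrative example (not from the original source): each stream configuration
 * occupies four consecutive int32 entries. For instance, with
 * cam_dimension_t dim = {1920, 1080}, the call
 *   addStreamConfig(configs, HAL_PIXEL_FORMAT_YCbCr_420_888, dim,
 *           ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
 * appends {format, 1920, 1080, OUTPUT} to the flat array later published as
 * ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS.
 */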
9014
9015/*===========================================================================
9016 * FUNCTION : supportBurstCapture
9017 *
9018 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9019 *
9020 * PARAMETERS :
9021 * @cameraId : camera Id
9022 *
9023 * RETURN : true if camera supports BURST_CAPTURE
9024 * false otherwise
9025 *==========================================================================*/
9026bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9027{
9028 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9029 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9030 const int32_t highResWidth = 3264;
9031 const int32_t highResHeight = 2448;
9032
9033 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9034 // Maximum resolution images cannot be captured at >= 10fps
9035 // -> not supporting BURST_CAPTURE
9036 return false;
9037 }
9038
9039 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9040 // Maximum resolution images can be captured at >= 20fps
9041 // --> supporting BURST_CAPTURE
9042 return true;
9043 }
9044
9045 // Find the smallest highRes resolution, or largest resolution if there is none
9046 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9047 MAX_SIZES_CNT);
9048 size_t highRes = 0;
9049 while ((highRes + 1 < totalCnt) &&
9050 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9051 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9052 highResWidth * highResHeight)) {
9053 highRes++;
9054 }
9055 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9056 return true;
9057 } else {
9058 return false;
9059 }
9060}
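/* Illustrative example (not from the original source): assuming
 * picture_min_duration[0] (largest size) is 66666666 ns (~15 fps), neither early
 * return is taken (15 fps sits between 10 and 20 fps), so the function walks the
 * largest-first size table to the smallest entry that is still at least
 * 3264x2448 (~8MP) and reports BURST_CAPTURE support only if that size can be
 * captured within 50 ms per frame (>= 20 fps).
 */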
9061
9062/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009063 * FUNCTION : getPDStatIndex
9064 *
9065 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9066 *
9067 * PARAMETERS :
9068 * @caps : camera capabilities
9069 *
9070 * RETURN : int32_t type
9071 * non-negative - on success
9072 * -1 - on failure
9073 *==========================================================================*/
9074int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9075 if (nullptr == caps) {
9076 return -1;
9077 }
9078
9079 uint32_t metaRawCount = caps->meta_raw_channel_count;
9080 int32_t ret = -1;
9081 for (size_t i = 0; i < metaRawCount; i++) {
9082 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9083 ret = i;
9084 break;
9085 }
9086 }
9087
9088 return ret;
9089}
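/* Illustrative note (not from the original source): the index returned here is
 * used in initStaticMetadata() below to read raw_meta_dim[] for the PDAF stats
 * plane; e.g. a hypothetical 496x1320 PD stats buffer would be advertised as a
 * depth point cloud of (496 * 1320 * 2) / 16 = 81840 samples.
 */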
9090
9091/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009092 * FUNCTION : initStaticMetadata
9093 *
9094 * DESCRIPTION: initialize the static metadata
9095 *
9096 * PARAMETERS :
9097 * @cameraId : camera Id
9098 *
9099 * RETURN : int32_t type of status
9100 * 0 -- success
9101 * non-zero failure code
9102 *==========================================================================*/
9103int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9104{
9105 int rc = 0;
9106 CameraMetadata staticInfo;
9107 size_t count = 0;
9108 bool limitedDevice = false;
9109 char prop[PROPERTY_VALUE_MAX];
9110 bool supportBurst = false;
9111
9112 supportBurst = supportBurstCapture(cameraId);
9113
9114 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9115     * guaranteed, or if the min fps at max resolution is less than 20 fps, it is
9116     * advertised as a limited device*/
9117 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9118 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9119 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9120 !supportBurst;
9121
9122 uint8_t supportedHwLvl = limitedDevice ?
9123 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009124#ifndef USE_HAL_3_3
9125 // LEVEL_3 - This device will support level 3.
9126 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9127#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009128 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009129#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009130
9131 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9132 &supportedHwLvl, 1);
9133
9134 bool facingBack = false;
9135 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9136 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9137 facingBack = true;
9138 }
9139 /*HAL 3 only*/
9140 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9141 &gCamCapability[cameraId]->min_focus_distance, 1);
9142
9143 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9144 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9145
9146 /*should be using focal lengths but sensor doesn't provide that info now*/
9147 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9148 &gCamCapability[cameraId]->focal_length,
9149 1);
9150
9151 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9152 gCamCapability[cameraId]->apertures,
9153 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9154
9155 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9156 gCamCapability[cameraId]->filter_densities,
9157 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9158
9159
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009160 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9161 size_t mode_count =
9162 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9163 for (size_t i = 0; i < mode_count; i++) {
9164 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9165 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009166 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009167 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009168
9169 int32_t lens_shading_map_size[] = {
9170 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9171 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9172 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9173 lens_shading_map_size,
9174 sizeof(lens_shading_map_size)/sizeof(int32_t));
9175
9176 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9177 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9178
9179 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9180 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9181
9182 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9183 &gCamCapability[cameraId]->max_frame_duration, 1);
9184
9185 camera_metadata_rational baseGainFactor = {
9186 gCamCapability[cameraId]->base_gain_factor.numerator,
9187 gCamCapability[cameraId]->base_gain_factor.denominator};
9188 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9189 &baseGainFactor, 1);
9190
9191 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9192 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9193
9194 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9195 gCamCapability[cameraId]->pixel_array_size.height};
9196 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9197 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9198
9199 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9200 gCamCapability[cameraId]->active_array_size.top,
9201 gCamCapability[cameraId]->active_array_size.width,
9202 gCamCapability[cameraId]->active_array_size.height};
9203 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9204 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9205
9206 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9207 &gCamCapability[cameraId]->white_level, 1);
9208
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009209 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9210 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9211 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009212 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009213 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009214
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009215#ifndef USE_HAL_3_3
9216 bool hasBlackRegions = false;
9217 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9218 LOGW("black_region_count: %d is bounded to %d",
9219 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9220 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9221 }
9222 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9223 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9224 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9225 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9226 }
9227 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9228 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9229 hasBlackRegions = true;
9230 }
9231#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009232 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9233 &gCamCapability[cameraId]->flash_charge_duration, 1);
9234
9235 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9236 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9237
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009238 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9239 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9240 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009241 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9242 &timestampSource, 1);
9243
Thierry Strudel54dc9782017-02-15 12:12:10 -08009244 //update histogram vendor data
9245 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009246 &gCamCapability[cameraId]->histogram_size, 1);
9247
Thierry Strudel54dc9782017-02-15 12:12:10 -08009248 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009249 &gCamCapability[cameraId]->max_histogram_count, 1);
9250
Shuzhen Wang14415f52016-11-16 18:26:18 -08009251 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9252 //so that the app can request fewer bins than the maximum supported.
9253 std::vector<int32_t> histBins;
9254 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9255 histBins.push_back(maxHistBins);
9256 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9257 (maxHistBins & 0x1) == 0) {
9258 histBins.push_back(maxHistBins >> 1);
9259 maxHistBins >>= 1;
9260 }
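// Example with illustrative values: if max_histogram_count is 256 and
// MIN_CAM_HISTOGRAM_STATS_SIZE is 32, histBins ends up as {256, 128, 64, 32}.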
9261 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9262 histBins.data(), histBins.size());
9263
Thierry Strudel3d639192016-09-09 11:52:26 -07009264 int32_t sharpness_map_size[] = {
9265 gCamCapability[cameraId]->sharpness_map_size.width,
9266 gCamCapability[cameraId]->sharpness_map_size.height};
9267
9268 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9269 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9270
9271 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9272 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9273
Emilian Peev0f3c3162017-03-15 12:57:46 +00009274 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9275 if (0 <= indexPD) {
9276 // Advertise PD stats data as part of the Depth capabilities
9277 int32_t depthWidth =
9278 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9279 int32_t depthHeight =
9280 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
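// The sample count below assumes 2 bytes per PD pixel packed into 16-byte samples,
// i.e. (width * height * 2) / 16; the exact packing is an assumption about the backend.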
9281 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9282 assert(0 < depthSamplesCount);
9283 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9284 &depthSamplesCount, 1);
9285
9286 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9287 depthHeight,
9288 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9289 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9290 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9291 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9292 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9293
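// The depth stream entries below advertise a 33333333 ns minimum frame duration,
// i.e. roughly 30 fps.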
9294 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9295 depthHeight, 33333333,
9296 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9297 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9298 depthMinDuration,
9299 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9300
9301 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9302 depthHeight, 0,
9303 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9304 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9305 depthStallDuration,
9306 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9307
9308 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9309 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9310 }
9311
Thierry Strudel3d639192016-09-09 11:52:26 -07009312 int32_t scalar_formats[] = {
9313 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9314 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9315 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9316 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9317 HAL_PIXEL_FORMAT_RAW10,
9318 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009319 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9320 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9321 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009322
9323 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9324 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9325 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9326 count, MAX_SIZES_CNT, available_processed_sizes);
9327 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9328 available_processed_sizes, count * 2);
9329
9330 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9331 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9332 makeTable(gCamCapability[cameraId]->raw_dim,
9333 count, MAX_SIZES_CNT, available_raw_sizes);
9334 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9335 available_raw_sizes, count * 2);
9336
9337 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9338 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9339 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9340 count, MAX_SIZES_CNT, available_fps_ranges);
9341 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9342 available_fps_ranges, count * 2);
9343
9344 camera_metadata_rational exposureCompensationStep = {
9345 gCamCapability[cameraId]->exp_compensation_step.numerator,
9346 gCamCapability[cameraId]->exp_compensation_step.denominator};
9347 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9348 &exposureCompensationStep, 1);
9349
9350 Vector<uint8_t> availableVstabModes;
9351 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9352 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009353 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009354 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009355 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009356 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009357 count = IS_TYPE_MAX;
9358 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9359 for (size_t i = 0; i < count; i++) {
9360 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9361 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9362 eisSupported = true;
9363 break;
9364 }
9365 }
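// Video stabilization ON is advertised only for the back camera, when the
// persist.camera.eis.enable property is set and the sensor supports EIS 2.0 or 3.0.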
9366 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009367 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9368 }
9369 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9370 availableVstabModes.array(), availableVstabModes.size());
9371
9372 /*HAL 1 and HAL 3 common*/
9373 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9374 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9375 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009376 // Cap the max zoom to the max preferred value
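// Note: maxZoomStep / minZoomStep is an unsigned integer division, so the ratio is
// truncated to a whole number before being capped.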
9377 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009378 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9379 &maxZoom, 1);
9380
9381 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9382 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9383
9384 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9385 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9386 max3aRegions[2] = 0; /* AF not supported */
9387 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9388 max3aRegions, 3);
9389
9390 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9391 memset(prop, 0, sizeof(prop));
9392 property_get("persist.camera.facedetect", prop, "1");
9393 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9394 LOGD("Support face detection mode: %d",
9395 supportedFaceDetectMode);
9396
9397 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009398 /* supported face detect mode should be OFF if the max number of faces is 0 */
9399 if (maxFaces <= 0) {
9400 supportedFaceDetectMode = 0;
9401 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009402 Vector<uint8_t> availableFaceDetectModes;
9403 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9404 if (supportedFaceDetectMode == 1) {
9405 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9406 } else if (supportedFaceDetectMode == 2) {
9407 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9408 } else if (supportedFaceDetectMode == 3) {
9409 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9410 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9411 } else {
9412 maxFaces = 0;
9413 }
9414 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9415 availableFaceDetectModes.array(),
9416 availableFaceDetectModes.size());
9417 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9418 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009419 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9420 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9421 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009422
9423 int32_t exposureCompensationRange[] = {
9424 gCamCapability[cameraId]->exposure_compensation_min,
9425 gCamCapability[cameraId]->exposure_compensation_max};
9426 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9427 exposureCompensationRange,
9428 sizeof(exposureCompensationRange)/sizeof(int32_t));
9429
9430 uint8_t lensFacing = (facingBack) ?
9431 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9432 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9433
9434 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9435 available_thumbnail_sizes,
9436 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9437
9438 /* all sizes will be combined into this tag */
9439 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9440 /*android.scaler.availableStreamConfigurations*/
9441 Vector<int32_t> available_stream_configs;
9442 cam_dimension_t active_array_dim;
9443 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9444 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009445
9446 /*advertise the list of input dimensions supported based on the property below.
9447 By default all sizes up to 5MP will be advertised.
9448 Note that the setprop resolution format should be WxH.
9449 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9450 To list all supported sizes, setprop needs to be set with "0x0" */
9451 cam_dimension_t minInputSize = {2592,1944}; //5MP
9452 memset(prop, 0, sizeof(prop));
9453 property_get("persist.camera.input.minsize", prop, "2592x1944");
9454 if (strlen(prop) > 0) {
9455 char *saveptr = NULL;
9456 char *token = strtok_r(prop, "x", &saveptr);
9457 if (token != NULL) {
9458 minInputSize.width = atoi(token);
9459 }
9460 token = strtok_r(NULL, "x", &saveptr);
9461 if (token != NULL) {
9462 minInputSize.height = atoi(token);
9463 }
9464 }
9465
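// Each entry added by addStreamConfig() is a (format, width, height, direction) tuple,
// matching the layout of ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS.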
Thierry Strudel3d639192016-09-09 11:52:26 -07009466 /* Add input/output stream configurations for each scalar format */
9467 for (size_t j = 0; j < scalar_formats_count; j++) {
9468 switch (scalar_formats[j]) {
9469 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9470 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9471 case HAL_PIXEL_FORMAT_RAW10:
9472 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9473 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9474 addStreamConfig(available_stream_configs, scalar_formats[j],
9475 gCamCapability[cameraId]->raw_dim[i],
9476 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9477 }
9478 break;
9479 case HAL_PIXEL_FORMAT_BLOB:
9480 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9481 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9482 addStreamConfig(available_stream_configs, scalar_formats[j],
9483 gCamCapability[cameraId]->picture_sizes_tbl[i],
9484 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9485 }
9486 break;
9487 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9488 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9489 default:
9490 cam_dimension_t largest_picture_size;
9491 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9492 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9493 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9494 addStreamConfig(available_stream_configs, scalar_formats[j],
9495 gCamCapability[cameraId]->picture_sizes_tbl[i],
9496 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009497 /* For the two formats below we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009498 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9499 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009500 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9501 >= minInputSize.width) || (gCamCapability[cameraId]->
9502 picture_sizes_tbl[i].height >= minInputSize.height)) {
9503 addStreamConfig(available_stream_configs, scalar_formats[j],
9504 gCamCapability[cameraId]->picture_sizes_tbl[i],
9505 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9506 }
9507 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009508 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009509
Thierry Strudel3d639192016-09-09 11:52:26 -07009510 break;
9511 }
9512 }
9513
9514 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9515 available_stream_configs.array(), available_stream_configs.size());
9516 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9517 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9518
9519 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9520 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9521
9522 /* android.scaler.availableMinFrameDurations */
9523 Vector<int64_t> available_min_durations;
9524 for (size_t j = 0; j < scalar_formats_count; j++) {
9525 switch (scalar_formats[j]) {
9526 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9527 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9528 case HAL_PIXEL_FORMAT_RAW10:
9529 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9530 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9531 available_min_durations.add(scalar_formats[j]);
9532 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9533 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9534 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9535 }
9536 break;
9537 default:
9538 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9539 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9540 available_min_durations.add(scalar_formats[j]);
9541 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9542 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9543 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9544 }
9545 break;
9546 }
9547 }
9548 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9549 available_min_durations.array(), available_min_durations.size());
9550
9551 Vector<int32_t> available_hfr_configs;
9552 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9553 int32_t fps = 0;
9554 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9555 case CAM_HFR_MODE_60FPS:
9556 fps = 60;
9557 break;
9558 case CAM_HFR_MODE_90FPS:
9559 fps = 90;
9560 break;
9561 case CAM_HFR_MODE_120FPS:
9562 fps = 120;
9563 break;
9564 case CAM_HFR_MODE_150FPS:
9565 fps = 150;
9566 break;
9567 case CAM_HFR_MODE_180FPS:
9568 fps = 180;
9569 break;
9570 case CAM_HFR_MODE_210FPS:
9571 fps = 210;
9572 break;
9573 case CAM_HFR_MODE_240FPS:
9574 fps = 240;
9575 break;
9576 case CAM_HFR_MODE_480FPS:
9577 fps = 480;
9578 break;
9579 case CAM_HFR_MODE_OFF:
9580 case CAM_HFR_MODE_MAX:
9581 default:
9582 break;
9583 }
9584
9585 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9586 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9587 /* For each HFR frame rate, need to advertise one variable fps range
9588 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9589 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9590 * set by the app. When video recording is started, [120, 120] is
9591 * set. This way sensor configuration does not change when recording
9592 * is started */
9593
9594 /* (width, height, fps_min, fps_max, batch_size_max) */
9595 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9596 j < MAX_SIZES_CNT; j++) {
9597 available_hfr_configs.add(
9598 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9599 available_hfr_configs.add(
9600 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9601 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9602 available_hfr_configs.add(fps);
9603 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9604
9605 /* (width, height, fps_min, fps_max, batch_size_max) */
9606 available_hfr_configs.add(
9607 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9608 available_hfr_configs.add(
9609 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9610 available_hfr_configs.add(fps);
9611 available_hfr_configs.add(fps);
9612 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9613 }
9614 }
9615 }
9616 //Advertise HFR capability only if the property is set
9617 memset(prop, 0, sizeof(prop));
9618 property_get("persist.camera.hal3hfr.enable", prop, "1");
9619 uint8_t hfrEnable = (uint8_t)atoi(prop);
9620
9621 if(hfrEnable && available_hfr_configs.array()) {
9622 staticInfo.update(
9623 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9624 available_hfr_configs.array(), available_hfr_configs.size());
9625 }
9626
9627 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9628 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9629 &max_jpeg_size, 1);
9630
9631 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9632 size_t size = 0;
9633 count = CAM_EFFECT_MODE_MAX;
9634 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9635 for (size_t i = 0; i < count; i++) {
9636 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9637 gCamCapability[cameraId]->supported_effects[i]);
9638 if (NAME_NOT_FOUND != val) {
9639 avail_effects[size] = (uint8_t)val;
9640 size++;
9641 }
9642 }
9643 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9644 avail_effects,
9645 size);
9646
9647 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9648 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9649 size_t supported_scene_modes_cnt = 0;
9650 count = CAM_SCENE_MODE_MAX;
9651 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9652 for (size_t i = 0; i < count; i++) {
9653 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9654 CAM_SCENE_MODE_OFF) {
9655 int val = lookupFwkName(SCENE_MODES_MAP,
9656 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9657 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009658
Thierry Strudel3d639192016-09-09 11:52:26 -07009659 if (NAME_NOT_FOUND != val) {
9660 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9661 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9662 supported_scene_modes_cnt++;
9663 }
9664 }
9665 }
9666 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9667 avail_scene_modes,
9668 supported_scene_modes_cnt);
9669
9670 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9671 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9672 supported_scene_modes_cnt,
9673 CAM_SCENE_MODE_MAX,
9674 scene_mode_overrides,
9675 supported_indexes,
9676 cameraId);
9677
9678 if (supported_scene_modes_cnt == 0) {
9679 supported_scene_modes_cnt = 1;
9680 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9681 }
9682
9683 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9684 scene_mode_overrides, supported_scene_modes_cnt * 3);
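// Each override entry above is an (AE mode, AWB mode, AF mode) triple, hence the
// count * 3 element count.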
9685
9686 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9687 ANDROID_CONTROL_MODE_AUTO,
9688 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9689 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9690 available_control_modes,
9691 3);
9692
9693 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9694 size = 0;
9695 count = CAM_ANTIBANDING_MODE_MAX;
9696 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9697 for (size_t i = 0; i < count; i++) {
9698 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9699 gCamCapability[cameraId]->supported_antibandings[i]);
9700 if (NAME_NOT_FOUND != val) {
9701 avail_antibanding_modes[size] = (uint8_t)val;
9702 size++;
9703 }
9704
9705 }
9706 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9707 avail_antibanding_modes,
9708 size);
9709
9710 uint8_t avail_abberation_modes[] = {
9711 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9712 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9713 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9714 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9715 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9716 if (0 == count) {
9717 // If no aberration correction modes are available for a device, advertise only the OFF mode
9718 size = 1;
9719 } else {
9720 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9721 // So, advertise all 3 modes if at least one mode is supported, as per the
9722 // new M requirement
9723 size = 3;
9724 }
9725 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9726 avail_abberation_modes,
9727 size);
9728
9729 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9730 size = 0;
9731 count = CAM_FOCUS_MODE_MAX;
9732 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9733 for (size_t i = 0; i < count; i++) {
9734 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9735 gCamCapability[cameraId]->supported_focus_modes[i]);
9736 if (NAME_NOT_FOUND != val) {
9737 avail_af_modes[size] = (uint8_t)val;
9738 size++;
9739 }
9740 }
9741 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9742 avail_af_modes,
9743 size);
9744
9745 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9746 size = 0;
9747 count = CAM_WB_MODE_MAX;
9748 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9749 for (size_t i = 0; i < count; i++) {
9750 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9751 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9752 gCamCapability[cameraId]->supported_white_balances[i]);
9753 if (NAME_NOT_FOUND != val) {
9754 avail_awb_modes[size] = (uint8_t)val;
9755 size++;
9756 }
9757 }
9758 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9759 avail_awb_modes,
9760 size);
9761
9762 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9763 count = CAM_FLASH_FIRING_LEVEL_MAX;
9764 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9765 count);
9766 for (size_t i = 0; i < count; i++) {
9767 available_flash_levels[i] =
9768 gCamCapability[cameraId]->supported_firing_levels[i];
9769 }
9770 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9771 available_flash_levels, count);
9772
9773 uint8_t flashAvailable;
9774 if (gCamCapability[cameraId]->flash_available)
9775 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9776 else
9777 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9778 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9779 &flashAvailable, 1);
9780
9781 Vector<uint8_t> avail_ae_modes;
9782 count = CAM_AE_MODE_MAX;
9783 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9784 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009785 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9786 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9787 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9788 }
9789 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009790 }
9791 if (flashAvailable) {
9792 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9793 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9794 }
9795 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9796 avail_ae_modes.array(),
9797 avail_ae_modes.size());
9798
9799 int32_t sensitivity_range[2];
9800 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9801 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9802 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9803 sensitivity_range,
9804 sizeof(sensitivity_range) / sizeof(int32_t));
9805
9806 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9807 &gCamCapability[cameraId]->max_analog_sensitivity,
9808 1);
9809
9810 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9811 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9812 &sensor_orientation,
9813 1);
9814
9815 int32_t max_output_streams[] = {
9816 MAX_STALLING_STREAMS,
9817 MAX_PROCESSED_STREAMS,
9818 MAX_RAW_STREAMS};
9819 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9820 max_output_streams,
9821 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9822
9823 uint8_t avail_leds = 0;
9824 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9825 &avail_leds, 0);
9826
9827 uint8_t focus_dist_calibrated;
9828 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9829 gCamCapability[cameraId]->focus_dist_calibrated);
9830 if (NAME_NOT_FOUND != val) {
9831 focus_dist_calibrated = (uint8_t)val;
9832 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9833 &focus_dist_calibrated, 1);
9834 }
9835
9836 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9837 size = 0;
9838 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9839 MAX_TEST_PATTERN_CNT);
9840 for (size_t i = 0; i < count; i++) {
9841 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9842 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9843 if (NAME_NOT_FOUND != testpatternMode) {
9844 avail_testpattern_modes[size] = testpatternMode;
9845 size++;
9846 }
9847 }
9848 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9849 avail_testpattern_modes,
9850 size);
9851
9852 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9853 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9854 &max_pipeline_depth,
9855 1);
9856
9857 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9858 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9859 &partial_result_count,
9860 1);
9861
9862 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9863 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9864
9865 Vector<uint8_t> available_capabilities;
9866 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9867 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9868 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9869 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9870 if (supportBurst) {
9871 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9872 }
9873 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9874 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9875 if (hfrEnable && available_hfr_configs.array()) {
9876 available_capabilities.add(
9877 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9878 }
9879
9880 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9881 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9882 }
9883 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9884 available_capabilities.array(),
9885 available_capabilities.size());
9886
9887 //aeLockAvailable is set to true if capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9888 //The assumption is that all Bayer cameras support MANUAL_SENSOR.
9889 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9890 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9891
9892 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9893 &aeLockAvailable, 1);
9894
9895 //awbLockAvailable is set to true if capabilities include MANUAL_POST_PROCESSING or
9896 //BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9897 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9898 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9899
9900 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9901 &awbLockAvailable, 1);
9902
9903 int32_t max_input_streams = 1;
9904 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9905 &max_input_streams,
9906 1);
9907
9908 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9909 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9910 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9911 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9912 HAL_PIXEL_FORMAT_YCbCr_420_888};
9913 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9914 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
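// The map above advertises that IMPLEMENTATION_DEFINED and YCbCr_420_888 input buffers
// can each be reprocessed into BLOB (JPEG) or YCbCr_420_888 outputs.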
9915
9916 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9917 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9918 &max_latency,
9919 1);
9920
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009921#ifndef USE_HAL_3_3
9922 int32_t isp_sensitivity_range[2];
9923 isp_sensitivity_range[0] =
9924 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9925 isp_sensitivity_range[1] =
9926 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9927 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9928 isp_sensitivity_range,
9929 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9930#endif
9931
Thierry Strudel3d639192016-09-09 11:52:26 -07009932 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9933 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9934 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9935 available_hot_pixel_modes,
9936 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9937
9938 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9939 ANDROID_SHADING_MODE_FAST,
9940 ANDROID_SHADING_MODE_HIGH_QUALITY};
9941 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9942 available_shading_modes,
9943 3);
9944
9945 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9946 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9947 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9948 available_lens_shading_map_modes,
9949 2);
9950
9951 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9952 ANDROID_EDGE_MODE_FAST,
9953 ANDROID_EDGE_MODE_HIGH_QUALITY,
9954 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9955 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9956 available_edge_modes,
9957 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9958
9959 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9960 ANDROID_NOISE_REDUCTION_MODE_FAST,
9961 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9962 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9963 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9964 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9965 available_noise_red_modes,
9966 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9967
9968 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9969 ANDROID_TONEMAP_MODE_FAST,
9970 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9971 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9972 available_tonemap_modes,
9973 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9974
9975 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9976 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9977 available_hot_pixel_map_modes,
9978 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9979
9980 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9981 gCamCapability[cameraId]->reference_illuminant1);
9982 if (NAME_NOT_FOUND != val) {
9983 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9984 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9985 }
9986
9987 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9988 gCamCapability[cameraId]->reference_illuminant2);
9989 if (NAME_NOT_FOUND != val) {
9990 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9991 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9992 }
9993
9994 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9995 (void *)gCamCapability[cameraId]->forward_matrix1,
9996 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9997
9998 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9999 (void *)gCamCapability[cameraId]->forward_matrix2,
10000 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10001
10002 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10003 (void *)gCamCapability[cameraId]->color_transform1,
10004 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10005
10006 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10007 (void *)gCamCapability[cameraId]->color_transform2,
10008 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10009
10010 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10011 (void *)gCamCapability[cameraId]->calibration_transform1,
10012 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10013
10014 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10015 (void *)gCamCapability[cameraId]->calibration_transform2,
10016 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10017
10018 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10019 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10020 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10021 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10022 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10023 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10024 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10025 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10026 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10027 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10028 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10029 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10030 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10031 ANDROID_JPEG_GPS_COORDINATES,
10032 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10033 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10034 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10035 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10036 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10037 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10038 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10039 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10040 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10041 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010042#ifndef USE_HAL_3_3
10043 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10044#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010045 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010046 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010047 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10048 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010049 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010050 /* DevCamDebug metadata request_keys_basic */
10051 DEVCAMDEBUG_META_ENABLE,
10052 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010053 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010054 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010055 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010056 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Samuel Ha68ba5172016-12-15 18:41:12 -080010057 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010058
10059 size_t request_keys_cnt =
10060 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10061 Vector<int32_t> available_request_keys;
10062 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10063 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10064 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10065 }
10066
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010067 if (gExposeEnableZslKey) {
Chien-Yu Chened0a4c92017-05-01 18:25:03 +000010068 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010069 }
10070
Thierry Strudel3d639192016-09-09 11:52:26 -070010071 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10072 available_request_keys.array(), available_request_keys.size());
10073
10074 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10075 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10076 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10077 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10078 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10079 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10080 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10081 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10082 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10083 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10084 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10085 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10086 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10087 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10088 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10089 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10090 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010091 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010092 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10093 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10094 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010095 ANDROID_STATISTICS_FACE_SCORES,
10096#ifndef USE_HAL_3_3
10097 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10098#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010099 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010100 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010101 // DevCamDebug metadata result_keys_basic
10102 DEVCAMDEBUG_META_ENABLE,
10103 // DevCamDebug metadata result_keys AF
10104 DEVCAMDEBUG_AF_LENS_POSITION,
10105 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10106 DEVCAMDEBUG_AF_TOF_DISTANCE,
10107 DEVCAMDEBUG_AF_LUMA,
10108 DEVCAMDEBUG_AF_HAF_STATE,
10109 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10110 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10111 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10112 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10113 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10114 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10115 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10116 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10117 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10118 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10119 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10120 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10121 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10122 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10123 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10124 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10125 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10126 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10127 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10128 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10129 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10130 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10131 // DevCamDebug metadata result_keys AEC
10132 DEVCAMDEBUG_AEC_TARGET_LUMA,
10133 DEVCAMDEBUG_AEC_COMP_LUMA,
10134 DEVCAMDEBUG_AEC_AVG_LUMA,
10135 DEVCAMDEBUG_AEC_CUR_LUMA,
10136 DEVCAMDEBUG_AEC_LINECOUNT,
10137 DEVCAMDEBUG_AEC_REAL_GAIN,
10138 DEVCAMDEBUG_AEC_EXP_INDEX,
10139 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010140 // DevCamDebug metadata result_keys zzHDR
10141 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10142 DEVCAMDEBUG_AEC_L_LINECOUNT,
10143 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10144 DEVCAMDEBUG_AEC_S_LINECOUNT,
10145 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10146 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10147 // DevCamDebug metadata result_keys ADRC
10148 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10149 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10150 DEVCAMDEBUG_AEC_GTM_RATIO,
10151 DEVCAMDEBUG_AEC_LTM_RATIO,
10152 DEVCAMDEBUG_AEC_LA_RATIO,
10153 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010154 // DevCamDebug metadata result_keys AWB
10155 DEVCAMDEBUG_AWB_R_GAIN,
10156 DEVCAMDEBUG_AWB_G_GAIN,
10157 DEVCAMDEBUG_AWB_B_GAIN,
10158 DEVCAMDEBUG_AWB_CCT,
10159 DEVCAMDEBUG_AWB_DECISION,
10160 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010161 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10162 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10163 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010164 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010165 };
10166
Thierry Strudel3d639192016-09-09 11:52:26 -070010167 size_t result_keys_cnt =
10168 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10169
10170 Vector<int32_t> available_result_keys;
10171 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10172 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10173 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10174 }
10175 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10176 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10177 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10178 }
10179 if (supportedFaceDetectMode == 1) {
10180 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10181 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10182 } else if ((supportedFaceDetectMode == 2) ||
10183 (supportedFaceDetectMode == 3)) {
10184 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10185 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10186 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010187#ifndef USE_HAL_3_3
10188 if (hasBlackRegions) {
10189 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10190 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10191 }
10192#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010193
10194 if (gExposeEnableZslKey) {
10195 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10196 }
10197
Thierry Strudel3d639192016-09-09 11:52:26 -070010198 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10199 available_result_keys.array(), available_result_keys.size());
10200
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010201 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010202 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10203 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10204 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10205 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10206 ANDROID_SCALER_CROPPING_TYPE,
10207 ANDROID_SYNC_MAX_LATENCY,
10208 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10209 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10210 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10211 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10212 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10213 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10214 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10215 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10216 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10217 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10218 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10219 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10220 ANDROID_LENS_FACING,
10221 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10222 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10223 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10224 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10225 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10226 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10227 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10228 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10229 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10230 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10231 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10232 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10233 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10234 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10235 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10236 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10237 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10238 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10239 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10240 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010241 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010242 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10243 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10244 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10245 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10246 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10247 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10248 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10249 ANDROID_CONTROL_AVAILABLE_MODES,
10250 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10251 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10252 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10253 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010254 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10255#ifndef USE_HAL_3_3
10256 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10257 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10258#endif
10259 };
10260
10261 Vector<int32_t> available_characteristics_keys;
10262 available_characteristics_keys.appendArray(characteristics_keys_basic,
10263 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10264#ifndef USE_HAL_3_3
10265 if (hasBlackRegions) {
10266 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10267 }
10268#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010269
10270 if (0 <= indexPD) {
10271 int32_t depthKeys[] = {
10272 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10273 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10274 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10275 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10276 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10277 };
10278 available_characteristics_keys.appendArray(depthKeys,
10279 sizeof(depthKeys) / sizeof(depthKeys[0]));
10280 }
10281
Thierry Strudel3d639192016-09-09 11:52:26 -070010282 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010283 available_characteristics_keys.array(),
10284 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010285
10286 /*available stall durations depend on the hw + sw and will be different for different devices */
10287 /*have to add for raw after implementation*/
10288 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10289 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10290
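// Stall duration entries are (format, width, height, stall duration in ns); JPEG stalls
// come from jpeg_stall_durations and RAW16 stalls from raw16_stall_durations.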
10291 Vector<int64_t> available_stall_durations;
10292 for (uint32_t j = 0; j < stall_formats_count; j++) {
10293 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10294 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10295 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10296 available_stall_durations.add(stall_formats[j]);
10297 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10298 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10299 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10300 }
10301 } else {
10302 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10303 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10304 available_stall_durations.add(stall_formats[j]);
10305 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10306 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10307 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10308 }
10309 }
10310 }
10311 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10312 available_stall_durations.array(),
10313 available_stall_durations.size());
10314
10315 //QCAMERA3_OPAQUE_RAW
10316 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10317 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10318 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10319 case LEGACY_RAW:
10320 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10321 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10322 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10323 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10324 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10325 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10326 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10327 break;
10328 case MIPI_RAW:
10329 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10330 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10331 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10332 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10333 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10334 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10335 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10336 break;
10337 default:
10338 LOGE("unknown opaque_raw_format %d",
10339 gCamCapability[cameraId]->opaque_raw_fmt);
10340 break;
10341 }
10342 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10343
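// Each QCAMERA3_OPAQUE_RAW_STRIDES entry is a (width, height, stride) triple, with the
// stride taken from plane 0 of the layout computed by mm_stream_calc_offset_raw().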
10344 Vector<int32_t> strides;
10345 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10346 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10347 cam_stream_buf_plane_info_t buf_planes;
10348 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10349 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10350 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10351 &gCamCapability[cameraId]->padding_info, &buf_planes);
10352 strides.add(buf_planes.plane_info.mp[0].stride);
10353 }
10354 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10355 strides.size());
10356
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010357 //TBD: remove the following line once backend advertises zzHDR in feature mask
10358 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010359 //Video HDR default
10360 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10361 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010362 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010363 int32_t vhdr_mode[] = {
10364 QCAMERA3_VIDEO_HDR_MODE_OFF,
10365 QCAMERA3_VIDEO_HDR_MODE_ON};
10366
10367 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10368 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10369 vhdr_mode, vhdr_mode_count);
10370 }
10371
Thierry Strudel3d639192016-09-09 11:52:26 -070010372 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10373 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10374 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10375
10376 uint8_t isMonoOnly =
10377 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10378 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10379 &isMonoOnly, 1);
10380
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010381#ifndef USE_HAL_3_3
10382 Vector<int32_t> opaque_size;
10383 for (size_t j = 0; j < scalar_formats_count; j++) {
10384 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10385 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10386 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10387 cam_stream_buf_plane_info_t buf_planes;
10388
10389 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10390 &gCamCapability[cameraId]->padding_info, &buf_planes);
10391
10392 if (rc == 0) {
10393 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10394 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10395 opaque_size.add(buf_planes.plane_info.frame_len);
10396 } else {
10397 LOGE("raw frame calculation failed!");
10398 }
10399 }
10400 }
10401 }
10402
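// ANDROID_SENSOR_OPAQUE_RAW_SIZE expects (width, height, frame size in bytes) triples,
// which is why the check below requires the list length to be a multiple of PER_CONFIGURATION_SIZE_3.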
10403 if ((opaque_size.size() > 0) &&
10404 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10405 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10406 else
10407 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10408#endif
10409
Thierry Strudel04e026f2016-10-10 11:27:36 -070010410 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10411 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10412 size = 0;
10413 count = CAM_IR_MODE_MAX;
10414 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10415 for (size_t i = 0; i < count; i++) {
10416 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10417 gCamCapability[cameraId]->supported_ir_modes[i]);
10418 if (NAME_NOT_FOUND != val) {
10419 avail_ir_modes[size] = (int32_t)val;
10420 size++;
10421 }
10422 }
10423 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10424 avail_ir_modes, size);
10425 }
10426
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010427 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10428 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10429 size = 0;
10430 count = CAM_AEC_CONVERGENCE_MAX;
10431 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10432 for (size_t i = 0; i < count; i++) {
10433 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10434 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10435 if (NAME_NOT_FOUND != val) {
10436 available_instant_aec_modes[size] = (int32_t)val;
10437 size++;
10438 }
10439 }
10440 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10441 available_instant_aec_modes, size);
10442 }
10443
Thierry Strudel54dc9782017-02-15 12:12:10 -080010444 int32_t sharpness_range[] = {
10445 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10446 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10447 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10448
10449 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10450 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10451 size = 0;
10452 count = CAM_BINNING_CORRECTION_MODE_MAX;
10453 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10454 for (size_t i = 0; i < count; i++) {
10455 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10456 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10457 gCamCapability[cameraId]->supported_binning_modes[i]);
10458 if (NAME_NOT_FOUND != val) {
10459 avail_binning_modes[size] = (int32_t)val;
10460 size++;
10461 }
10462 }
10463 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10464 avail_binning_modes, size);
10465 }
10466
10467 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10468 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10469 size = 0;
10470 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10471 for (size_t i = 0; i < count; i++) {
10472 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10473 gCamCapability[cameraId]->supported_aec_modes[i]);
10474 if (NAME_NOT_FOUND != val)
10475 available_aec_modes[size++] = val;
10476 }
10477 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10478 available_aec_modes, size);
10479 }
10480
10481 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10482 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10483 size = 0;
10484 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10485 for (size_t i = 0; i < count; i++) {
10486 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10487 gCamCapability[cameraId]->supported_iso_modes[i]);
10488 if (NAME_NOT_FOUND != val)
10489 available_iso_modes[size++] = val;
10490 }
10491 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10492 available_iso_modes, size);
10493 }
10494
10495 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010496 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010497 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10498 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10499 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10500
10501 int32_t available_saturation_range[4];
10502 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10503 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10504 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10505 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10506 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10507 available_saturation_range, 4);
10508
10509 uint8_t is_hdr_values[2];
10510 is_hdr_values[0] = 0;
10511 is_hdr_values[1] = 1;
10512 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10513 is_hdr_values, 2);
10514
10515 float is_hdr_confidence_range[2];
10516 is_hdr_confidence_range[0] = 0.0;
10517 is_hdr_confidence_range[1] = 1.0;
10518 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10519 is_hdr_confidence_range, 2);
10520
Emilian Peev0a972ef2017-03-16 10:25:53 +000010521 size_t eepromLength = strnlen(
10522 reinterpret_cast<const char *>(
10523 gCamCapability[cameraId]->eeprom_version_info),
10524 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10525 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010526 char easelInfo[] = ",E:N";
10527 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10528 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10529 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010530 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10531 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010532 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010533 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10534 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10535 }
10536
Thierry Strudel3d639192016-09-09 11:52:26 -070010537 gStaticMetadata[cameraId] = staticInfo.release();
10538 return rc;
10539}
10540
10541/*===========================================================================
10542 * FUNCTION : makeTable
10543 *
10544 * DESCRIPTION: make a table of sizes
10545 *
10546 * PARAMETERS :
10547 *
10548 *
10549 *==========================================================================*/
10550void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10551 size_t max_size, int32_t *sizeTable)
10552{
10553 size_t j = 0;
10554 if (size > max_size) {
10555 size = max_size;
10556 }
10557 for (size_t i = 0; i < size; i++) {
10558 sizeTable[j] = dimTable[i].width;
10559 sizeTable[j+1] = dimTable[i].height;
10560 j+=2;
10561 }
10562}
10563
10564/*===========================================================================
10565 * FUNCTION : makeFPSTable
10566 *
10567 * DESCRIPTION: make a table of fps ranges
10568 *
10569 * PARAMETERS :
10570 *
10571 *==========================================================================*/
10572void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10573 size_t max_size, int32_t *fpsRangesTable)
10574{
10575 size_t j = 0;
10576 if (size > max_size) {
10577 size = max_size;
10578 }
10579 for (size_t i = 0; i < size; i++) {
10580 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10581 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10582 j+=2;
10583 }
10584}
10585
10586/*===========================================================================
10587 * FUNCTION : makeOverridesList
10588 *
10589 * DESCRIPTION: make a list of scene mode overrides
10590 *
10591 * PARAMETERS :
10592 *
10593 *
10594 *==========================================================================*/
10595void QCamera3HardwareInterface::makeOverridesList(
10596 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10597 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10598{
10599 /* The daemon gives a list of overrides for all scene modes.
10600 However, we should send the framework only the overrides for the
10601 scene modes that it supports. */
10602 size_t j = 0;
10603 if (size > max_size) {
10604 size = max_size;
10605 }
10606 size_t focus_count = CAM_FOCUS_MODE_MAX;
10607 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10608 focus_count);
10609 for (size_t i = 0; i < size; i++) {
10610 bool supt = false;
10611 size_t index = supported_indexes[i];
10612 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10613 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10614 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10615 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10616 overridesTable[index].awb_mode);
10617 if (NAME_NOT_FOUND != val) {
10618 overridesList[j+1] = (uint8_t)val;
10619 }
10620 uint8_t focus_override = overridesTable[index].af_mode;
10621 for (size_t k = 0; k < focus_count; k++) {
10622 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10623 supt = true;
10624 break;
10625 }
10626 }
10627 if (supt) {
10628 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10629 focus_override);
10630 if (NAME_NOT_FOUND != val) {
10631 overridesList[j+2] = (uint8_t)val;
10632 }
10633 } else {
10634 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10635 }
10636 j+=3;
10637 }
10638}
10639
10640/*===========================================================================
10641 * FUNCTION : filterJpegSizes
10642 *
10643 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that are
10644 * no smaller than the active array size divided by the downscale factor
10645 *
10646 * PARAMETERS :
10647 *
10648 * RETURN : length of jpegSizes array
10649 *==========================================================================*/
10650
10651size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10652 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10653 uint8_t downscale_factor)
10654{
10655 if (0 == downscale_factor) {
10656 downscale_factor = 1;
10657 }
10658
10659 int32_t min_width = active_array_size.width / downscale_factor;
10660 int32_t min_height = active_array_size.height / downscale_factor;
10661 size_t jpegSizesCnt = 0;
10662 if (processedSizesCnt > maxCount) {
10663 processedSizesCnt = maxCount;
10664 }
10665 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10666 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10667 jpegSizes[jpegSizesCnt] = processedSizes[i];
10668 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10669 jpegSizesCnt += 2;
10670 }
10671 }
10672 return jpegSizesCnt;
10673}
10674
10675/*===========================================================================
10676 * FUNCTION : computeNoiseModelEntryS
10677 *
10678 * DESCRIPTION: function to map a given sensitivity to the S noise
10679 * model parameters in the DNG noise model.
10680 *
10681 * PARAMETERS : sens : the sensor sensitivity
10682 *
10683 * RETURN : S (sensor amplification) noise
10684 *
10685 *==========================================================================*/
10686double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10687 double s = gCamCapability[mCameraId]->gradient_S * sens +
10688 gCamCapability[mCameraId]->offset_S;
10689 return ((s < 0.0) ? 0.0 : s);
10690}
10691
10692/*===========================================================================
10693 * FUNCTION : computeNoiseModelEntryO
10694 *
10695 * DESCRIPTION: function to map a given sensitivity to the O noise
10696 * model parameters in the DNG noise model.
10697 *
10698 * PARAMETERS : sens : the sensor sensitivity
10699 *
10700 * RETURN : O (sensor readout) noise
10701 *
10702 *==========================================================================*/
10703double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10704 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10705 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10706 1.0 : (1.0 * sens / max_analog_sens);
10707 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10708 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10709 return ((o < 0.0) ? 0.0 : o);
10710}
10711
10712/*===========================================================================
10713 * FUNCTION : getSensorSensitivity
10714 *
10715 * DESCRIPTION: convert iso_mode to an integer value
10716 *
10717 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10718 *
10719 * RETURN : sensitivity supported by sensor
10720 *
10721 *==========================================================================*/
10722int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10723{
10724 int32_t sensitivity;
10725
10726 switch (iso_mode) {
10727 case CAM_ISO_MODE_100:
10728 sensitivity = 100;
10729 break;
10730 case CAM_ISO_MODE_200:
10731 sensitivity = 200;
10732 break;
10733 case CAM_ISO_MODE_400:
10734 sensitivity = 400;
10735 break;
10736 case CAM_ISO_MODE_800:
10737 sensitivity = 800;
10738 break;
10739 case CAM_ISO_MODE_1600:
10740 sensitivity = 1600;
10741 break;
10742 default:
10743 sensitivity = -1;
10744 break;
10745 }
10746 return sensitivity;
10747}
10748
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010749int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010750 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010751 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10752 // to connect to Easel.
10753 bool doNotpowerOnEasel =
10754 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10755
10756 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010757 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10758 return OK;
10759 }
10760
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010761 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010762 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010763 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010764 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010765 return res;
10766 }
10767
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010768 EaselManagerClientOpened = true;
10769
10770 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010771 if (res != OK) {
10772 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10773 }
10774
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010775 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010776 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010777
10778 // Expose enableZsl key only when HDR+ mode is enabled.
10779 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010780 }
10781
10782 return OK;
10783}
10784
Thierry Strudel3d639192016-09-09 11:52:26 -070010785/*===========================================================================
10786 * FUNCTION : getCamInfo
10787 *
10788 * DESCRIPTION: query camera capabilities
10789 *
10790 * PARAMETERS :
10791 * @cameraId : camera Id
10792 * @info : camera info struct to be filled in with camera capabilities
10793 *
10794 * RETURN : int type of status
10795 * NO_ERROR -- success
10796 * non-zero failure code
10797 *==========================================================================*/
10798int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10799 struct camera_info *info)
10800{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010801 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010802 int rc = 0;
10803
10804 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010805
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010806 {
10807 Mutex::Autolock l(gHdrPlusClientLock);
10808 rc = initHdrPlusClientLocked();
10809 if (rc != OK) {
10810 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10811 pthread_mutex_unlock(&gCamLock);
10812 return rc;
10813 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010814 }
10815
Thierry Strudel3d639192016-09-09 11:52:26 -070010816 if (NULL == gCamCapability[cameraId]) {
10817 rc = initCapabilities(cameraId);
10818 if (rc < 0) {
10819 pthread_mutex_unlock(&gCamLock);
10820 return rc;
10821 }
10822 }
10823
10824 if (NULL == gStaticMetadata[cameraId]) {
10825 rc = initStaticMetadata(cameraId);
10826 if (rc < 0) {
10827 pthread_mutex_unlock(&gCamLock);
10828 return rc;
10829 }
10830 }
10831
10832 switch(gCamCapability[cameraId]->position) {
10833 case CAM_POSITION_BACK:
10834 case CAM_POSITION_BACK_AUX:
10835 info->facing = CAMERA_FACING_BACK;
10836 break;
10837
10838 case CAM_POSITION_FRONT:
10839 case CAM_POSITION_FRONT_AUX:
10840 info->facing = CAMERA_FACING_FRONT;
10841 break;
10842
10843 default:
10844 LOGE("Unknown position type %d for camera id:%d",
10845 gCamCapability[cameraId]->position, cameraId);
10846 rc = -1;
10847 break;
10848 }
10849
10850
10851 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010852#ifndef USE_HAL_3_3
10853 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10854#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010855 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010856#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010857 info->static_camera_characteristics = gStaticMetadata[cameraId];
10858
10859 //For now assume both cameras can operate independently.
10860 info->conflicting_devices = NULL;
10861 info->conflicting_devices_length = 0;
10862
10863 //resource cost is 100 * MIN(1.0, m/M),
10864 //where m is throughput requirement with maximum stream configuration
10865 //and M is CPP maximum throughput.
10866 float max_fps = 0.0;
10867 for (uint32_t i = 0;
10868 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10869 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10870 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10871 }
10872 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10873 gCamCapability[cameraId]->active_array_size.width *
10874 gCamCapability[cameraId]->active_array_size.height * max_fps /
10875 gCamCapability[cameraId]->max_pixel_bandwidth;
10876 info->resource_cost = 100 * MIN(1.0, ratio);
10877 LOGI("camera %d resource cost is %d", cameraId,
10878 info->resource_cost);
10879
10880 pthread_mutex_unlock(&gCamLock);
10881 return rc;
10882}
10883
10884/*===========================================================================
10885 * FUNCTION : translateCapabilityToMetadata
10886 *
10887 * DESCRIPTION: translate the capability into camera_metadata_t
10888 *
10889 * PARAMETERS : type of the request
10890 *
10891 *
10892 * RETURN : success: camera_metadata_t*
10893 * failure: NULL
10894 *
10895 *==========================================================================*/
10896camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10897{
10898 if (mDefaultMetadata[type] != NULL) {
10899 return mDefaultMetadata[type];
10900 }
10901 //first time we are handling this request
10902 //fill up the metadata structure using the wrapper class
10903 CameraMetadata settings;
10904 //translate from cam_capability_t to camera_metadata_tag_t
10905 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10906 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10907 int32_t defaultRequestID = 0;
10908 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10909
10910 /* OIS disable */
10911 char ois_prop[PROPERTY_VALUE_MAX];
10912 memset(ois_prop, 0, sizeof(ois_prop));
10913 property_get("persist.camera.ois.disable", ois_prop, "0");
10914 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10915
10916 /* Force video to use OIS */
10917 char videoOisProp[PROPERTY_VALUE_MAX];
10918 memset(videoOisProp, 0, sizeof(videoOisProp));
10919 property_get("persist.camera.ois.video", videoOisProp, "1");
10920 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010921
10922 // Hybrid AE enable/disable
10923 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10924 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10925 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10926 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10927
Thierry Strudel3d639192016-09-09 11:52:26 -070010928 uint8_t controlIntent = 0;
10929 uint8_t focusMode;
10930 uint8_t vsMode;
10931 uint8_t optStabMode;
10932 uint8_t cacMode;
10933 uint8_t edge_mode;
10934 uint8_t noise_red_mode;
10935 uint8_t tonemap_mode;
10936 bool highQualityModeEntryAvailable = FALSE;
10937 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010938 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010939 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10940 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010941 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010942 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010943 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010944
Thierry Strudel3d639192016-09-09 11:52:26 -070010945 switch (type) {
10946 case CAMERA3_TEMPLATE_PREVIEW:
10947 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10948 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10949 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10950 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10951 edge_mode = ANDROID_EDGE_MODE_FAST;
10952 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10953 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10954 break;
10955 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10956 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10957 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10958 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10959 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10960 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10961 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10962 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10963 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10964 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10965 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10966 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10967 highQualityModeEntryAvailable = TRUE;
10968 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10969 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10970 fastModeEntryAvailable = TRUE;
10971 }
10972 }
10973 if (highQualityModeEntryAvailable) {
10974 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10975 } else if (fastModeEntryAvailable) {
10976 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10977 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010978 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10979 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10980 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010981 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010982 break;
10983 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10984 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10985 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10986 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010987 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10988 edge_mode = ANDROID_EDGE_MODE_FAST;
10989 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10990 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10991 if (forceVideoOis)
10992 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10993 break;
10994 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10995 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10996 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10997 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010998 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10999 edge_mode = ANDROID_EDGE_MODE_FAST;
11000 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11001 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11002 if (forceVideoOis)
11003 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11004 break;
11005 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11006 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11007 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11008 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11009 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11010 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11011 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11012 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11013 break;
11014 case CAMERA3_TEMPLATE_MANUAL:
11015 edge_mode = ANDROID_EDGE_MODE_FAST;
11016 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11017 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11018 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11019 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11020 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11021 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11022 break;
11023 default:
11024 edge_mode = ANDROID_EDGE_MODE_FAST;
11025 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11026 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11027 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11028 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11029 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11030 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11031 break;
11032 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011033 // Set CAC to OFF if the underlying device doesn't support it
11034 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11035 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11036 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011037 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11038 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11039 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11040 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11041 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11042 }
11043 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011044 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011045 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011046
11047 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11048 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11049 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11050 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11051 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11052 || ois_disable)
11053 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11054 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011055 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011056
11057 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11058 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11059
11060 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11061 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11062
11063 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11064 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11065
11066 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11067 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11068
11069 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11070 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11071
11072 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11073 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11074
11075 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11076 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11077
11078 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11079 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11080
11081 /*flash*/
11082 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11083 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11084
11085 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11086 settings.update(ANDROID_FLASH_FIRING_POWER,
11087 &flashFiringLevel, 1);
11088
11089 /* lens */
11090 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11091 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11092
11093 if (gCamCapability[mCameraId]->filter_densities_count) {
11094 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11095 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11096 gCamCapability[mCameraId]->filter_densities_count);
11097 }
11098
11099 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11100 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11101
Thierry Strudel3d639192016-09-09 11:52:26 -070011102 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11103 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11104
11105 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11106 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11107
11108 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11109 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11110
11111 /* face detection (default to OFF) */
11112 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11113 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11114
Thierry Strudel54dc9782017-02-15 12:12:10 -080011115 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11116 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011117
11118 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11119 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11120
11121 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11122 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11123
Thierry Strudel3d639192016-09-09 11:52:26 -070011124
11125 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11126 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11127
11128 /* Exposure time (default to the minimum supported exposure time) */
11129 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11130 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11131
11132 /* frame duration */
11133 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11134 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11135
11136 /* sensitivity */
11137 static const int32_t default_sensitivity = 100;
11138 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011139#ifndef USE_HAL_3_3
11140 static const int32_t default_isp_sensitivity =
11141 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11142 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11143#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011144
11145 /*edge mode*/
11146 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11147
11148 /*noise reduction mode*/
11149 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11150
11151 /*color correction mode*/
11152 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11153 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11154
11155 /*tonemap mode*/
11156 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11157
11158 int32_t scaler_crop_region[4];
11159 scaler_crop_region[0] = 0;
11160 scaler_crop_region[1] = 0;
11161 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11162 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11163 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11164
11165 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11166 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11167
11168 /*focus distance*/
11169 float focus_distance = 0.0;
11170 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11171
11172 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011173 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011174 float max_range = 0.0;
11175 float max_fixed_fps = 0.0;
11176 int32_t fps_range[2] = {0, 0};
11177 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11178 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011179 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11180 TEMPLATE_MAX_PREVIEW_FPS) {
11181 continue;
11182 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011183 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11184 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11185 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11186 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11187 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11188 if (range > max_range) {
11189 fps_range[0] =
11190 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11191 fps_range[1] =
11192 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11193 max_range = range;
11194 }
11195 } else {
11196 if (range < 0.01 && max_fixed_fps <
11197 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11198 fps_range[0] =
11199 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11200 fps_range[1] =
11201 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11202 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11203 }
11204 }
11205 }
11206 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
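    // Illustrative example (hypothetical fps table, after the TEMPLATE_MAX_PREVIEW_FPS
    // filter): given {[15, 30], [30, 30], [7.5, 30]}, the preview/still/ZSL templates pick
    // the widest range, [7, 30] (7.5 truncated by the int32_t cast), while the remaining
    // templates pick the highest fixed range, [30, 30].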
11207
11208 /*precapture trigger*/
11209 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11210 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11211
11212 /*af trigger*/
11213 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11214 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11215
11216 /* ae & af regions */
11217 int32_t active_region[] = {
11218 gCamCapability[mCameraId]->active_array_size.left,
11219 gCamCapability[mCameraId]->active_array_size.top,
11220 gCamCapability[mCameraId]->active_array_size.left +
11221 gCamCapability[mCameraId]->active_array_size.width,
11222 gCamCapability[mCameraId]->active_array_size.top +
11223 gCamCapability[mCameraId]->active_array_size.height,
11224 0};
11225 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11226 sizeof(active_region) / sizeof(active_region[0]));
11227 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11228 sizeof(active_region) / sizeof(active_region[0]));
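    // Illustrative note: active_region is (xmin, ymin, xmax, ymax, weight); for a
    // hypothetical 4000x3000 active array at offset (0, 0) this is {0, 0, 4000, 3000, 0},
    // and a weight of 0 marks the region as unset/ignored by the framework.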
11229
11230 /* black level lock */
11231 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11232 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11233
Thierry Strudel3d639192016-09-09 11:52:26 -070011234 //special defaults for manual template
11235 if (type == CAMERA3_TEMPLATE_MANUAL) {
11236 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11237 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11238
11239 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11240 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11241
11242 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11243 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11244
11245 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11246 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11247
11248 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11249 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11250
11251 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11252 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11253 }
11254
11255
11256 /* TNR
11257 * This is where we decide for which templates TNR is enabled.
11258 * TNR is enabled if either the preview or the video stream requires it.
11259 * This is not to be confused with per-stream linking; that decision is
11260 * still made per session and is handled as part of stream configuration.
11261 */
11262 uint8_t tnr_enable = 0;
11263
11264 if (m_bTnrPreview || m_bTnrVideo) {
11265
11266 switch (type) {
11267 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11268 tnr_enable = 1;
11269 break;
11270
11271 default:
11272 tnr_enable = 0;
11273 break;
11274 }
11275
11276 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11277 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11278 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11279
11280 LOGD("TNR:%d with process plate %d for template:%d",
11281 tnr_enable, tnr_process_type, type);
11282 }
11283
11284 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011285 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011286 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11287
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011288 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011289 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11290
Shuzhen Wang920ea402017-05-03 08:49:39 -070011291 uint8_t related_camera_id = mCameraId;
11292 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011293
11294 /* CDS default */
11295 char prop[PROPERTY_VALUE_MAX];
11296 memset(prop, 0, sizeof(prop));
11297 property_get("persist.camera.CDS", prop, "Auto");
11298 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11299 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11300 if (CAM_CDS_MODE_MAX == cds_mode) {
11301 cds_mode = CAM_CDS_MODE_AUTO;
11302 }
11303
11304 /* Disable CDS in templates that have TNR enabled */
11305 if (tnr_enable)
11306 cds_mode = CAM_CDS_MODE_OFF;
11307
11308 int32_t mode = cds_mode;
11309 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011310
Thierry Strudel269c81a2016-10-12 12:13:59 -070011311 /* Manual Convergence AEC Speed is disabled by default*/
11312 float default_aec_speed = 0;
11313 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11314
11315 /* Manual Convergence AWB Speed is disabled by default*/
11316 float default_awb_speed = 0;
11317 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11318
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011319 // Set instant AEC to normal convergence by default
11320 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11321 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11322
Shuzhen Wang19463d72016-03-08 11:09:52 -080011323 /* hybrid ae */
11324 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11325
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011326 if (gExposeEnableZslKey) {
11327 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11328 }
11329
Thierry Strudel3d639192016-09-09 11:52:26 -070011330 mDefaultMetadata[type] = settings.release();
11331
11332 return mDefaultMetadata[type];
11333}
11334
11335/*===========================================================================
11336 * FUNCTION : setFrameParameters
11337 *
11338 * DESCRIPTION: set parameters per frame as requested in the metadata from
11339 * framework
11340 *
11341 * PARAMETERS :
11342 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011343 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011344 * @blob_request: Whether this request is a blob request or not
11345 *
11346 * RETURN : success: NO_ERROR
11347 * failure:
11348 *==========================================================================*/
11349int QCamera3HardwareInterface::setFrameParameters(
11350 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011351 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011352 int blob_request,
11353 uint32_t snapshotStreamId)
11354{
11355 /*translate from camera_metadata_t type to parm_type_t*/
11356 int rc = 0;
11357 int32_t hal_version = CAM_HAL_V3;
11358
11359 clear_metadata_buffer(mParameters);
11360 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11361 LOGE("Failed to set hal version in the parameters");
11362 return BAD_VALUE;
11363 }
11364
11365 /*we need to update the frame number in the parameters*/
11366 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11367 request->frame_number)) {
11368 LOGE("Failed to set the frame number in the parameters");
11369 return BAD_VALUE;
11370 }
11371
11372 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011373 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011374 LOGE("Failed to set stream type mask in the parameters");
11375 return BAD_VALUE;
11376 }
11377
11378 if (mUpdateDebugLevel) {
11379 uint32_t dummyDebugLevel = 0;
11380 /* The value of dummyDebugLevel is irrelevant. On
11381 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11382 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11383 dummyDebugLevel)) {
11384 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11385 return BAD_VALUE;
11386 }
11387 mUpdateDebugLevel = false;
11388 }
11389
11390 if(request->settings != NULL){
11391 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11392 if (blob_request)
11393 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11394 }
11395
11396 return rc;
11397}
11398
11399/*===========================================================================
11400 * FUNCTION : setReprocParameters
11401 *
11402 * DESCRIPTION: Translate framework metadata to the HAL metadata structure, and
11403 * return it.
11404 *
11405 * PARAMETERS :
11406 * @request : request that needs to be serviced
11407 *
11408 * RETURN : success: NO_ERROR
11409 * failure:
11410 *==========================================================================*/
11411int32_t QCamera3HardwareInterface::setReprocParameters(
11412 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11413 uint32_t snapshotStreamId)
11414{
11415 /*translate from camera_metadata_t type to parm_type_t*/
11416 int rc = 0;
11417
11418 if (NULL == request->settings){
11419 LOGE("Reprocess settings cannot be NULL");
11420 return BAD_VALUE;
11421 }
11422
11423 if (NULL == reprocParam) {
11424 LOGE("Invalid reprocessing metadata buffer");
11425 return BAD_VALUE;
11426 }
11427 clear_metadata_buffer(reprocParam);
11428
11429 /*we need to update the frame number in the parameters*/
11430 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11431 request->frame_number)) {
11432 LOGE("Failed to set the frame number in the parameters");
11433 return BAD_VALUE;
11434 }
11435
11436 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11437 if (rc < 0) {
11438 LOGE("Failed to translate reproc request");
11439 return rc;
11440 }
11441
11442 CameraMetadata frame_settings;
11443 frame_settings = request->settings;
11444 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11445 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11446 int32_t *crop_count =
11447 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11448 int32_t *crop_data =
11449 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11450 int32_t *roi_map =
11451 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11452 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11453 cam_crop_data_t crop_meta;
11454 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11455 crop_meta.num_of_streams = 1;
11456 crop_meta.crop_info[0].crop.left = crop_data[0];
11457 crop_meta.crop_info[0].crop.top = crop_data[1];
11458 crop_meta.crop_info[0].crop.width = crop_data[2];
11459 crop_meta.crop_info[0].crop.height = crop_data[3];
11460
11461 crop_meta.crop_info[0].roi_map.left =
11462 roi_map[0];
11463 crop_meta.crop_info[0].roi_map.top =
11464 roi_map[1];
11465 crop_meta.crop_info[0].roi_map.width =
11466 roi_map[2];
11467 crop_meta.crop_info[0].roi_map.height =
11468 roi_map[3];
11469
11470 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11471 rc = BAD_VALUE;
11472 }
11473 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11474 request->input_buffer->stream,
11475 crop_meta.crop_info[0].crop.left,
11476 crop_meta.crop_info[0].crop.top,
11477 crop_meta.crop_info[0].crop.width,
11478 crop_meta.crop_info[0].crop.height);
11479 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11480 request->input_buffer->stream,
11481 crop_meta.crop_info[0].roi_map.left,
11482 crop_meta.crop_info[0].roi_map.top,
11483 crop_meta.crop_info[0].roi_map.width,
11484 crop_meta.crop_info[0].roi_map.height);
11485 } else {
11486 LOGE("Invalid reprocess crop count %d!", *crop_count);
11487 }
11488 } else {
11489 LOGE("No crop data from matching output stream");
11490 }
11491
11492 /* These settings are not needed for regular requests, so handle them specially for
11493 reprocess requests; this information is needed for EXIF tags */
11494 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11495 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11496 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11497 if (NAME_NOT_FOUND != val) {
11498 uint32_t flashMode = (uint32_t)val;
11499 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11500 rc = BAD_VALUE;
11501 }
11502 } else {
11503 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11504 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11505 }
11506 } else {
11507 LOGH("No flash mode in reprocess settings");
11508 }
11509
11510 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11511 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11512 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11513 rc = BAD_VALUE;
11514 }
11515 } else {
11516 LOGH("No flash state in reprocess settings");
11517 }
11518
11519 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11520 uint8_t *reprocessFlags =
11521 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11522 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11523 *reprocessFlags)) {
11524 rc = BAD_VALUE;
11525 }
11526 }
11527
Thierry Strudel54dc9782017-02-15 12:12:10 -080011528 // Add exif debug data to internal metadata
11529 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11530 mm_jpeg_debug_exif_params_t *debug_params =
11531 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11532 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11533 // AE
11534 if (debug_params->ae_debug_params_valid == TRUE) {
11535 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11536 debug_params->ae_debug_params);
11537 }
11538 // AWB
11539 if (debug_params->awb_debug_params_valid == TRUE) {
11540 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11541 debug_params->awb_debug_params);
11542 }
11543 // AF
11544 if (debug_params->af_debug_params_valid == TRUE) {
11545 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11546 debug_params->af_debug_params);
11547 }
11548 // ASD
11549 if (debug_params->asd_debug_params_valid == TRUE) {
11550 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11551 debug_params->asd_debug_params);
11552 }
11553 // Stats
11554 if (debug_params->stats_debug_params_valid == TRUE) {
11555 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11556 debug_params->stats_debug_params);
11557 }
11558 // BE Stats
11559 if (debug_params->bestats_debug_params_valid == TRUE) {
11560 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11561 debug_params->bestats_debug_params);
11562 }
11563 // BHIST
11564 if (debug_params->bhist_debug_params_valid == TRUE) {
11565 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11566 debug_params->bhist_debug_params);
11567 }
11568 // 3A Tuning
11569 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11570 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11571 debug_params->q3a_tuning_debug_params);
11572 }
11573 }
11574
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011575 // Add metadata which reprocess needs
11576 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11577 cam_reprocess_info_t *repro_info =
11578 (cam_reprocess_info_t *)frame_settings.find
11579 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011580 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011581 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011582 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011583 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011584 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011585 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011586 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011587 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011588 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011589 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011590 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011591 repro_info->pipeline_flip);
11592 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11593 repro_info->af_roi);
11594 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11595 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011596 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings, the
11597 CAM_INTF_PARM_ROTATION metadata has already been added in
11598 translateToHalMetadata and HAL needs to keep this new rotation
11599 metadata. Otherwise, the old rotation info saved in the vendor tag
11600 is used */
11601 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11602 CAM_INTF_PARM_ROTATION, reprocParam) {
11603 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11604 } else {
11605 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011606 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011607 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011608 }
11609
11610 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11611 to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding;
11612 roi.width and roi.height will be the final JPEG size.
11613 For now, HAL only checks this for reprocess requests */
11614 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11615 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11616 uint8_t *enable =
11617 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11618 if (*enable == TRUE) {
11619 int32_t *crop_data =
11620 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11621 cam_stream_crop_info_t crop_meta;
11622 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11623 crop_meta.stream_id = 0;
11624 crop_meta.crop.left = crop_data[0];
11625 crop_meta.crop.top = crop_data[1];
11626 crop_meta.crop.width = crop_data[2];
11627 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011628 // The JPEG crop roi should match cpp output size
11629 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11630 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11631 crop_meta.roi_map.left = 0;
11632 crop_meta.roi_map.top = 0;
11633 crop_meta.roi_map.width = cpp_crop->crop.width;
11634 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011635 }
11636 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11637 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011638 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011639 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011640 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11641 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011642 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011643 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11644
11645 // Add JPEG scale information
11646 cam_dimension_t scale_dim;
11647 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11648 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11649 int32_t *roi =
11650 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11651 scale_dim.width = roi[2];
11652 scale_dim.height = roi[3];
11653 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11654 scale_dim);
11655 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11656 scale_dim.width, scale_dim.height, mCameraId);
11657 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011658 }
11659 }
11660
11661 return rc;
11662}
11663
11664/*===========================================================================
11665 * FUNCTION : saveRequestSettings
11666 *
11667 * DESCRIPTION: Add any settings that might have changed to the request settings
11668 * and save the settings to be applied on the frame
11669 *
11670 * PARAMETERS :
11671 * @jpegMetadata : the extracted and/or modified jpeg metadata
11672 * @request : request with initial settings
11673 *
11674 * RETURN :
11675 * camera_metadata_t* : pointer to the saved request settings
11676 *==========================================================================*/
11677camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11678 const CameraMetadata &jpegMetadata,
11679 camera3_capture_request_t *request)
11680{
11681 camera_metadata_t *resultMetadata;
11682 CameraMetadata camMetadata;
11683 camMetadata = request->settings;
11684
11685 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11686 int32_t thumbnail_size[2];
11687 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11688 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11689 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11690 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11691 }
11692
11693 if (request->input_buffer != NULL) {
11694 uint8_t reprocessFlags = 1;
11695 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11696 (uint8_t*)&reprocessFlags,
11697 sizeof(reprocessFlags));
11698 }
11699
11700 resultMetadata = camMetadata.release();
11701 return resultMetadata;
11702}
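/* Note: CameraMetadata::release() above transfers ownership of the underlying
 * camera_metadata_t to the caller; the saved settings are expected to be freed
 * later (e.g. via free_camera_metadata() or by re-wrapping them in a
 * CameraMetadata object) once the frame has been processed. */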
11703
11704/*===========================================================================
11705 * FUNCTION : setHalFpsRange
11706 *
11707 * DESCRIPTION: set FPS range parameter
11708 *
11709 *
11710 * PARAMETERS :
11711 * @settings : Metadata from framework
11712 * @hal_metadata: Metadata buffer
11713 *
11714 *
11715 * RETURN : success: NO_ERROR
11716 * failure:
11717 *              failure: BAD_VALUE
11718int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11719 metadata_buffer_t *hal_metadata)
11720{
11721 int32_t rc = NO_ERROR;
11722 cam_fps_range_t fps_range;
11723 fps_range.min_fps = (float)
11724 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11725 fps_range.max_fps = (float)
11726 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11727 fps_range.video_min_fps = fps_range.min_fps;
11728 fps_range.video_max_fps = fps_range.max_fps;
11729
11730 LOGD("aeTargetFpsRange fps: [%f %f]",
11731 fps_range.min_fps, fps_range.max_fps);
11732 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11733 * follows:
11734 * ---------------------------------------------------------------|
11735 * Video stream is absent in configure_streams |
11736 * (Camcorder preview before the first video record |
11737 * ---------------------------------------------------------------|
11738 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11739 * | | | vid_min/max_fps|
11740 * ---------------------------------------------------------------|
11741 * NO | [ 30, 240] | 240 | [240, 240] |
11742 * |-------------|-------------|----------------|
11743 * | [240, 240] | 240 | [240, 240] |
11744 * ---------------------------------------------------------------|
11745 * Video stream is present in configure_streams |
11746 * ---------------------------------------------------------------|
11747 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11748 * | | | vid_min/max_fps|
11749 * ---------------------------------------------------------------|
11750 * NO | [ 30, 240] | 240 | [240, 240] |
11751 * (camcorder prev |-------------|-------------|----------------|
11752 * after video rec | [240, 240] | 240 | [240, 240] |
11753 * is stopped) | | | |
11754 * ---------------------------------------------------------------|
11755 * YES | [ 30, 240] | 240 | [240, 240] |
11756 * |-------------|-------------|----------------|
11757 * | [240, 240] | 240 | [240, 240] |
11758 * ---------------------------------------------------------------|
11759 * When Video stream is absent in configure_streams,
11760 * preview fps = sensor_fps / batchsize
11761 * Eg: for 240fps at batchSize 4, preview = 60fps
11762 * for 120fps at batchSize 4, preview = 30fps
11763 *
11764 * When video stream is present in configure_streams, preview fps is as per
11765 * the ratio of preview buffers to video buffers requested in process
11766 * capture request
11767 */
11768 mBatchSize = 0;
11769 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11770 fps_range.min_fps = fps_range.video_max_fps;
11771 fps_range.video_min_fps = fps_range.video_max_fps;
11772 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11773 fps_range.max_fps);
11774 if (NAME_NOT_FOUND != val) {
11775 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11776 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11777 return BAD_VALUE;
11778 }
11779
11780 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11781                /* If batch mode is currently in progress and the fps changes,
11782                 * set the flag to restart the sensor */
11783 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11784 (mHFRVideoFps != fps_range.max_fps)) {
11785 mNeedSensorRestart = true;
11786 }
11787 mHFRVideoFps = fps_range.max_fps;
11788 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11789 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11790 mBatchSize = MAX_HFR_BATCH_SIZE;
11791 }
11792 }
11793 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11794
11795 }
11796 } else {
11797        /* HFR mode is a session parameter in the backend/ISP. It should be reset
11798         * when running in non-HFR mode */
11799 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11800 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11801 return BAD_VALUE;
11802 }
11803 }
11804 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11805 return BAD_VALUE;
11806 }
11807 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11808 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11809 return rc;
11810}
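/* Illustrative walk-through of the constrained high-speed path above (a sketch;
 * PREVIEW_FPS_FOR_HFR and MAX_HFR_BATCH_SIZE are HAL-internal constants, assumed
 * here to be 30 fps and the batch cap respectively):
 *   aeTargetFpsRange = [30, 240]  ->  fps_range / video fps forced to [240, 240]
 *   CAM_INTF_PARM_HFR set from the HFR_MODE_MAP lookup of 240
 *   mBatchSize = 240 / PREVIEW_FPS_FOR_HFR = 8, clamped to MAX_HFR_BATCH_SIZE
 * If batching was already active at a different max fps, mNeedSensorRestart is
 * set so the sensor is restarted with the new rate. */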
11811
11812/*===========================================================================
11813 * FUNCTION : translateToHalMetadata
11814 *
11815 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11816 *
11817 *
11818 * PARAMETERS :
11819 * @request : request sent from framework
11820 *
11821 *
11822 * RETURN : success: NO_ERROR
11823 *              failure: BAD_VALUE
11824 *==========================================================================*/
11825int QCamera3HardwareInterface::translateToHalMetadata
11826 (const camera3_capture_request_t *request,
11827 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011828 uint32_t snapshotStreamId) {
11829 if (request == nullptr || hal_metadata == nullptr) {
11830 return BAD_VALUE;
11831 }
11832
11833 int64_t minFrameDuration = getMinFrameDuration(request);
11834
11835 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11836 minFrameDuration);
11837}
11838
11839int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11840 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11841 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11842
Thierry Strudel3d639192016-09-09 11:52:26 -070011843 int rc = 0;
11844 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011845 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011846
11847 /* Do not change the order of the following list unless you know what you are
11848 * doing.
11849 * The order is laid out in such a way that parameters in the front of the table
11850 * may be used to override the parameters later in the table. Examples are:
11851 * 1. META_MODE should precede AEC/AWB/AF MODE
11852     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11853 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11854     * 4. Any mode should precede its corresponding settings
11855 */
11856 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11857 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11858 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11859 rc = BAD_VALUE;
11860 }
11861 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11862 if (rc != NO_ERROR) {
11863 LOGE("extractSceneMode failed");
11864 }
11865 }
11866
11867 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11868 uint8_t fwk_aeMode =
11869 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11870 uint8_t aeMode;
11871 int32_t redeye;
11872
11873 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11874 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011875 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11876 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011877 } else {
11878 aeMode = CAM_AE_MODE_ON;
11879 }
11880 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11881 redeye = 1;
11882 } else {
11883 redeye = 0;
11884 }
11885
11886 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11887 fwk_aeMode);
11888 if (NAME_NOT_FOUND != val) {
11889 int32_t flashMode = (int32_t)val;
11890 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11891 }
11892
11893 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11894 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11895 rc = BAD_VALUE;
11896 }
11897 }
11898
11899 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11900 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11901 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11902 fwk_whiteLevel);
11903 if (NAME_NOT_FOUND != val) {
11904 uint8_t whiteLevel = (uint8_t)val;
11905 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11906 rc = BAD_VALUE;
11907 }
11908 }
11909 }
11910
11911 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11912 uint8_t fwk_cacMode =
11913 frame_settings.find(
11914 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11915 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11916 fwk_cacMode);
11917 if (NAME_NOT_FOUND != val) {
11918 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11919 bool entryAvailable = FALSE;
11920 // Check whether Frameworks set CAC mode is supported in device or not
11921 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11922 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11923 entryAvailable = TRUE;
11924 break;
11925 }
11926 }
11927 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11928            // If the entry is not found, set a device-supported mode instead of the frameworks mode, i.e.,
11929            // Only HW ISP CAC + NO SW CAC : Advertise all 3 with HIGH doing the same as FAST in the ISP
11930            // NO HW ISP CAC + Only SW CAC : Advertise all 3 with FAST doing the same as OFF
11931 if (entryAvailable == FALSE) {
11932 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11933 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11934 } else {
11935 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11936                    // HIGH_QUALITY is not supported, so set FAST since the spec says the
11937                    // underlying device implementation can be the same for both modes.
11938 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11939 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11940                    // FAST is not supported, so neither HIGH nor FAST can be set; choose OFF
11941                    // to avoid the fps drop that high quality processing would cause
11942 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11943 } else {
11944 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11945 }
11946 }
11947 }
11948 LOGD("Final cacMode is %d", cacMode);
11949 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11950 rc = BAD_VALUE;
11951 }
11952 } else {
11953 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11954 }
11955 }
11956
Thierry Strudel2896d122017-02-23 19:18:03 -080011957 char af_value[PROPERTY_VALUE_MAX];
11958 property_get("persist.camera.af.infinity", af_value, "0");
11959
Jason Lee84ae9972017-02-24 13:24:24 -080011960 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011961 if (atoi(af_value) == 0) {
11962 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011963 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011964 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11965 fwk_focusMode);
11966 if (NAME_NOT_FOUND != val) {
11967 uint8_t focusMode = (uint8_t)val;
11968 LOGD("set focus mode %d", focusMode);
11969 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11970 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11971 rc = BAD_VALUE;
11972 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011973 }
11974 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011975 } else {
11976 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11977 LOGE("Focus forced to infinity %d", focusMode);
11978 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11979 rc = BAD_VALUE;
11980 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011981 }
11982
Jason Lee84ae9972017-02-24 13:24:24 -080011983 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11984 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011985 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11986 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11987 focalDistance)) {
11988 rc = BAD_VALUE;
11989 }
11990 }
11991
11992 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11993 uint8_t fwk_antibandingMode =
11994 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11995 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11996 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11997 if (NAME_NOT_FOUND != val) {
11998 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011999 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12000 if (m60HzZone) {
12001 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12002 } else {
12003 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12004 }
12005 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012006 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12007 hal_antibandingMode)) {
12008 rc = BAD_VALUE;
12009 }
12010 }
12011 }
12012
12013 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12014 int32_t expCompensation = frame_settings.find(
12015 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12016 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12017 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12018 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12019 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012020 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012021 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12022 expCompensation)) {
12023 rc = BAD_VALUE;
12024 }
12025 }
12026
12027 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12028 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12029 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12030 rc = BAD_VALUE;
12031 }
12032 }
12033 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12034 rc = setHalFpsRange(frame_settings, hal_metadata);
12035 if (rc != NO_ERROR) {
12036 LOGE("setHalFpsRange failed");
12037 }
12038 }
12039
12040 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12041 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12042 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12043 rc = BAD_VALUE;
12044 }
12045 }
12046
12047 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12048 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12049 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12050 fwk_effectMode);
12051 if (NAME_NOT_FOUND != val) {
12052 uint8_t effectMode = (uint8_t)val;
12053 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12054 rc = BAD_VALUE;
12055 }
12056 }
12057 }
12058
12059 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12060 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12061 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12062 colorCorrectMode)) {
12063 rc = BAD_VALUE;
12064 }
12065 }
12066
12067 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12068 cam_color_correct_gains_t colorCorrectGains;
12069 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12070 colorCorrectGains.gains[i] =
12071 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12072 }
12073 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12074 colorCorrectGains)) {
12075 rc = BAD_VALUE;
12076 }
12077 }
12078
12079 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12080 cam_color_correct_matrix_t colorCorrectTransform;
12081 cam_rational_type_t transform_elem;
12082 size_t num = 0;
12083 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12084 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12085 transform_elem.numerator =
12086 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12087 transform_elem.denominator =
12088 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12089 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12090 num++;
12091 }
12092 }
12093 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12094 colorCorrectTransform)) {
12095 rc = BAD_VALUE;
12096 }
12097 }
12098
12099 cam_trigger_t aecTrigger;
12100 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12101 aecTrigger.trigger_id = -1;
12102 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12103 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12104 aecTrigger.trigger =
12105 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12106 aecTrigger.trigger_id =
12107 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12108 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12109 aecTrigger)) {
12110 rc = BAD_VALUE;
12111 }
12112 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12113 aecTrigger.trigger, aecTrigger.trigger_id);
12114 }
12115
12116    /* af_trigger must come with a trigger id */
12117 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12118 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12119 cam_trigger_t af_trigger;
12120 af_trigger.trigger =
12121 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12122 af_trigger.trigger_id =
12123 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12124 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12125 rc = BAD_VALUE;
12126 }
12127 LOGD("AfTrigger: %d AfTriggerID: %d",
12128 af_trigger.trigger, af_trigger.trigger_id);
12129 }
12130
12131 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12132 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12133 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12134 rc = BAD_VALUE;
12135 }
12136 }
12137 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12138 cam_edge_application_t edge_application;
12139 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012140
Thierry Strudel3d639192016-09-09 11:52:26 -070012141 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12142 edge_application.sharpness = 0;
12143 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012144 edge_application.sharpness =
12145 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12146 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12147 int32_t sharpness =
12148 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12149 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12150 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12151 LOGD("Setting edge mode sharpness %d", sharpness);
12152 edge_application.sharpness = sharpness;
12153 }
12154 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012155 }
12156 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12157 rc = BAD_VALUE;
12158 }
12159 }
12160
12161 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12162 int32_t respectFlashMode = 1;
12163 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12164 uint8_t fwk_aeMode =
12165 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012166 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12167 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12168 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012169 respectFlashMode = 0;
12170 LOGH("AE Mode controls flash, ignore android.flash.mode");
12171 }
12172 }
12173 if (respectFlashMode) {
12174 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12175 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12176 LOGH("flash mode after mapping %d", val);
12177 // To check: CAM_INTF_META_FLASH_MODE usage
12178 if (NAME_NOT_FOUND != val) {
12179 uint8_t flashMode = (uint8_t)val;
12180 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12181 rc = BAD_VALUE;
12182 }
12183 }
12184 }
12185 }
12186
12187 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12188 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12189 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12190 rc = BAD_VALUE;
12191 }
12192 }
12193
12194 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12195 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12196 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12197 flashFiringTime)) {
12198 rc = BAD_VALUE;
12199 }
12200 }
12201
12202 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12203 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12204 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12205 hotPixelMode)) {
12206 rc = BAD_VALUE;
12207 }
12208 }
12209
12210 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12211 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12212 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12213 lensAperture)) {
12214 rc = BAD_VALUE;
12215 }
12216 }
12217
12218 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12219 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12220 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12221 filterDensity)) {
12222 rc = BAD_VALUE;
12223 }
12224 }
12225
12226 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12227 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12228 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12229 focalLength)) {
12230 rc = BAD_VALUE;
12231 }
12232 }
12233
12234 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12235 uint8_t optStabMode =
12236 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12237 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12238 optStabMode)) {
12239 rc = BAD_VALUE;
12240 }
12241 }
12242
12243 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12244 uint8_t videoStabMode =
12245 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12246 LOGD("videoStabMode from APP = %d", videoStabMode);
12247 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12248 videoStabMode)) {
12249 rc = BAD_VALUE;
12250 }
12251 }
12252
12253
12254 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12255 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12256 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12257 noiseRedMode)) {
12258 rc = BAD_VALUE;
12259 }
12260 }
12261
12262 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12263 float reprocessEffectiveExposureFactor =
12264 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12265 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12266 reprocessEffectiveExposureFactor)) {
12267 rc = BAD_VALUE;
12268 }
12269 }
12270
12271 cam_crop_region_t scalerCropRegion;
12272 bool scalerCropSet = false;
12273 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12274 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12275 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12276 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12277 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12278
12279 // Map coordinate system from active array to sensor output.
12280 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12281 scalerCropRegion.width, scalerCropRegion.height);
12282
12283 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12284 scalerCropRegion)) {
12285 rc = BAD_VALUE;
12286 }
12287 scalerCropSet = true;
12288 }
12289
12290 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12291 int64_t sensorExpTime =
12292 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12293 LOGD("setting sensorExpTime %lld", sensorExpTime);
12294 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12295 sensorExpTime)) {
12296 rc = BAD_VALUE;
12297 }
12298 }
12299
12300 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12301 int64_t sensorFrameDuration =
12302 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012303 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12304 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12305 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12306 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12307 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12308 sensorFrameDuration)) {
12309 rc = BAD_VALUE;
12310 }
12311 }
12312
12313 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12314 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12315 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12316 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12317 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12318 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12319 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12320 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12321 sensorSensitivity)) {
12322 rc = BAD_VALUE;
12323 }
12324 }
12325
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012326#ifndef USE_HAL_3_3
12327 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12328 int32_t ispSensitivity =
12329 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12330 if (ispSensitivity <
12331 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12332 ispSensitivity =
12333 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12334 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12335 }
12336 if (ispSensitivity >
12337 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12338 ispSensitivity =
12339 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12340 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12341 }
12342 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12343 ispSensitivity)) {
12344 rc = BAD_VALUE;
12345 }
12346 }
12347#endif
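    /* Note: the block above is compiled out for HAL v3.3 builds (USE_HAL_3_3), so on
     * those builds the framework's post-raw sensitivity boost setting is not translated
     * into CAM_INTF_META_ISP_SENSITIVITY (the tag is assumed to postdate HAL v3.3,
     * hence the guard). */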
12348
Thierry Strudel3d639192016-09-09 11:52:26 -070012349 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12350 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12351 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12352 rc = BAD_VALUE;
12353 }
12354 }
12355
12356 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12357 uint8_t fwk_facedetectMode =
12358 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12359
12360 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12361 fwk_facedetectMode);
12362
12363 if (NAME_NOT_FOUND != val) {
12364 uint8_t facedetectMode = (uint8_t)val;
12365 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12366 facedetectMode)) {
12367 rc = BAD_VALUE;
12368 }
12369 }
12370 }
12371
Thierry Strudel54dc9782017-02-15 12:12:10 -080012372 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012373 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012374 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012375 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12376 histogramMode)) {
12377 rc = BAD_VALUE;
12378 }
12379 }
12380
12381 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12382 uint8_t sharpnessMapMode =
12383 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12384 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12385 sharpnessMapMode)) {
12386 rc = BAD_VALUE;
12387 }
12388 }
12389
12390 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12391 uint8_t tonemapMode =
12392 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12393 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12394 rc = BAD_VALUE;
12395 }
12396 }
12397    /* Tonemap curve channels: ch0 = G, ch1 = B, ch2 = R */
12398    /* All tonemap channels will have the same number of points */
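    /* Each ANDROID_TONEMAP_CURVE_* entry is packed as interleaved (Pin, Pout) pairs,
     * hence the count/2 below. For example, a hypothetical two-point identity curve
     * would be { 0.0, 0.0, 1.0, 1.0 }, mapping 0.0 -> 0.0 and 1.0 -> 1.0; the
     * per-channel loops below unpack those pairs. */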
12399 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12400 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12401 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12402 cam_rgb_tonemap_curves tonemapCurves;
12403 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12404 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12405 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12406 tonemapCurves.tonemap_points_cnt,
12407 CAM_MAX_TONEMAP_CURVE_SIZE);
12408 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12409 }
12410
12411 /* ch0 = G*/
12412 size_t point = 0;
12413 cam_tonemap_curve_t tonemapCurveGreen;
12414 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12415 for (size_t j = 0; j < 2; j++) {
12416 tonemapCurveGreen.tonemap_points[i][j] =
12417 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12418 point++;
12419 }
12420 }
12421 tonemapCurves.curves[0] = tonemapCurveGreen;
12422
12423 /* ch 1 = B */
12424 point = 0;
12425 cam_tonemap_curve_t tonemapCurveBlue;
12426 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12427 for (size_t j = 0; j < 2; j++) {
12428 tonemapCurveBlue.tonemap_points[i][j] =
12429 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12430 point++;
12431 }
12432 }
12433 tonemapCurves.curves[1] = tonemapCurveBlue;
12434
12435 /* ch 2 = R */
12436 point = 0;
12437 cam_tonemap_curve_t tonemapCurveRed;
12438 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12439 for (size_t j = 0; j < 2; j++) {
12440 tonemapCurveRed.tonemap_points[i][j] =
12441 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12442 point++;
12443 }
12444 }
12445 tonemapCurves.curves[2] = tonemapCurveRed;
12446
12447 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12448 tonemapCurves)) {
12449 rc = BAD_VALUE;
12450 }
12451 }
12452
12453 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12454 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12455 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12456 captureIntent)) {
12457 rc = BAD_VALUE;
12458 }
12459 }
12460
12461 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12462 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12463 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12464 blackLevelLock)) {
12465 rc = BAD_VALUE;
12466 }
12467 }
12468
12469 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12470 uint8_t lensShadingMapMode =
12471 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12472 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12473 lensShadingMapMode)) {
12474 rc = BAD_VALUE;
12475 }
12476 }
12477
12478 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12479 cam_area_t roi;
12480 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012481 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012482
12483 // Map coordinate system from active array to sensor output.
12484 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12485 roi.rect.height);
12486
12487 if (scalerCropSet) {
12488 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12489 }
12490 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12491 rc = BAD_VALUE;
12492 }
12493 }
12494
12495 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12496 cam_area_t roi;
12497 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012498 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012499
12500 // Map coordinate system from active array to sensor output.
12501 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12502 roi.rect.height);
12503
12504 if (scalerCropSet) {
12505 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12506 }
12507 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12508 rc = BAD_VALUE;
12509 }
12510 }
12511
12512 // CDS for non-HFR non-video mode
12513 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12514 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12515 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12516 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12517 LOGE("Invalid CDS mode %d!", *fwk_cds);
12518 } else {
12519 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12520 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12521 rc = BAD_VALUE;
12522 }
12523 }
12524 }
12525
Thierry Strudel04e026f2016-10-10 11:27:36 -070012526 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012527 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012528 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012529 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12530 }
12531 if (m_bVideoHdrEnabled)
12532 vhdr = CAM_VIDEO_HDR_MODE_ON;
12533
Thierry Strudel54dc9782017-02-15 12:12:10 -080012534 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12535
12536    if (vhdr != curr_hdr_state)
12537        LOGH("PROFILE_SET_HDR_MODE %d", vhdr);
12538
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012539 rc = setVideoHdrMode(mParameters, vhdr);
12540 if (rc != NO_ERROR) {
12541 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012542 }
12543
12544 //IR
12545 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12546 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12547 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012548 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12549 uint8_t isIRon = 0;
12550
12551        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012552 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12553 LOGE("Invalid IR mode %d!", fwk_ir);
12554 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012555 if(isIRon != curr_ir_state )
12556 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12557
Thierry Strudel04e026f2016-10-10 11:27:36 -070012558 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12559 CAM_INTF_META_IR_MODE, fwk_ir)) {
12560 rc = BAD_VALUE;
12561 }
12562 }
12563 }
12564
Thierry Strudel54dc9782017-02-15 12:12:10 -080012565 //Binning Correction Mode
12566 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12567 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12568 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12569 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12570 || (0 > fwk_binning_correction)) {
12571 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12572 } else {
12573 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12574 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12575 rc = BAD_VALUE;
12576 }
12577 }
12578 }
12579
Thierry Strudel269c81a2016-10-12 12:13:59 -070012580 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12581 float aec_speed;
12582 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12583 LOGD("AEC Speed :%f", aec_speed);
12584        if (aec_speed < 0) {
12585            LOGE("Invalid AEC convergence speed %f!", aec_speed);
12586 } else {
12587 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12588 aec_speed)) {
12589 rc = BAD_VALUE;
12590 }
12591 }
12592 }
12593
12594 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12595 float awb_speed;
12596 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12597 LOGD("AWB Speed :%f", awb_speed);
12598        if (awb_speed < 0) {
12599            LOGE("Invalid AWB convergence speed %f!", awb_speed);
12600 } else {
12601 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12602 awb_speed)) {
12603 rc = BAD_VALUE;
12604 }
12605 }
12606 }
12607
Thierry Strudel3d639192016-09-09 11:52:26 -070012608 // TNR
12609 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12610 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12611 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012612 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012613 cam_denoise_param_t tnr;
12614 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12615 tnr.process_plates =
12616 (cam_denoise_process_type_t)frame_settings.find(
12617 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12618 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012619
12620        if (b_TnrRequested != curr_tnr_state)
12621            LOGH("PROFILE_SET_TNR_MODE %d", b_TnrRequested);
12622
Thierry Strudel3d639192016-09-09 11:52:26 -070012623 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12624 rc = BAD_VALUE;
12625 }
12626 }
12627
Thierry Strudel54dc9782017-02-15 12:12:10 -080012628 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012629 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012630 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012631 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12632 *exposure_metering_mode)) {
12633 rc = BAD_VALUE;
12634 }
12635 }
12636
Thierry Strudel3d639192016-09-09 11:52:26 -070012637 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12638 int32_t fwk_testPatternMode =
12639 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12640 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12641 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12642
12643 if (NAME_NOT_FOUND != testPatternMode) {
12644 cam_test_pattern_data_t testPatternData;
12645 memset(&testPatternData, 0, sizeof(testPatternData));
12646 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12647 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12648 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12649 int32_t *fwk_testPatternData =
12650 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12651 testPatternData.r = fwk_testPatternData[0];
12652 testPatternData.b = fwk_testPatternData[3];
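                // The framework packs ANDROID_SENSOR_TEST_PATTERN_DATA as
                // [R, Gr, Gb, B] (an ordering assumption consistent with the
                // assignments above and the Android metadata definition); the
                // switch below remaps the two green samples for sensors whose
                // Bayer readout order differs.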
12653 switch (gCamCapability[mCameraId]->color_arrangement) {
12654 case CAM_FILTER_ARRANGEMENT_RGGB:
12655 case CAM_FILTER_ARRANGEMENT_GRBG:
12656 testPatternData.gr = fwk_testPatternData[1];
12657 testPatternData.gb = fwk_testPatternData[2];
12658 break;
12659 case CAM_FILTER_ARRANGEMENT_GBRG:
12660 case CAM_FILTER_ARRANGEMENT_BGGR:
12661 testPatternData.gr = fwk_testPatternData[2];
12662 testPatternData.gb = fwk_testPatternData[1];
12663 break;
12664 default:
12665 LOGE("color arrangement %d is not supported",
12666 gCamCapability[mCameraId]->color_arrangement);
12667 break;
12668 }
12669 }
12670 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12671 testPatternData)) {
12672 rc = BAD_VALUE;
12673 }
12674 } else {
12675 LOGE("Invalid framework sensor test pattern mode %d",
12676 fwk_testPatternMode);
12677 }
12678 }
12679
12680 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12681 size_t count = 0;
12682 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12683 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12684 gps_coords.data.d, gps_coords.count, count);
12685 if (gps_coords.count != count) {
12686 rc = BAD_VALUE;
12687 }
12688 }
12689
12690 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12691 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12692 size_t count = 0;
12693 const char *gps_methods_src = (const char *)
12694 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12695 memset(gps_methods, '\0', sizeof(gps_methods));
12696 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12697 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12698 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12699 if (GPS_PROCESSING_METHOD_SIZE != count) {
12700 rc = BAD_VALUE;
12701 }
12702 }
12703
12704 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12705 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12706 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12707 gps_timestamp)) {
12708 rc = BAD_VALUE;
12709 }
12710 }
12711
12712 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12713 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12714 cam_rotation_info_t rotation_info;
12715 if (orientation == 0) {
12716 rotation_info.rotation = ROTATE_0;
12717 } else if (orientation == 90) {
12718 rotation_info.rotation = ROTATE_90;
12719 } else if (orientation == 180) {
12720 rotation_info.rotation = ROTATE_180;
12721 } else if (orientation == 270) {
12722 rotation_info.rotation = ROTATE_270;
12723 }
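        // Note: the framework restricts ANDROID_JPEG_ORIENTATION to clockwise
        // multiples of 90 degrees, so one of the branches above is expected to
        // match; any other value would leave rotation_info.rotation unset.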
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012724 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012725 rotation_info.streamId = snapshotStreamId;
12726 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12727 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12728 rc = BAD_VALUE;
12729 }
12730 }
12731
12732 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12733 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12734 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12735 rc = BAD_VALUE;
12736 }
12737 }
12738
12739 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12740 uint32_t thumb_quality = (uint32_t)
12741 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12742 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12743 thumb_quality)) {
12744 rc = BAD_VALUE;
12745 }
12746 }
12747
12748 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12749 cam_dimension_t dim;
12750 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12751 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12752 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12753 rc = BAD_VALUE;
12754 }
12755 }
12756
12757 // Internal metadata
12758 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12759 size_t count = 0;
12760 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12761 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12762 privatedata.data.i32, privatedata.count, count);
12763 if (privatedata.count != count) {
12764 rc = BAD_VALUE;
12765 }
12766 }
12767
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012768 // ISO/Exposure Priority
12769 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12770 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12771 cam_priority_mode_t mode =
12772 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12773 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12774 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12775 use_iso_exp_pty.previewOnly = FALSE;
12776 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12777 use_iso_exp_pty.value = *ptr;
12778
12779 if(CAM_ISO_PRIORITY == mode) {
12780 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12781 use_iso_exp_pty)) {
12782 rc = BAD_VALUE;
12783 }
12784 }
12785 else {
12786 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12787 use_iso_exp_pty)) {
12788 rc = BAD_VALUE;
12789 }
12790 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012791
12792 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12793 rc = BAD_VALUE;
12794 }
12795 }
12796 } else {
12797 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12798 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012799 }
12800 }
12801
12802 // Saturation
12803 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12804 int32_t* use_saturation =
12805 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12806 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12807 rc = BAD_VALUE;
12808 }
12809 }
12810
Thierry Strudel3d639192016-09-09 11:52:26 -070012811 // EV step
12812 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12813 gCamCapability[mCameraId]->exp_compensation_step)) {
12814 rc = BAD_VALUE;
12815 }
12816
12817 // CDS info
12818 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12819 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12820 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12821
12822 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12823 CAM_INTF_META_CDS_DATA, *cdsData)) {
12824 rc = BAD_VALUE;
12825 }
12826 }
12827
Shuzhen Wang19463d72016-03-08 11:09:52 -080012828 // Hybrid AE
12829 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12830 uint8_t *hybrid_ae = (uint8_t *)
12831 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12832
12833 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12834 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12835 rc = BAD_VALUE;
12836 }
12837 }
12838
Shuzhen Wang14415f52016-11-16 18:26:18 -080012839 // Histogram
12840 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12841 uint8_t histogramMode =
12842 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12843 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12844 histogramMode)) {
12845 rc = BAD_VALUE;
12846 }
12847 }
12848
12849 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12850 int32_t histogramBins =
12851 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12852 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12853 histogramBins)) {
12854 rc = BAD_VALUE;
12855 }
12856 }
12857
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012858 // Tracking AF
12859 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12860 uint8_t trackingAfTrigger =
12861 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12862 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12863 trackingAfTrigger)) {
12864 rc = BAD_VALUE;
12865 }
12866 }
12867
Thierry Strudel3d639192016-09-09 11:52:26 -070012868 return rc;
12869}
12870
12871/*===========================================================================
12872 * FUNCTION : captureResultCb
12873 *
12874 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12875 *
12876 * PARAMETERS :
12877 * @frame : frame information from mm-camera-interface
12878 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12879 * @userdata: userdata
12880 *
12881 * RETURN : NONE
12882 *==========================================================================*/
12883void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12884 camera3_stream_buffer_t *buffer,
12885 uint32_t frame_number, bool isInputBuffer, void *userdata)
12886{
12887 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12888 if (hw == NULL) {
12889 LOGE("Invalid hw %p", hw);
12890 return;
12891 }
12892
12893 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12894 return;
12895}
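/* This static callback and the camera3_device_ops entry points that follow use the
 * same trampoline pattern: recover the QCamera3HardwareInterface instance from the
 * opaque userdata/priv pointer, validate it, and forward the call to the
 * corresponding member function. */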
12896
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012897/*===========================================================================
12898 * FUNCTION : setBufferErrorStatus
12899 *
12900 * DESCRIPTION: Callback handler for channels to report any buffer errors
12901 *
12902 * PARAMETERS :
12903 * @ch : Channel on which buffer error is reported from
12904 * @frame_number : frame number on which buffer error is reported on
12905 * @buffer_status : buffer error status
12906 * @userdata: userdata
12907 *
12908 * RETURN : NONE
12909 *==========================================================================*/
12910void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12911 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12912{
12913 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12914 if (hw == NULL) {
12915 LOGE("Invalid hw %p", hw);
12916 return;
12917 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012918
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012919 hw->setBufferErrorStatus(ch, frame_number, err);
12920 return;
12921}
12922
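// Member counterpart of the static callback above: tags every pending buffer of
// the given frame that belongs to the reporting channel with
// CAMERA3_BUFFER_STATUS_ERROR so it can later be returned to the framework as
// failed.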
12923void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12924 uint32_t frameNumber, camera3_buffer_status_t err)
12925{
12926 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12927 pthread_mutex_lock(&mMutex);
12928
12929 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12930 if (req.frame_number != frameNumber)
12931 continue;
12932 for (auto& k : req.mPendingBufferList) {
12933 if(k.stream->priv == ch) {
12934 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12935 }
12936 }
12937 }
12938
12939 pthread_mutex_unlock(&mMutex);
12940 return;
12941}
Thierry Strudel3d639192016-09-09 11:52:26 -070012942/*===========================================================================
12943 * FUNCTION : initialize
12944 *
12945 * DESCRIPTION: Pass framework callback pointers to HAL
12946 *
12947 * PARAMETERS :
12948 *
12949 *
12950 * RETURN : Success : 0
12951 * Failure: -ENODEV
12952 *==========================================================================*/
12953
12954int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12955 const camera3_callback_ops_t *callback_ops)
12956{
12957 LOGD("E");
12958 QCamera3HardwareInterface *hw =
12959 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12960 if (!hw) {
12961 LOGE("NULL camera device");
12962 return -ENODEV;
12963 }
12964
12965 int rc = hw->initialize(callback_ops);
12966 LOGD("X");
12967 return rc;
12968}
12969
12970/*===========================================================================
12971 * FUNCTION : configure_streams
12972 *
12973 * DESCRIPTION: Entry point for the framework to configure the active stream set
12974 *
12975 * PARAMETERS :
12976 *
12977 *
12978 * RETURN : Success: 0
12979 * Failure: -EINVAL (if stream configuration is invalid)
12980 * -ENODEV (fatal error)
12981 *==========================================================================*/
12982
12983int QCamera3HardwareInterface::configure_streams(
12984 const struct camera3_device *device,
12985 camera3_stream_configuration_t *stream_list)
12986{
12987 LOGD("E");
12988 QCamera3HardwareInterface *hw =
12989 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12990 if (!hw) {
12991 LOGE("NULL camera device");
12992 return -ENODEV;
12993 }
12994 int rc = hw->configureStreams(stream_list);
12995 LOGD("X");
12996 return rc;
12997}
12998
12999/*===========================================================================
13000 * FUNCTION : construct_default_request_settings
13001 *
13002 * DESCRIPTION: Configure a settings buffer to meet the required use case
13003 *
13004 * PARAMETERS :
13005 *
13006 *
13007 * RETURN : Success: Return valid metadata
13008 * Failure: Return NULL
13009 *==========================================================================*/
13010const camera_metadata_t* QCamera3HardwareInterface::
13011 construct_default_request_settings(const struct camera3_device *device,
13012 int type)
13013{
13014
13015 LOGD("E");
13016 camera_metadata_t* fwk_metadata = NULL;
13017 QCamera3HardwareInterface *hw =
13018 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13019 if (!hw) {
13020 LOGE("NULL camera device");
13021 return NULL;
13022 }
13023
13024 fwk_metadata = hw->translateCapabilityToMetadata(type);
13025
13026 LOGD("X");
13027 return fwk_metadata;
13028}
13029
13030/*===========================================================================
13031 * FUNCTION : process_capture_request
13032 *
13033 * DESCRIPTION: Entry point for the framework to submit a capture request
13034 *
13035 * PARAMETERS :
13036 *
13037 *
13038 * RETURN :
13039 *==========================================================================*/
13040int QCamera3HardwareInterface::process_capture_request(
13041 const struct camera3_device *device,
13042 camera3_capture_request_t *request)
13043{
13044 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013045 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013046 QCamera3HardwareInterface *hw =
13047 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13048 if (!hw) {
13049 LOGE("NULL camera device");
13050 return -EINVAL;
13051 }
13052
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013053 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013054 LOGD("X");
13055 return rc;
13056}
13057
13058/*===========================================================================
13059 * FUNCTION : dump
13060 *
13061 * DESCRIPTION: Dump HAL state and debug information to the given file descriptor
13062 *
13063 * PARAMETERS :
13064 *
13065 *
13066 * RETURN :
13067 *==========================================================================*/
13068
13069void QCamera3HardwareInterface::dump(
13070 const struct camera3_device *device, int fd)
13071{
13072 /* Log level property is read when "adb shell dumpsys media.camera" is
13073 called so that the log level can be controlled without restarting
13074 the media server */
13075 getLogLevel();
13076
13077 LOGD("E");
13078 QCamera3HardwareInterface *hw =
13079 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13080 if (!hw) {
13081 LOGE("NULL camera device");
13082 return;
13083 }
13084
13085 hw->dump(fd);
13086 LOGD("X");
13087 return;
13088}
13089
13090/*===========================================================================
13091 * FUNCTION : flush
13092 *
13093 * DESCRIPTION: Flush all in-flight captures and return their buffers/results
13094 *              to the framework as quickly as possible
13095 * PARAMETERS :
13096 * @device : camera3 device handle
13097 *
13098 * RETURN : 0 on success, -EINVAL or -ENODEV on failure
13099 *==========================================================================*/
13100
13101int QCamera3HardwareInterface::flush(
13102 const struct camera3_device *device)
13103{
13104 int rc;
13105 LOGD("E");
13106 QCamera3HardwareInterface *hw =
13107 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13108 if (!hw) {
13109 LOGE("NULL camera device");
13110 return -EINVAL;
13111 }
13112
13113 pthread_mutex_lock(&hw->mMutex);
13114 // Validate current state
13115 switch (hw->mState) {
13116 case STARTED:
13117 /* valid state */
13118 break;
13119
13120 case ERROR:
13121 pthread_mutex_unlock(&hw->mMutex);
13122 hw->handleCameraDeviceError();
13123 return -ENODEV;
13124
13125 default:
13126 LOGI("Flush returned during state %d", hw->mState);
13127 pthread_mutex_unlock(&hw->mMutex);
13128 return 0;
13129 }
13130 pthread_mutex_unlock(&hw->mMutex);
13131
13132 rc = hw->flush(true /* restart channels */ );
13133 LOGD("X");
13134 return rc;
13135}
13136
13137/*===========================================================================
13138 * FUNCTION : close_camera_device
13139 *
13140 * DESCRIPTION: Close the camera device and free the HAL instance
13141 *
13142 * PARAMETERS :
13143 * @device : hw_device_t handle of the camera device to close
13144 *
13145 * RETURN : NO_ERROR on success, BAD_VALUE if the device handle is invalid
13146 *==========================================================================*/
13147int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13148{
13149 int ret = NO_ERROR;
13150 QCamera3HardwareInterface *hw =
13151 reinterpret_cast<QCamera3HardwareInterface *>(
13152 reinterpret_cast<camera3_device_t *>(device)->priv);
13153 if (!hw) {
13154 LOGE("NULL camera device");
13155 return BAD_VALUE;
13156 }
13157
13158 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13159 delete hw;
13160 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013161 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013162 return ret;
13163}
13164
13165/*===========================================================================
13166 * FUNCTION : getWaveletDenoiseProcessPlate
13167 *
13168 * DESCRIPTION: query wavelet denoise process plate
13169 *
13170 * PARAMETERS : None
13171 *
13172 * RETURN : WNR process plate value
13173 *==========================================================================*/
13174cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13175{
13176 char prop[PROPERTY_VALUE_MAX];
13177 memset(prop, 0, sizeof(prop));
13178 property_get("persist.denoise.process.plates", prop, "0");
13179 int processPlate = atoi(prop);
13180 switch(processPlate) {
13181 case 0:
13182 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13183 case 1:
13184 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13185 case 2:
13186 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13187 case 3:
13188 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13189 default:
13190 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13191 }
13192}
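/* Illustrative sketch: how the plate returned above is typically consumed when
 * building a post-processing feature config. The pp_config field names shown
 * here (denoise2d, CAM_QCOM_FEATURE_DENOISE2D) come from the camera interface
 * headers and are an assumption about the call site, not a quote of it:
 *
 *   cam_pp_feature_config_t pp_config;
 *   memset(&pp_config, 0, sizeof(pp_config));
 *   pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
 *   pp_config.denoise2d.denoise_enable = 1;
 *   pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
 *
 * The property maps 0/1/2/3 to YCBCR_PLANE, CBCR_ONLY, STREAMLINE_YCBCR and
 * STREAMLINED_CBCR respectively, with STREAMLINE_YCBCR as the fallback.
 */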
13193
13194
13195/*===========================================================================
13196 * FUNCTION : getTemporalDenoiseProcessPlate
13197 *
13198 * DESCRIPTION: query temporal denoise process plate
13199 *
13200 * PARAMETERS : None
13201 *
13202 * RETURN : TNR process plate value
13203 *==========================================================================*/
13204cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13205{
13206 char prop[PROPERTY_VALUE_MAX];
13207 memset(prop, 0, sizeof(prop));
13208 property_get("persist.tnr.process.plates", prop, "0");
13209 int processPlate = atoi(prop);
13210 switch(processPlate) {
13211 case 0:
13212 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13213 case 1:
13214 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13215 case 2:
13216 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13217 case 3:
13218 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13219 default:
13220 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13221 }
13222}
13223
13224
13225/*===========================================================================
13226 * FUNCTION : extractSceneMode
13227 *
13228 * DESCRIPTION: Extract scene mode from framework-set metadata
13229 *
13230 * PARAMETERS :
13231 * @frame_settings: CameraMetadata reference
13232 * @metaMode: ANDROID_CONTROL_MODE value set by the framework
13233 * @hal_metadata: hal metadata structure
13234 *
13235 * RETURN : int32_t type of status (NO_ERROR on success, BAD_VALUE on failure)
13236 *==========================================================================*/
13237int32_t QCamera3HardwareInterface::extractSceneMode(
13238 const CameraMetadata &frame_settings, uint8_t metaMode,
13239 metadata_buffer_t *hal_metadata)
13240{
13241 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013242 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13243
13244 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13245 LOGD("Ignoring control mode OFF_KEEP_STATE");
13246 return NO_ERROR;
13247 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013248
13249 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13250 camera_metadata_ro_entry entry =
13251 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13252 if (0 == entry.count)
13253 return rc;
13254
13255 uint8_t fwk_sceneMode = entry.data.u8[0];
13256
13257 int val = lookupHalName(SCENE_MODES_MAP,
13258 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13259 fwk_sceneMode);
13260 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013261 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013262 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013263 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013264 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013265
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013266 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13267 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13268 }
13269
13270 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13271 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013272 cam_hdr_param_t hdr_params;
13273 hdr_params.hdr_enable = 1;
13274 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13275 hdr_params.hdr_need_1x = false;
13276 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13277 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13278 rc = BAD_VALUE;
13279 }
13280 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013281
Thierry Strudel3d639192016-09-09 11:52:26 -070013282 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13283 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13284 rc = BAD_VALUE;
13285 }
13286 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013287
13288 if (mForceHdrSnapshot) {
13289 cam_hdr_param_t hdr_params;
13290 hdr_params.hdr_enable = 1;
13291 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13292 hdr_params.hdr_need_1x = false;
13293 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13294 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13295 rc = BAD_VALUE;
13296 }
13297 }
13298
Thierry Strudel3d639192016-09-09 11:52:26 -070013299 return rc;
13300}
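/* Illustrative caller sketch (the exact call site is an assumption; this shows
 * the inputs the function above expects from the request translation path):
 *
 *   uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
 *   rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
 *
 * For an HDR scene mode request this ends up enabling either sensor HDR via
 * setSensorHDR() or multi-frame bracketing via CAM_INTF_PARM_HAL_BRACKETING_HDR,
 * in addition to setting the bestshot mode entry.
 */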
13301
13302/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013303 * FUNCTION : setVideoHdrMode
13304 *
13305 * DESCRIPTION: Set Video HDR mode from framework-set metadata
13306 *
13307 * PARAMETERS :
13308 * @hal_metadata: hal metadata structure
13309 * @vhdr: requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE vendor tag value)
13310 *
13311 * RETURN : int32_t type of status (NO_ERROR on success, BAD_VALUE on failure)
13312 *==========================================================================*/
13313int32_t QCamera3HardwareInterface::setVideoHdrMode(
13314 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13315{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013316 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13317 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13318 }
13319
13320 LOGE("Invalid Video HDR mode %d!", vhdr);
13321 return BAD_VALUE;
13322}
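/* Illustrative caller sketch (a hypothetical vendor-tag handler; treat the
 * surrounding translation code as an assumption, not a quote of it):
 *
 *   if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
 *       cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
 *               frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
 *       rc = setVideoHdrMode(mParameters, vhdr);
 *   }
 */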
13323
13324/*===========================================================================
13325 * FUNCTION : setSensorHDR
13326 *
13327 * DESCRIPTION: Enable/disable sensor HDR.
13328 *
13329 * PARAMETERS :
13330 * @hal_metadata: hal metadata structure
13331 * @enable: boolean whether to enable/disable sensor HDR
13332 * @isVideoHdrEnable: true when called from the video HDR path; in that case
13333 *                    m_bSensorHDREnabled is left untouched
13334 * RETURN : int32_t type of status (NO_ERROR on success, BAD_VALUE on failure)
13334 *==========================================================================*/
13335int32_t QCamera3HardwareInterface::setSensorHDR(
13336 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13337{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013338 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013339 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13340
13341 if (enable) {
13342 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13343 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13344 #ifdef _LE_CAMERA_
13345 //Default to staggered HDR for IOT
13346 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13347 #else
13348 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13349 #endif
13350 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13351 }
13352
13353 bool isSupported = false;
13354 switch (sensor_hdr) {
13355 case CAM_SENSOR_HDR_IN_SENSOR:
13356 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13357 CAM_QCOM_FEATURE_SENSOR_HDR) {
13358 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013359 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013360 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013361 break;
13362 case CAM_SENSOR_HDR_ZIGZAG:
13363 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13364 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13365 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013366 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013367 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013368 break;
13369 case CAM_SENSOR_HDR_STAGGERED:
13370 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13371 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13372 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013373 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013374 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013375 break;
13376 case CAM_SENSOR_HDR_OFF:
13377 isSupported = true;
13378 LOGD("Turning off sensor HDR");
13379 break;
13380 default:
13381 LOGE("HDR mode %d not supported", sensor_hdr);
13382 rc = BAD_VALUE;
13383 break;
13384 }
13385
13386 if(isSupported) {
13387 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13388 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13389 rc = BAD_VALUE;
13390 } else {
13391 if(!isVideoHdrEnable)
13392 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013393 }
13394 }
13395 return rc;
13396}
13397
13398/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013399 * FUNCTION : needRotationReprocess
13400 *
13401 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13402 *
13403 * PARAMETERS : none
13404 *
13405 * RETURN : true: needed
13406 * false: no need
13407 *==========================================================================*/
13408bool QCamera3HardwareInterface::needRotationReprocess()
13409{
13410 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13411        // pp has the capability to process rotation, so use the reprocess path for it
13412 LOGH("need do reprocess for rotation");
13413 return true;
13414 }
13415
13416 return false;
13417}
13418
13419/*===========================================================================
13420 * FUNCTION : needReprocess
13421 *
13422 * DESCRIPTION: if reprocess is needed
13423 *
13424 * PARAMETERS : none
13425 *
13426 * RETURN : true: needed
13427 * false: no need
13428 *==========================================================================*/
13429bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13430{
13431 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13432 // TODO: add for ZSL HDR later
13433 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13434 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13435 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13436 return true;
13437 } else {
13438 LOGH("already post processed frame");
13439 return false;
13440 }
13441 }
13442 return needRotationReprocess();
13443}
13444
13445/*===========================================================================
13446 * FUNCTION : needJpegExifRotation
13447 *
13448 * DESCRIPTION: if EXIF-based JPEG rotation is needed
13449 *
13450 * PARAMETERS : none
13451 *
13452 * RETURN : true: needed
13453 * false: no need
13454 *==========================================================================*/
13455bool QCamera3HardwareInterface::needJpegExifRotation()
13456{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013457 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013458 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13459 LOGD("Need use Jpeg EXIF Rotation");
13460 return true;
13461 }
13462 return false;
13463}
13464
13465/*===========================================================================
13466 * FUNCTION : addOfflineReprocChannel
13467 *
13468 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13469 * coming from input channel
13470 *
13471 * PARAMETERS :
13472 * @config : reprocess configuration
13473 * @inputChHandle : pointer to the input (source) channel
13474 *
13475 *
13476 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13477 *==========================================================================*/
13478QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13479 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13480{
13481 int32_t rc = NO_ERROR;
13482 QCamera3ReprocessChannel *pChannel = NULL;
13483
13484 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013485 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13486 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013487 if (NULL == pChannel) {
13488 LOGE("no mem for reprocess channel");
13489 return NULL;
13490 }
13491
13492 rc = pChannel->initialize(IS_TYPE_NONE);
13493 if (rc != NO_ERROR) {
13494 LOGE("init reprocess channel failed, ret = %d", rc);
13495 delete pChannel;
13496 return NULL;
13497 }
13498
13499 // pp feature config
13500 cam_pp_feature_config_t pp_config;
13501 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13502
13503 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13504 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13505 & CAM_QCOM_FEATURE_DSDN) {
13506        //Use CPP CDS in case h/w supports it.
13507 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13508 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13509 }
13510 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13511 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13512 }
13513
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013514 if (config.hdr_param.hdr_enable) {
13515 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13516 pp_config.hdr_param = config.hdr_param;
13517 }
13518
13519 if (mForceHdrSnapshot) {
13520 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13521 pp_config.hdr_param.hdr_enable = 1;
13522 pp_config.hdr_param.hdr_need_1x = 0;
13523 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13524 }
13525
Thierry Strudel3d639192016-09-09 11:52:26 -070013526 rc = pChannel->addReprocStreamsFromSource(pp_config,
13527 config,
13528 IS_TYPE_NONE,
13529 mMetadataChannel);
13530
13531 if (rc != NO_ERROR) {
13532 delete pChannel;
13533 return NULL;
13534 }
13535 return pChannel;
13536}
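/* Illustrative caller sketch (the reprocess_config_t fields shown are
 * assumptions based on how config.padding and config.hdr_param are used above;
 * the real config is filled by the owning processing channel):
 *
 *   reprocess_config_t reproc_cfg;
 *   memset(&reproc_cfg, 0, sizeof(reproc_cfg));
 *   reproc_cfg.padding = &gCamCapability[mCameraId]->padding_info;
 *   // fill in input/output stream dimensions, format and hdr_param as needed
 *   QCamera3ReprocessChannel *reprocChannel =
 *           addOfflineReprocChannel(reproc_cfg, picChannel);
 *   if (reprocChannel == NULL) {
 *       // allocation or initialize(IS_TYPE_NONE) failed; abort the reprocess
 *   }
 */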
13537
13538/*===========================================================================
13539 * FUNCTION : getMobicatMask
13540 *
13541 * DESCRIPTION: returns mobicat mask
13542 *
13543 * PARAMETERS : none
13544 *
13545 * RETURN : mobicat mask
13546 *
13547 *==========================================================================*/
13548uint8_t QCamera3HardwareInterface::getMobicatMask()
13549{
13550 return m_MobicatMask;
13551}
13552
13553/*===========================================================================
13554 * FUNCTION : setMobicat
13555 *
13556 * DESCRIPTION: set Mobicat on/off.
13557 *
13558 * PARAMETERS : None
13560 *
13561 * RETURN : int32_t type of status
13562 * NO_ERROR -- success
13563 * non-zero failure code
13564 *==========================================================================*/
13565int32_t QCamera3HardwareInterface::setMobicat()
13566{
13567 char value [PROPERTY_VALUE_MAX];
13568 property_get("persist.camera.mobicat", value, "0");
13569 int32_t ret = NO_ERROR;
13570 uint8_t enableMobi = (uint8_t)atoi(value);
13571
13572 if (enableMobi) {
13573 tune_cmd_t tune_cmd;
13574 tune_cmd.type = SET_RELOAD_CHROMATIX;
13575 tune_cmd.module = MODULE_ALL;
13576 tune_cmd.value = TRUE;
13577 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13578 CAM_INTF_PARM_SET_VFE_COMMAND,
13579 tune_cmd);
13580
13581 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13582 CAM_INTF_PARM_SET_PP_COMMAND,
13583 tune_cmd);
13584 }
13585 m_MobicatMask = enableMobi;
13586
13587 return ret;
13588}
13589
13590/*===========================================================================
13591* FUNCTION : getLogLevel
13592*
13593* DESCRIPTION: Reads the log level property into a variable
13594*
13595* PARAMETERS :
13596* None
13597*
13598* RETURN :
13599* None
13600*==========================================================================*/
13601void QCamera3HardwareInterface::getLogLevel()
13602{
13603 char prop[PROPERTY_VALUE_MAX];
13604 uint32_t globalLogLevel = 0;
13605
13606 property_get("persist.camera.hal.debug", prop, "0");
13607 int val = atoi(prop);
13608 if (0 <= val) {
13609 gCamHal3LogLevel = (uint32_t)val;
13610 }
13611
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013612 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013613 gKpiDebugLevel = atoi(prop);
13614
13615 property_get("persist.camera.global.debug", prop, "0");
13616 val = atoi(prop);
13617 if (0 <= val) {
13618 globalLogLevel = (uint32_t)val;
13619 }
13620
13621 /* Highest log level among hal.logs and global.logs is selected */
13622 if (gCamHal3LogLevel < globalLogLevel)
13623 gCamHal3LogLevel = globalLogLevel;
13624
13625 return;
13626}
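/* Usage note (illustrative): the knobs above are regular Android system
 * properties, so the effective HAL log level can be raised at runtime, e.g.
 *
 *   adb shell setprop persist.camera.hal.debug 4
 *   adb shell setprop persist.camera.global.debug 3
 *   adb shell dumpsys media.camera      # re-reads the properties via dump()
 *
 * The larger of hal.debug and global.debug wins, as implemented above.
 */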
13627
13628/*===========================================================================
13629 * FUNCTION : validateStreamRotations
13630 *
13631 * DESCRIPTION: Check if the rotations requested are supported
13632 *
13633 * PARAMETERS :
13634 * @stream_list : streams to be configured
13635 *
13636 * RETURN : NO_ERROR on success
13637 * -EINVAL on failure
13638 *
13639 *==========================================================================*/
13640int QCamera3HardwareInterface::validateStreamRotations(
13641 camera3_stream_configuration_t *streamList)
13642{
13643 int rc = NO_ERROR;
13644
13645 /*
13646 * Loop through all streams requested in configuration
13647 * Check if unsupported rotations have been requested on any of them
13648 */
13649 for (size_t j = 0; j < streamList->num_streams; j++){
13650 camera3_stream_t *newStream = streamList->streams[j];
13651
13652 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13653 bool isImplDef = (newStream->format ==
13654 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13655 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13656 isImplDef);
13657
13658 if (isRotated && (!isImplDef || isZsl)) {
13659            LOGE("Error: Unsupported rotation of %d requested for stream "
13660 "type:%d and stream format:%d",
13661 newStream->rotation, newStream->stream_type,
13662 newStream->format);
13663 rc = -EINVAL;
13664 break;
13665 }
13666 }
13667
13668 return rc;
13669}
13670
13671/*===========================================================================
13672* FUNCTION : getFlashInfo
13673*
13674* DESCRIPTION: Retrieve information about whether the device has a flash.
13675*
13676* PARAMETERS :
13677* @cameraId : Camera id to query
13678* @hasFlash : Boolean indicating whether there is a flash device
13679* associated with given camera
13680* @flashNode : If a flash device exists, this will be its device node.
13681*
13682* RETURN :
13683* None
13684*==========================================================================*/
13685void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13686 bool& hasFlash,
13687 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13688{
13689 cam_capability_t* camCapability = gCamCapability[cameraId];
13690 if (NULL == camCapability) {
13691 hasFlash = false;
13692 flashNode[0] = '\0';
13693 } else {
13694 hasFlash = camCapability->flash_available;
13695 strlcpy(flashNode,
13696 (char*)camCapability->flash_dev_name,
13697 QCAMERA_MAX_FILEPATH_LENGTH);
13698 }
13699}
13700
13701/*===========================================================================
13702* FUNCTION : getEepromVersionInfo
13703*
13704* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13705*
13706* PARAMETERS : None
13707*
13708* RETURN : string describing EEPROM version
13709* "\0" if no such info available
13710*==========================================================================*/
13711const char *QCamera3HardwareInterface::getEepromVersionInfo()
13712{
13713 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13714}
13715
13716/*===========================================================================
13717* FUNCTION : getLdafCalib
13718*
13719* DESCRIPTION: Retrieve Laser AF calibration data
13720*
13721* PARAMETERS : None
13722*
13723* RETURN : Two uint32_t describing laser AF calibration data
13724* NULL if none is available.
13725*==========================================================================*/
13726const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13727{
13728 if (mLdafCalibExist) {
13729 return &mLdafCalib[0];
13730 } else {
13731 return NULL;
13732 }
13733}
13734
13735/*===========================================================================
13736 * FUNCTION : dynamicUpdateMetaStreamInfo
13737 *
13738 * DESCRIPTION: This function:
13739 * (1) stops all the channels
13740 * (2) returns error on pending requests and buffers
13741 * (3) sends metastream_info in setparams
13742 * (4) starts all channels
13743 * This is useful when sensor has to be restarted to apply any
13744 * settings such as frame rate from a different sensor mode
13745 *
13746 * PARAMETERS : None
13747 *
13748 * RETURN : NO_ERROR on success
13749 * Error codes on failure
13750 *
13751 *==========================================================================*/
13752int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13753{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013754 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013755 int rc = NO_ERROR;
13756
13757 LOGD("E");
13758
13759 rc = stopAllChannels();
13760 if (rc < 0) {
13761 LOGE("stopAllChannels failed");
13762 return rc;
13763 }
13764
13765 rc = notifyErrorForPendingRequests();
13766 if (rc < 0) {
13767 LOGE("notifyErrorForPendingRequests failed");
13768 return rc;
13769 }
13770
13771 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13772        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x, "
13773 "Format:%d",
13774 mStreamConfigInfo.type[i],
13775 mStreamConfigInfo.stream_sizes[i].width,
13776 mStreamConfigInfo.stream_sizes[i].height,
13777 mStreamConfigInfo.postprocess_mask[i],
13778 mStreamConfigInfo.format[i]);
13779 }
13780
13781 /* Send meta stream info once again so that ISP can start */
13782 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13783 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13784 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13785 mParameters);
13786 if (rc < 0) {
13787 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13788 }
13789
13790 rc = startAllChannels();
13791 if (rc < 0) {
13792 LOGE("startAllChannels failed");
13793 return rc;
13794 }
13795
13796 LOGD("X");
13797 return rc;
13798}
13799
13800/*===========================================================================
13801 * FUNCTION : stopAllChannels
13802 *
13803 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13804 *
13805 * PARAMETERS : None
13806 *
13807 * RETURN : NO_ERROR on success
13808 * Error codes on failure
13809 *
13810 *==========================================================================*/
13811int32_t QCamera3HardwareInterface::stopAllChannels()
13812{
13813 int32_t rc = NO_ERROR;
13814
13815 LOGD("Stopping all channels");
13816 // Stop the Streams/Channels
13817 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13818 it != mStreamInfo.end(); it++) {
13819 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13820 if (channel) {
13821 channel->stop();
13822 }
13823 (*it)->status = INVALID;
13824 }
13825
13826 if (mSupportChannel) {
13827 mSupportChannel->stop();
13828 }
13829 if (mAnalysisChannel) {
13830 mAnalysisChannel->stop();
13831 }
13832 if (mRawDumpChannel) {
13833 mRawDumpChannel->stop();
13834 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013835 if (mHdrPlusRawSrcChannel) {
13836 mHdrPlusRawSrcChannel->stop();
13837 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013838 if (mMetadataChannel) {
13839 /* If content of mStreamInfo is not 0, there is metadata stream */
13840 mMetadataChannel->stop();
13841 }
13842
13843 LOGD("All channels stopped");
13844 return rc;
13845}
13846
13847/*===========================================================================
13848 * FUNCTION : startAllChannels
13849 *
13850 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13851 *
13852 * PARAMETERS : None
13853 *
13854 * RETURN : NO_ERROR on success
13855 * Error codes on failure
13856 *
13857 *==========================================================================*/
13858int32_t QCamera3HardwareInterface::startAllChannels()
13859{
13860 int32_t rc = NO_ERROR;
13861
13862 LOGD("Start all channels ");
13863 // Start the Streams/Channels
13864 if (mMetadataChannel) {
13865 /* If content of mStreamInfo is not 0, there is metadata stream */
13866 rc = mMetadataChannel->start();
13867 if (rc < 0) {
13868 LOGE("META channel start failed");
13869 return rc;
13870 }
13871 }
13872 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13873 it != mStreamInfo.end(); it++) {
13874 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13875 if (channel) {
13876 rc = channel->start();
13877 if (rc < 0) {
13878 LOGE("channel start failed");
13879 return rc;
13880 }
13881 }
13882 }
13883 if (mAnalysisChannel) {
13884 mAnalysisChannel->start();
13885 }
13886 if (mSupportChannel) {
13887 rc = mSupportChannel->start();
13888 if (rc < 0) {
13889 LOGE("Support channel start failed");
13890 return rc;
13891 }
13892 }
13893 if (mRawDumpChannel) {
13894 rc = mRawDumpChannel->start();
13895 if (rc < 0) {
13896 LOGE("RAW dump channel start failed");
13897 return rc;
13898 }
13899 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013900 if (mHdrPlusRawSrcChannel) {
13901 rc = mHdrPlusRawSrcChannel->start();
13902 if (rc < 0) {
13903 LOGE("HDR+ RAW channel start failed");
13904 return rc;
13905 }
13906 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013907
13908 LOGD("All channels started");
13909 return rc;
13910}
13911
13912/*===========================================================================
13913 * FUNCTION : notifyErrorForPendingRequests
13914 *
13915 * DESCRIPTION: This function sends error for all the pending requests/buffers
13916 *
13917 * PARAMETERS : None
13918 *
13919 * RETURN : Error codes
13920 * NO_ERROR on success
13921 *
13922 *==========================================================================*/
13923int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13924{
13925 int32_t rc = NO_ERROR;
13926 unsigned int frameNum = 0;
13927 camera3_capture_result_t result;
13928 camera3_stream_buffer_t *pStream_Buf = NULL;
13929
13930 memset(&result, 0, sizeof(camera3_capture_result_t));
13931
13932 if (mPendingRequestsList.size() > 0) {
13933 pendingRequestIterator i = mPendingRequestsList.begin();
13934 frameNum = i->frame_number;
13935 } else {
13936 /* There might still be pending buffers even though there are
13937 no pending requests. Setting the frameNum to MAX so that
13938 all the buffers with smaller frame numbers are returned */
13939 frameNum = UINT_MAX;
13940 }
13941
13942 LOGH("Oldest frame num on mPendingRequestsList = %u",
13943 frameNum);
13944
Emilian Peev7650c122017-01-19 08:24:33 -080013945 notifyErrorFoPendingDepthData(mDepthChannel);
13946
Thierry Strudel3d639192016-09-09 11:52:26 -070013947 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13948 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13949
13950 if (req->frame_number < frameNum) {
13951 // Send Error notify to frameworks for each buffer for which
13952 // metadata buffer is already sent
13953 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13954 req->frame_number, req->mPendingBufferList.size());
13955
13956 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13957 if (NULL == pStream_Buf) {
13958 LOGE("No memory for pending buffers array");
13959 return NO_MEMORY;
13960 }
13961 memset(pStream_Buf, 0,
13962 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13963 result.result = NULL;
13964 result.frame_number = req->frame_number;
13965 result.num_output_buffers = req->mPendingBufferList.size();
13966 result.output_buffers = pStream_Buf;
13967
13968 size_t index = 0;
13969 for (auto info = req->mPendingBufferList.begin();
13970 info != req->mPendingBufferList.end(); ) {
13971
13972 camera3_notify_msg_t notify_msg;
13973 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13974 notify_msg.type = CAMERA3_MSG_ERROR;
13975 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13976 notify_msg.message.error.error_stream = info->stream;
13977 notify_msg.message.error.frame_number = req->frame_number;
13978 pStream_Buf[index].acquire_fence = -1;
13979 pStream_Buf[index].release_fence = -1;
13980 pStream_Buf[index].buffer = info->buffer;
13981 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13982 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013983 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013984 index++;
13985 // Remove buffer from list
13986 info = req->mPendingBufferList.erase(info);
13987 }
13988
13989 // Remove this request from Map
13990 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13991 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13992 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13993
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013994 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013995
13996 delete [] pStream_Buf;
13997 } else {
13998
13999 // Go through the pending requests info and send error request to framework
14000 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
14001
14002 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
14003
14004 // Send error notify to frameworks
14005 camera3_notify_msg_t notify_msg;
14006 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14007 notify_msg.type = CAMERA3_MSG_ERROR;
14008 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14009 notify_msg.message.error.error_stream = NULL;
14010 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014011 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014012
14013 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
14014 if (NULL == pStream_Buf) {
14015 LOGE("No memory for pending buffers array");
14016 return NO_MEMORY;
14017 }
14018 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
14019
14020 result.result = NULL;
14021 result.frame_number = req->frame_number;
14022 result.input_buffer = i->input_buffer;
14023 result.num_output_buffers = req->mPendingBufferList.size();
14024 result.output_buffers = pStream_Buf;
14025
14026 size_t index = 0;
14027 for (auto info = req->mPendingBufferList.begin();
14028 info != req->mPendingBufferList.end(); ) {
14029 pStream_Buf[index].acquire_fence = -1;
14030 pStream_Buf[index].release_fence = -1;
14031 pStream_Buf[index].buffer = info->buffer;
14032 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
14033 pStream_Buf[index].stream = info->stream;
14034 index++;
14035 // Remove buffer from list
14036 info = req->mPendingBufferList.erase(info);
14037 }
14038
14039 // Remove this request from Map
14040 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
14041 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
14042 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
14043
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014044 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014045 delete [] pStream_Buf;
14046 i = erasePendingRequest(i);
14047 }
14048 }
14049
14050 /* Reset pending frame Drop list and requests list */
14051 mPendingFrameDropList.clear();
14052
14053 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
14054 req.mPendingBufferList.clear();
14055 }
14056 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014057 LOGH("Cleared all the pending buffers ");
14058
14059 return rc;
14060}
14061
14062bool QCamera3HardwareInterface::isOnEncoder(
14063 const cam_dimension_t max_viewfinder_size,
14064 uint32_t width, uint32_t height)
14065{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014066 return ((width > (uint32_t)max_viewfinder_size.width) ||
14067 (height > (uint32_t)max_viewfinder_size.height) ||
14068 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14069 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014070}
14071
14072/*===========================================================================
14073 * FUNCTION : setBundleInfo
14074 *
14075 * DESCRIPTION: Set bundle info for all streams that are bundle.
14076 *
14077 * PARAMETERS : None
14078 *
14079 * RETURN : NO_ERROR on success
14080 * Error codes on failure
14081 *==========================================================================*/
14082int32_t QCamera3HardwareInterface::setBundleInfo()
14083{
14084 int32_t rc = NO_ERROR;
14085
14086 if (mChannelHandle) {
14087 cam_bundle_config_t bundleInfo;
14088 memset(&bundleInfo, 0, sizeof(bundleInfo));
14089 rc = mCameraHandle->ops->get_bundle_info(
14090 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14091 if (rc != NO_ERROR) {
14092 LOGE("get_bundle_info failed");
14093 return rc;
14094 }
14095 if (mAnalysisChannel) {
14096 mAnalysisChannel->setBundleInfo(bundleInfo);
14097 }
14098 if (mSupportChannel) {
14099 mSupportChannel->setBundleInfo(bundleInfo);
14100 }
14101 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14102 it != mStreamInfo.end(); it++) {
14103 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14104 channel->setBundleInfo(bundleInfo);
14105 }
14106 if (mRawDumpChannel) {
14107 mRawDumpChannel->setBundleInfo(bundleInfo);
14108 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014109 if (mHdrPlusRawSrcChannel) {
14110 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14111 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014112 }
14113
14114 return rc;
14115}
14116
14117/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014118 * FUNCTION : setInstantAEC
14119 *
14120 * DESCRIPTION: Set Instant AEC related params.
14121 *
14122 * PARAMETERS :
14123 * @meta: CameraMetadata reference
14124 *
14125 * RETURN : NO_ERROR on success
14126 * Error codes on failure
14127 *==========================================================================*/
14128int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14129{
14130 int32_t rc = NO_ERROR;
14131 uint8_t val = 0;
14132 char prop[PROPERTY_VALUE_MAX];
14133
14134 // First try to configure instant AEC from framework metadata
14135 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14136 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14137 }
14138
14139 // If framework did not set this value, try to read from set prop.
14140 if (val == 0) {
14141 memset(prop, 0, sizeof(prop));
14142 property_get("persist.camera.instant.aec", prop, "0");
14143 val = (uint8_t)atoi(prop);
14144 }
14145
14146 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14147 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14148 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14149 mInstantAEC = val;
14150 mInstantAECSettledFrameNumber = 0;
14151 mInstantAecFrameIdxCount = 0;
14152 LOGH("instantAEC value set %d",val);
14153 if (mInstantAEC) {
14154 memset(prop, 0, sizeof(prop));
14155 property_get("persist.camera.ae.instant.bound", prop, "10");
14156 int32_t aec_frame_skip_cnt = atoi(prop);
14157 if (aec_frame_skip_cnt >= 0) {
14158 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14159 } else {
14160 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14161 rc = BAD_VALUE;
14162 }
14163 }
14164 } else {
14165 LOGE("Bad instant aec value set %d", val);
14166 rc = BAD_VALUE;
14167 }
14168 return rc;
14169}
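/* Usage note (illustrative): when the framework does not send
 * QCAMERA3_INSTANT_AEC_MODE, the same behaviour can be forced through
 * properties. The numeric mode follows the cam_aec_convergence_type enum;
 * treating a small non-zero value as a valid mode here is an assumption:
 *
 *   adb shell setprop persist.camera.instant.aec 1
 *   adb shell setprop persist.camera.ae.instant.bound 10   # frames to settle
 */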
14170
14171/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014172 * FUNCTION : get_num_overall_buffers
14173 *
14174 * DESCRIPTION: Get the total number of pending buffers across all requests.
14175 *
14176 * PARAMETERS : None
14177 *
14178 * RETURN : Number of overall pending buffers
14179 *
14180 *==========================================================================*/
14181uint32_t PendingBuffersMap::get_num_overall_buffers()
14182{
14183 uint32_t sum_buffers = 0;
14184 for (auto &req : mPendingBuffersInRequest) {
14185 sum_buffers += req.mPendingBufferList.size();
14186 }
14187 return sum_buffers;
14188}
14189
14190/*===========================================================================
14191 * FUNCTION : removeBuf
14192 *
14193 * DESCRIPTION: Remove a matching buffer from tracker.
14194 *
14195 * PARAMETERS : @buffer: image buffer for the callback
14196 *
14197 * RETURN : None
14198 *
14199 *==========================================================================*/
14200void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14201{
14202 bool buffer_found = false;
14203 for (auto req = mPendingBuffersInRequest.begin();
14204 req != mPendingBuffersInRequest.end(); req++) {
14205 for (auto k = req->mPendingBufferList.begin();
14206 k != req->mPendingBufferList.end(); k++ ) {
14207 if (k->buffer == buffer) {
14208 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14209 req->frame_number, buffer);
14210 k = req->mPendingBufferList.erase(k);
14211 if (req->mPendingBufferList.empty()) {
14212 // Remove this request from Map
14213 req = mPendingBuffersInRequest.erase(req);
14214 }
14215 buffer_found = true;
14216 break;
14217 }
14218 }
14219 if (buffer_found) {
14220 break;
14221 }
14222 }
14223 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14224 get_num_overall_buffers());
14225}
14226
14227/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014228 * FUNCTION : getBufErrStatus
14229 *
14230 * DESCRIPTION: get buffer error status
14231 *
14232 * PARAMETERS : @buffer: buffer handle
14233 *
14234 * RETURN : Error status
14235 *
14236 *==========================================================================*/
14237int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14238{
14239 for (auto& req : mPendingBuffersInRequest) {
14240 for (auto& k : req.mPendingBufferList) {
14241 if (k.buffer == buffer)
14242 return k.bufStatus;
14243 }
14244 }
14245 return CAMERA3_BUFFER_STATUS_OK;
14246}
14247
14248/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014249 * FUNCTION : setPAAFSupport
14250 *
14251 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14252 * feature mask according to stream type and filter
14253 * arrangement
14254 *
14255 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14256 * @stream_type: stream type
14257 * @filter_arrangement: filter arrangement
14258 *
14259 * RETURN : None
14260 *==========================================================================*/
14261void QCamera3HardwareInterface::setPAAFSupport(
14262 cam_feature_mask_t& feature_mask,
14263 cam_stream_type_t stream_type,
14264 cam_color_filter_arrangement_t filter_arrangement)
14265{
Thierry Strudel3d639192016-09-09 11:52:26 -070014266 switch (filter_arrangement) {
14267 case CAM_FILTER_ARRANGEMENT_RGGB:
14268 case CAM_FILTER_ARRANGEMENT_GRBG:
14269 case CAM_FILTER_ARRANGEMENT_GBRG:
14270 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014271 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14272 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014273 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014274 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14275 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014276 }
14277 break;
14278 case CAM_FILTER_ARRANGEMENT_Y:
14279 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14280 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14281 }
14282 break;
14283 default:
14284 break;
14285 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014286 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14287 feature_mask, stream_type, filter_arrangement);
14288
14289
Thierry Strudel3d639192016-09-09 11:52:26 -070014290}
14291
14292/*===========================================================================
14293* FUNCTION : getSensorMountAngle
14294*
14295* DESCRIPTION: Retrieve sensor mount angle
14296*
14297* PARAMETERS : None
14298*
14299* RETURN : sensor mount angle in uint32_t
14300*==========================================================================*/
14301uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14302{
14303 return gCamCapability[mCameraId]->sensor_mount_angle;
14304}
14305
14306/*===========================================================================
14307* FUNCTION : getRelatedCalibrationData
14308*
14309* DESCRIPTION: Retrieve related system calibration data
14310*
14311* PARAMETERS : None
14312*
14313* RETURN : Pointer of related system calibration data
14314*==========================================================================*/
14315const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14316{
14317 return (const cam_related_system_calibration_data_t *)
14318 &(gCamCapability[mCameraId]->related_cam_calibration);
14319}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014320
14321/*===========================================================================
14322 * FUNCTION : is60HzZone
14323 *
14324 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
14325 *
14326 * PARAMETERS : None
14327 *
14328 * RETURN : True if in 60Hz zone, False otherwise
14329 *==========================================================================*/
14330bool QCamera3HardwareInterface::is60HzZone()
14331{
14332 time_t t = time(NULL);
14333 struct tm lt;
14334
14335 struct tm* r = localtime_r(&t, &lt);
14336
14337 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14338 return true;
14339 else
14340 return false;
14341}
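/* Worked example of the check above: tm_gmtoff is the local UTC offset in
 * seconds. At UTC-05:00, tm_gmtoff = -18000 <= -7200, so the zone is treated
 * as 60Hz; at UTC+01:00, tm_gmtoff = 3600 lies inside (-2h, +8h) and the zone
 * is treated as 50Hz. If localtime_r() fails, 60Hz is assumed.
 */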
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014342
14343/*===========================================================================
14344 * FUNCTION : adjustBlackLevelForCFA
14345 *
14346 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14347 * of bayer CFA (Color Filter Array).
14348 *
14349 * PARAMETERS : @input: black level pattern in the order of RGGB
14350 * @output: black level pattern in the order of CFA
14351 * @color_arrangement: CFA color arrangement
14352 *
14353 * RETURN : None
14354 *==========================================================================*/
14355template<typename T>
14356void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14357 T input[BLACK_LEVEL_PATTERN_CNT],
14358 T output[BLACK_LEVEL_PATTERN_CNT],
14359 cam_color_filter_arrangement_t color_arrangement)
14360{
14361 switch (color_arrangement) {
14362 case CAM_FILTER_ARRANGEMENT_GRBG:
14363 output[0] = input[1];
14364 output[1] = input[0];
14365 output[2] = input[3];
14366 output[3] = input[2];
14367 break;
14368 case CAM_FILTER_ARRANGEMENT_GBRG:
14369 output[0] = input[2];
14370 output[1] = input[3];
14371 output[2] = input[0];
14372 output[3] = input[1];
14373 break;
14374 case CAM_FILTER_ARRANGEMENT_BGGR:
14375 output[0] = input[3];
14376 output[1] = input[2];
14377 output[2] = input[1];
14378 output[3] = input[0];
14379 break;
14380 case CAM_FILTER_ARRANGEMENT_RGGB:
14381 output[0] = input[0];
14382 output[1] = input[1];
14383 output[2] = input[2];
14384 output[3] = input[3];
14385 break;
14386 default:
14387 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14388 break;
14389 }
14390}
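/* Worked example: with the RGGB-ordered input {R, Gr, Gb, B} and a GRBG sensor,
 * the remapping above produces {Gr, R, B, Gb}; each output slot receives the
 * input channel that physically occupies that position in the CFA.
 */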
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014391
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014392void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14393 CameraMetadata &resultMetadata,
14394 std::shared_ptr<metadata_buffer_t> settings)
14395{
14396 if (settings == nullptr) {
14397 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14398 return;
14399 }
14400
14401 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14402 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14403 }
14404
14405 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14406 String8 str((const char *)gps_methods);
14407 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14408 }
14409
14410 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14411 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14412 }
14413
14414 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14415 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14416 }
14417
14418 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14419 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14420 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14421 }
14422
14423 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14424 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14425 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14426 }
14427
14428 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14429 int32_t fwk_thumb_size[2];
14430 fwk_thumb_size[0] = thumb_size->width;
14431 fwk_thumb_size[1] = thumb_size->height;
14432 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14433 }
14434
14435 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14436 uint8_t fwk_intent = intent[0];
14437 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14438 }
14439}
14440
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014441bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14442 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14443 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014444{
14445 if (hdrPlusRequest == nullptr) return false;
14446
14447 // Check noise reduction mode is high quality.
14448 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14449 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14450 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014451 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14452 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014453 return false;
14454 }
14455
14456 // Check edge mode is high quality.
14457 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14458 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14459 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14460 return false;
14461 }
14462
14463 if (request.num_output_buffers != 1 ||
14464 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14465 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014466 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14467 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14468                    request.output_buffers[i].stream->width,
14469                    request.output_buffers[i].stream->height,
14470                    request.output_buffers[i].stream->format);
14471 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014472 return false;
14473 }
14474
14475 // Get a YUV buffer from pic channel.
14476 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14477 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14478 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14479 if (res != OK) {
14480 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14481 __FUNCTION__, strerror(-res), res);
14482 return false;
14483 }
14484
14485 pbcamera::StreamBuffer buffer;
14486 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014487 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014488 buffer.data = yuvBuffer->buffer;
14489 buffer.dataSize = yuvBuffer->frame_len;
14490
14491 pbcamera::CaptureRequest pbRequest;
14492 pbRequest.id = request.frame_number;
14493 pbRequest.outputBuffers.push_back(buffer);
14494
14495 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014496 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014497 if (res != OK) {
14498 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14499 strerror(-res), res);
14500 return false;
14501 }
14502
14503 hdrPlusRequest->yuvBuffer = yuvBuffer;
14504 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14505
14506 return true;
14507}
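// Illustrative caller sketch (the pending-request bookkeeping shown is an
// assumption about how the request path consumes the result of the function
// above):
//
//   HdrPlusPendingRequest pendingHdrPlusRequest;
//   bool isHdrPlus = trySubmittingHdrPlusRequestLocked(
//           &pendingHdrPlusRequest, *request, meta);
//   if (isHdrPlus) {
//       // Track by frame number so the HDR+ capture-result callback can hand
//       // the YUV buffer back to the pic channel for JPEG encoding.
//       mHdrPlusPendingRequests.emplace(request->frame_number,
//               pendingHdrPlusRequest);
//   }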
14508
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014509status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14510{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014511 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14512 return OK;
14513 }
14514
14515 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14516 if (res != OK) {
14517 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14518 strerror(-res), res);
14519 return res;
14520 }
14521 gHdrPlusClientOpening = true;
14522
14523 return OK;
14524}
14525
Chien-Yu Chenee335912017-02-09 17:53:20 -080014526status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14527{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014528 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014529
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014530 // Check if gHdrPlusClient is opened or being opened.
14531 if (gHdrPlusClient == nullptr) {
14532 if (gHdrPlusClientOpening) {
14533 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14534 return OK;
14535 }
14536
14537 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014538 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014539 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14540 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014541 return res;
14542 }
14543
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014544 // When opening HDR+ client completes, HDR+ mode will be enabled.
14545 return OK;
14546
Chien-Yu Chenee335912017-02-09 17:53:20 -080014547 }
14548
14549 // Configure stream for HDR+.
14550 res = configureHdrPlusStreamsLocked();
14551 if (res != OK) {
14552 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014553 return res;
14554 }
14555
14556 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14557 res = gHdrPlusClient->setZslHdrPlusMode(true);
14558 if (res != OK) {
14559 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014560 return res;
14561 }
14562
14563 mHdrPlusModeEnabled = true;
14564 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14565
14566 return OK;
14567}
14568
14569void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14570{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014571 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014572 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014573 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14574 if (res != OK) {
14575 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14576 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014577
14578 // Close HDR+ client so Easel can enter low power mode.
14579 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14580 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014581 }
14582
14583 mHdrPlusModeEnabled = false;
    gHdrPlusClientOpening = false;
    ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
}

status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
{
    pbcamera::InputConfiguration inputConfig;
    std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
    status_t res = OK;

    // Configure HDR+ client streams.
    // Get input config.
    if (mHdrPlusRawSrcChannel) {
        // HDR+ input buffers will be provided by the HAL.
        res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
                HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
        if (res != OK) {
            LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }

        inputConfig.isSensorInput = false;
    } else {
        // Sensor MIPI will send data to Easel.
        inputConfig.isSensorInput = true;
        inputConfig.sensorMode.cameraId = mCameraId;
        inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
        inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
        inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
        inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
        inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
        if (mSensorModeInfo.num_raw_bits != 10) {
            ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
                    mSensorModeInfo.num_raw_bits);
            return BAD_VALUE;
        }

        inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
    }

    // Get output configurations.
    // Easel may need to output RAW16 buffers if mRawChannel was created.
    // TODO: handle RAW16 outputs.

    // Easel may need to output YUV buffers if mPictureChannel was created.
    pbcamera::StreamConfiguration yuvOutputConfig;
    if (mPictureChannel != nullptr) {
        res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
                HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
        if (res != OK) {
            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
                    __FUNCTION__, strerror(-res), res);

            return res;
        }

        outputStreamConfigs.push_back(yuvOutputConfig);
    }

    // TODO: consider other channels for YUV output buffers.

    res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
    if (res != OK) {
        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    return OK;
}
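
// Illustrative sketch only (the helper below is hypothetical and not used by the HAL above): the
// num_raw_bits == 10 check in configureHdrPlusStreamsLocked() exists because
// HAL_PIXEL_FORMAT_RAW10 packs 4 pixels into 5 bytes, so a packed RAW10 row of `width` pixels
// needs width * 10 / 8 bytes before any vendor-specific row alignment is applied.
static inline size_t exampleRaw10PackedRowBytes(size_t width)
{
    // RAW10 requires the width to be a multiple of 4 pixels; 4 pixels map to 5 bytes.
    return (width / 4) * 5;
}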

void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
{
    if (client == nullptr) {
        ALOGE("%s: Opened client is null.", __FUNCTION__);
        return;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
    ALOGI("%s: HDR+ client opened.", __FUNCTION__);

    Mutex::Autolock l(gHdrPlusClientLock);
    if (!gHdrPlusClientOpening) {
        ALOGW("%s: HDR+ mode was disabled while the HDR+ client was being opened.", __FUNCTION__);
        return;
    }

    gHdrPlusClient = std::move(client);
    gHdrPlusClientOpening = false;

    // Set static metadata.
    status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
    if (res != OK) {
        LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
                __FUNCTION__, strerror(-res), res);
        gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
        return;
    }

    // Enable HDR+ mode.
    res = enableHdrPlusModeLocked();
    if (res != OK) {
        LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
    }
}
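
// Summary of the asynchronous open flow as the callbacks above suggest: gHdrPlusClientOpening is
// expected to stay true while an open is in flight; onOpened() stores the client, clears the flag
// and calls enableHdrPlusModeLocked(), while onOpenFailed() only clears the flag.
// disableHdrPlusModeLocked() also clears the flag, so a client that finishes opening afterwards is
// not enabled. All of these transitions happen under gHdrPlusClientLock.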

void QCamera3HardwareInterface::onOpenFailed(status_t err)
{
    ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
    Mutex::Autolock l(gHdrPlusClientLock);
    gHdrPlusClientOpening = false;
}

void QCamera3HardwareInterface::onFatalError()
{
    ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);

    // Set HAL state to error.
    pthread_mutex_lock(&mMutex);
    mState = ERROR;
    pthread_mutex_unlock(&mMutex);

    handleCameraDeviceError();
}

void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
        const camera_metadata_t &resultMetadata)
{
    if (result != nullptr) {
        if (result->outputBuffers.size() != 1) {
            ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
                    result->outputBuffers.size());
            return;
        }

        if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
            ALOGE("%s: Only YUV output stream is supported (stream id %d).", __FUNCTION__,
                    result->outputBuffers[0].streamId);
            return;
        }

        // Find the pending HDR+ request.
        HdrPlusPendingRequest pendingRequest;
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            if (req == mHdrPlusPendingRequests.end()) {
                ALOGE("%s: Couldn't find pending HDR+ request %d.", __FUNCTION__,
                        result->requestId);
                return;
            }
            pendingRequest = req->second;
        }

        // Update the result metadata with the settings of the HDR+ still capture request because
        // the result metadata belongs to a ZSL buffer.
        CameraMetadata metadata;
        metadata = &resultMetadata;
        updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
        camera_metadata_t* updatedResultMetadata = metadata.release();
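        // The assignment above clones the camera_metadata_t buffer into the CameraMetadata
        // wrapper (resultMetadata itself is not modified), and release() hands ownership of the
        // clone back as a raw pointer; it is freed later in handlePendingResultsWithLock() as
        // noted below.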

        QCamera3PicChannel *picChannel =
                (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;

        // Check if dumping HDR+ YUV output is enabled.
        char prop[PROPERTY_VALUE_MAX];
        property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
        bool dumpYuvOutput = atoi(prop);

        if (dumpYuvOutput) {
            // Dump yuv buffer to a ppm file.
            pbcamera::StreamConfiguration outputConfig;
            status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
                    HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
            if (rc == OK) {
                char buf[FILENAME_MAX] = {};
                snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                        result->requestId, result->outputBuffers[0].streamId,
                        outputConfig.image.width, outputConfig.image.height);

                hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
            } else {
                LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
                        __FUNCTION__, strerror(-rc), rc);
            }
        }
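        // The dump can be toggled at runtime via the persist property read above, e.g.
        // "adb shell setprop persist.camera.hdrplus.dump_yuv 1"; files are written under
        // QCAMERA_DUMP_FRM_LOCATION with the request id, stream id and dimensions in the name.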

        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
        auto halMetadata = std::make_shared<metadata_buffer_t>();
        clear_metadata_buffer(halMetadata.get());

        // Convert updated result metadata to HAL metadata and return the YUV buffer for JPEG
        // encoding.
        status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
                halStreamId, /*minFrameDuration*/0);
        if (res == OK) {
            // Return the buffer to pic channel for encoding.
            picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
                    pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
                    halMetadata);
        } else {
            // Return the buffer without encoding.
            // TODO: This should not happen but we may want to report an error buffer to camera
            // service.
            picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
            ALOGE("%s: Translating framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
                    strerror(-res), res);
        }

        // Send HDR+ metadata to framework.
        {
            pthread_mutex_lock(&mMutex);

            // updatedResultMetadata will be freed in handlePendingResultsWithLock.
            handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
            pthread_mutex_unlock(&mMutex);
        }

        // Remove the HDR+ pending request.
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            mHdrPlusPendingRequests.erase(req);
        }
    }
}

void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got a null failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Remove the pending HDR+ request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
        if (pendingRequest == mHdrPlusPendingRequests.end()) {
            ALOGW("%s: Couldn't find pending HDR+ request %d.", __FUNCTION__,
                    failedResult->requestId);
        } else {
            // Return the buffer to pic channel.
            QCamera3PicChannel *picChannel =
                    (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
            picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());

            mHdrPlusPendingRequests.erase(pendingRequest);
        }
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

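    // Per the camera3 callback contract, each failed output buffer is reported both as a
    // CAMERA3_MSG_ERROR_BUFFER notify message and as a buffer returned with
    // CAMERA3_BUFFER_STATUS_ERROR in a capture result, so the framework can drop just these
    // buffers without invalidating the rest of the request.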
    // Send out buffer errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            streamBuffers.push_back(streamBuffer);

            // Send out error buffer event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffers->frame_number;
        result.num_output_buffers = streamBuffers.size();
        result.output_buffers = &streamBuffers[0];

        // Send out result with buffer errors.
        orchestrateResult(&result);

        // Remove pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}

}; //end namespace qcamera