/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
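// METADATA_MAP_SIZE() yields the number of entries in the QCameraMap tables
// defined below, e.g. METADATA_MAP_SIZE(EFFECT_MODES_MAP) when iterating a map.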

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT   0
#define FACE_TOP    1
#define FACE_RIGHT  2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X  0
#define LEFT_EYE_Y  1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X     4
#define MOUTH_Y     5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS,      CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS,         CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
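// Note: CAM_FOCUS_MODE_OFF and CAM_FOCUS_MODE_FIXED both map to
// ANDROID_CONTROL_AF_MODE_OFF above, so fixed-focus modules are reported to
// the framework simply as "AF off".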

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};
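// The table above is a flat list of (width, height) pairs; the leading {0, 0}
// entry advertises that JPEG thumbnail generation can be disabled, as required
// for ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES.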

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important: when mapping from HAL to Android values the
 * lookup traverses from lower to higher index, so for HAL values that map to more than one
 * Android value, the first match found is the one that gets used.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
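// register_stream_buffers and get_metadata_vendor_tag_ops are intentionally
// NULL: both were deprecated for camera devices at HAL version 3.2 and above,
// and vendor tags are exported through the camera module instead.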

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
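// 0xDEADBEEF acts as an "invalid session" sentinel: openCamera() overwrites the
// entry for this camera with the id returned by get_session_id(), and
// closeCamera() resets it back to the sentinel.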
static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
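// Note: the timestamps logged above use CLOCK_BOOTTIME, so they are expressed
// in milliseconds since boot and keep counting across device suspend.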
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParamters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that the ZSL stream
                 * set by the framework is always the full active array size,
                 * but it is not clear from the spec whether the framework will
                 * always follow that. We also have logic to override to the full
                 * array size, so keep the logic lenient for the moment.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateUsageFlags
 *
 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *   NO_ERROR if the usage flags are supported
 *   error code if usage flags are not supported
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateUsageFlags(
        const camera3_stream_configuration_t* streamList)
{
    for (size_t j = 0; j < streamList->num_streams; j++) {
        const camera3_stream_t *newStream = streamList->streams[j];

        if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
            (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
             newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
            continue;
        }

        bool isVideo = IS_USAGE_VIDEO(newStream->usage);
        bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
        bool isZSL = IS_USAGE_ZSL(newStream->usage);
        bool forcePreviewUBWC = true;
        if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
            forcePreviewUBWC = false;
        }
        cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);

        // Color space for this camera device is guaranteed to be ITU_R_601_FR.
        // So color spaces will always match.

        // Check whether underlying formats of shared streams match.
        if (isVideo && isPreview && videoFormat != previewFormat) {
            LOGE("Combined video and preview usage flag is not supported");
            return -EINVAL;
        }
        if (isPreview && isZSL && previewFormat != zslFormat) {
            LOGE("Combined preview and zsl usage flag is not supported");
            return -EINVAL;
        }
        if (isVideo && isZSL && videoFormat != zslFormat) {
            LOGE("Combined video and zsl usage flag is not supported");
            return -EINVAL;
        }
    }
    return NO_ERROR;
}

1359/*===========================================================================
1360 * FUNCTION : validateUsageFlagsForEis
1361 *
1362 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1363 *
1364 * PARAMETERS :
1365 * @stream_list : streams to be configured
1366 *
1367 * RETURN :
1368 * NO_ERROR if the usage flags are supported
1369 * error code if usage flags are not supported
1370 *
1371 *==========================================================================*/
1372int QCamera3HardwareInterface::validateUsageFlagsForEis(
1373 const camera3_stream_configuration_t* streamList)
1374{
1375 for (size_t j = 0; j < streamList->num_streams; j++) {
1376 const camera3_stream_t *newStream = streamList->streams[j];
1377
1378 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1379 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1380
1381 // Because EIS is "hard-coded" for certain use cases, and the current
1382 // implementation doesn't support shared preview and video on the same
1383 // stream, return failure if EIS is forced on.
1384 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1385 LOGE("Combined video and preview usage flag is not supported due to EIS");
1386 return -EINVAL;
1387 }
1388 }
1389 return NO_ERROR;
1390}
1391
Thierry Strudel3d639192016-09-09 11:52:26 -07001392/*==============================================================================
1393 * FUNCTION : isSupportChannelNeeded
1394 *
1395 * DESCRIPTION: Simple heuristic function to determine if a support channel is needed
1396 *
1397 * PARAMETERS :
1398 * @stream_list : streams to be configured
1399 * @stream_config_info : the config info for streams to be configured
1400 *
1401 * RETURN : Boolean true/false decision
1402 *
1403 *==========================================================================*/
1404bool QCamera3HardwareInterface::isSupportChannelNeeded(
1405 camera3_stream_configuration_t *streamList,
1406 cam_stream_size_info_t stream_config_info)
1407{
1408 uint32_t i;
1409 bool pprocRequested = false;
1410 /* Check for conditions where PProc pipeline does not have any streams*/
1411 for (i = 0; i < stream_config_info.num_streams; i++) {
1412 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1413 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1414 pprocRequested = true;
1415 break;
1416 }
1417 }
1418
1419 if (pprocRequested == false)
1420 return true;
1421
1422 /* Dummy stream needed if only raw or JPEG streams are present */
1423 for (i = 0; i < streamList->num_streams; i++) {
1424 switch(streamList->streams[i]->format) {
1425 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1426 case HAL_PIXEL_FORMAT_RAW10:
1427 case HAL_PIXEL_FORMAT_RAW16:
1428 case HAL_PIXEL_FORMAT_BLOB:
1429 break;
1430 default:
1431 return false;
1432 }
1433 }
1434 return true;
1435}
1436
1437/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001438 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001439 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001440 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001441 *
1442 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001443 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001444 *
1445 * RETURN : int32_t type of status
1446 * NO_ERROR -- success
1447 * non-zero failure code
1448 *
1449 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001450int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001451{
1452 int32_t rc = NO_ERROR;
1453
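// Take the per-axis maximum over all configured stream sizes; this dimension
// is sent to the backend so it can report the sensor mode that covers every stream.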
1454 cam_dimension_t max_dim = {0, 0};
1455 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1456 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1457 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1458 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1459 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1460 }
1461
1462 clear_metadata_buffer(mParameters);
1463
1464 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1465 max_dim);
1466 if (rc != NO_ERROR) {
1467 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1468 return rc;
1469 }
1470
1471 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1472 if (rc != NO_ERROR) {
1473 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1474 return rc;
1475 }
1476
1477 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001478 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001479
1480 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1481 mParameters);
1482 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001483 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001484 return rc;
1485 }
1486
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001487 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001488 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1489 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1490 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1491 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1492 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001493
1494 return rc;
1495}
1496
1497/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001498 * FUNCTION : addToPPFeatureMask
1499 *
1500 * DESCRIPTION: add additional features to pp feature mask based on
1501 * stream type and use case
1502 *
1503 * PARAMETERS :
1504 * @stream_format : stream type for feature mask
1505 * @stream_idx : stream idx within postprocess_mask list to change
1506 *
1507 * RETURN : None
1508 *
1509 *==========================================================================*/
1510void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1511 uint32_t stream_idx)
1512{
1513 char feature_mask_value[PROPERTY_VALUE_MAX];
1514 cam_feature_mask_t feature_mask;
1515 int args_converted;
1516 int property_len;
1517
1518 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001519#ifdef _LE_CAMERA_
1520 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1521 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1522 property_len = property_get("persist.camera.hal3.feature",
1523 feature_mask_value, swtnr_feature_mask_value);
1524#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001525 property_len = property_get("persist.camera.hal3.feature",
1526 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001527#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001528 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1529 (feature_mask_value[1] == 'x')) {
1530 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1531 } else {
1532 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1533 }
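// e.g. persist.camera.hal3.feature accepts either a hex string ("0x...") or a
// decimal string; the parsed value is treated as a cam_feature_mask_t bitmask.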
1534 if (1 != args_converted) {
1535 feature_mask = 0;
1536 LOGE("Wrong feature mask %s", feature_mask_value);
1537 return;
1538 }
1539
1540 switch (stream_format) {
1541 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1542 /* Add SW TNR or LLVD SeeMore to pp feature mask only if video hint is enabled */
1543 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1544 mStreamConfigInfo.postprocess_mask[stream_idx]
1545 |= CAM_QTI_FEATURE_SW_TNR;
1546 LOGH("Added SW TNR to pp feature mask");
1547 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1548 mStreamConfigInfo.postprocess_mask[stream_idx]
1549 |= CAM_QCOM_FEATURE_LLVD;
1550 LOGH("Added LLVD SeeMore to pp feature mask");
1551 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001552 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1553 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1554 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1555 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001556 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1557 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1558 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1559 CAM_QTI_FEATURE_BINNING_CORRECTION;
1560 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001561 break;
1562 }
1563 default:
1564 break;
1565 }
1566 LOGD("PP feature mask %llx",
1567 mStreamConfigInfo.postprocess_mask[stream_idx]);
1568}
1569
1570/*==============================================================================
1571 * FUNCTION : updateFpsInPreviewBuffer
1572 *
1573 * DESCRIPTION: update FPS information in preview buffer.
1574 *
1575 * PARAMETERS :
1576 * @metadata : pointer to metadata buffer
1577 * @frame_number: frame_number to look for in pending buffer list
1578 *
1579 * RETURN : None
1580 *
1581 *==========================================================================*/
1582void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1583 uint32_t frame_number)
1584{
1585 // Mark all pending buffers for this particular request
1586 // with corresponding framerate information
1587 for (List<PendingBuffersInRequest>::iterator req =
1588 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1589 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1590 for(List<PendingBufferInfo>::iterator j =
1591 req->mPendingBufferList.begin();
1592 j != req->mPendingBufferList.end(); j++) {
1593 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1594 if ((req->frame_number == frame_number) &&
1595 (channel->getStreamTypeMask() &
1596 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1597 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1598 CAM_INTF_PARM_FPS_RANGE, metadata) {
1599 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1600 struct private_handle_t *priv_handle =
1601 (struct private_handle_t *)(*(j->buffer));
1602 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1603 }
1604 }
1605 }
1606 }
1607}
1608
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001609/*==============================================================================
1610 * FUNCTION : updateTimeStampInPendingBuffers
1611 *
1612 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1613 * of a frame number
1614 *
1615 * PARAMETERS :
1616 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1617 * @timestamp : timestamp to be set
1618 *
1619 * RETURN : None
1620 *
1621 *==========================================================================*/
1622void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1623 uint32_t frameNumber, nsecs_t timestamp)
1624{
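// Walk every pending buffer of the matching frame number and stamp its
// private handle's display metadata with the VT timestamp.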
1625 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1626 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1627 if (req->frame_number != frameNumber)
1628 continue;
1629
1630 for (auto k = req->mPendingBufferList.begin();
1631 k != req->mPendingBufferList.end(); k++ ) {
1632 struct private_handle_t *priv_handle =
1633 (struct private_handle_t *) (*(k->buffer));
1634 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1635 }
1636 }
1637 return;
1638}
1639
Thierry Strudel3d639192016-09-09 11:52:26 -07001640/*===========================================================================
1641 * FUNCTION : configureStreams
1642 *
1643 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1644 * and output streams.
1645 *
1646 * PARAMETERS :
1647 * @stream_list : streams to be configured
1648 *
1649 * RETURN :
1650 *
1651 *==========================================================================*/
1652int QCamera3HardwareInterface::configureStreams(
1653 camera3_stream_configuration_t *streamList)
1654{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001655 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001656 int rc = 0;
1657
1658 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001659 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001660 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001661 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001662
1663 return rc;
1664}
1665
1666/*===========================================================================
1667 * FUNCTION : configureStreamsPerfLocked
1668 *
1669 * DESCRIPTION: configureStreams while perfLock is held.
1670 *
1671 * PARAMETERS :
1672 * @stream_list : streams to be configured
1673 *
1674 * RETURN : int32_t type of status
1675 * NO_ERROR -- success
1676 * non-zero failure code
1677 *==========================================================================*/
1678int QCamera3HardwareInterface::configureStreamsPerfLocked(
1679 camera3_stream_configuration_t *streamList)
1680{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001681 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001682 int rc = 0;
1683
1684 // Sanity check stream_list
1685 if (streamList == NULL) {
1686 LOGE("NULL stream configuration");
1687 return BAD_VALUE;
1688 }
1689 if (streamList->streams == NULL) {
1690 LOGE("NULL stream list");
1691 return BAD_VALUE;
1692 }
1693
1694 if (streamList->num_streams < 1) {
1695 LOGE("Bad number of streams requested: %d",
1696 streamList->num_streams);
1697 return BAD_VALUE;
1698 }
1699
1700 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1701 LOGE("Maximum number of streams %d exceeded: %d",
1702 MAX_NUM_STREAMS, streamList->num_streams);
1703 return BAD_VALUE;
1704 }
1705
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001706 rc = validateUsageFlags(streamList);
1707 if (rc != NO_ERROR) {
1708 return rc;
1709 }
1710
Thierry Strudel3d639192016-09-09 11:52:26 -07001711 mOpMode = streamList->operation_mode;
1712 LOGD("mOpMode: %d", mOpMode);
1713
1714 /* first invalidate all the streams in mStreamInfo
1715 * if they appear again, they will be validated */
1716 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1717 it != mStreamInfo.end(); it++) {
1718 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1719 if (channel) {
1720 channel->stop();
1721 }
1722 (*it)->status = INVALID;
1723 }
1724
1725 if (mRawDumpChannel) {
1726 mRawDumpChannel->stop();
1727 delete mRawDumpChannel;
1728 mRawDumpChannel = NULL;
1729 }
1730
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001731 if (mHdrPlusRawSrcChannel) {
1732 mHdrPlusRawSrcChannel->stop();
1733 delete mHdrPlusRawSrcChannel;
1734 mHdrPlusRawSrcChannel = NULL;
1735 }
1736
Thierry Strudel3d639192016-09-09 11:52:26 -07001737 if (mSupportChannel)
1738 mSupportChannel->stop();
1739
1740 if (mAnalysisChannel) {
1741 mAnalysisChannel->stop();
1742 }
1743 if (mMetadataChannel) {
1744 /* If mStreamInfo is not empty, there is a metadata stream */
1745 mMetadataChannel->stop();
1746 }
1747 if (mChannelHandle) {
1748 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1749 mChannelHandle);
1750 LOGD("stopping channel %d", mChannelHandle);
1751 }
1752
1753 pthread_mutex_lock(&mMutex);
1754
1755 // Check state
1756 switch (mState) {
1757 case INITIALIZED:
1758 case CONFIGURED:
1759 case STARTED:
1760 /* valid state */
1761 break;
1762 default:
1763 LOGE("Invalid state %d", mState);
1764 pthread_mutex_unlock(&mMutex);
1765 return -ENODEV;
1766 }
1767
1768 /* Check whether we have video stream */
1769 m_bIs4KVideo = false;
1770 m_bIsVideo = false;
1771 m_bEisSupportedSize = false;
1772 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001773 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001774 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001775 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001776 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001777 uint32_t videoWidth = 0U;
1778 uint32_t videoHeight = 0U;
1779 size_t rawStreamCnt = 0;
1780 size_t stallStreamCnt = 0;
1781 size_t processedStreamCnt = 0;
1782 // Number of streams on ISP encoder path
1783 size_t numStreamsOnEncoder = 0;
1784 size_t numYuv888OnEncoder = 0;
1785 bool bYuv888OverrideJpeg = false;
1786 cam_dimension_t largeYuv888Size = {0, 0};
1787 cam_dimension_t maxViewfinderSize = {0, 0};
1788 bool bJpegExceeds4K = false;
1789 bool bJpegOnEncoder = false;
1790 bool bUseCommonFeatureMask = false;
1791 cam_feature_mask_t commonFeatureMask = 0;
1792 bool bSmallJpegSize = false;
1793 uint32_t width_ratio;
1794 uint32_t height_ratio;
1795 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
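// Streams larger than the max viewfinder size must run on the ISP encoder
// path; they are counted below to decide whether a common feature mask is needed.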
1796 camera3_stream_t *inputStream = NULL;
1797 bool isJpeg = false;
1798 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001799 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001800 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001801
1802 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1803
1804 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001805 uint8_t eis_prop_set;
1806 uint32_t maxEisWidth = 0;
1807 uint32_t maxEisHeight = 0;
1808
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001809 // Initialize all instant AEC related variables
1810 mInstantAEC = false;
1811 mResetInstantAEC = false;
1812 mInstantAECSettledFrameNumber = 0;
1813 mAecSkipDisplayFrameBound = 0;
1814 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001815 mCurrFeatureState = 0;
1816 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001817
Thierry Strudel3d639192016-09-09 11:52:26 -07001818 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1819
1820 size_t count = IS_TYPE_MAX;
1821 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1822 for (size_t i = 0; i < count; i++) {
1823 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001824 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1825 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001826 break;
1827 }
1828 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001829
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001830 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001831 maxEisWidth = MAX_EIS_WIDTH;
1832 maxEisHeight = MAX_EIS_HEIGHT;
1833 }
1834
1835 /* EIS setprop control */
1836 char eis_prop[PROPERTY_VALUE_MAX];
1837 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001838 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001839 eis_prop_set = (uint8_t)atoi(eis_prop);
1840
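// EIS is enabled only when the setprop requests it, the sensor advertises
// EIS 2.0/3.0 support, and this is not a constrained high-speed session; it is
// cleared again below for front cameras and configurations without a video stream.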
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001841 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001842 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1843
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001844 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1845 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001846
Thierry Strudel3d639192016-09-09 11:52:26 -07001847 /* stream configurations */
1848 for (size_t i = 0; i < streamList->num_streams; i++) {
1849 camera3_stream_t *newStream = streamList->streams[i];
1850 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1851 "height = %d, rotation = %d, usage = 0x%x",
1852 i, newStream->stream_type, newStream->format,
1853 newStream->width, newStream->height, newStream->rotation,
1854 newStream->usage);
1855 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1856 newStream->stream_type == CAMERA3_STREAM_INPUT){
1857 isZsl = true;
1858 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001859 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1860 IS_USAGE_PREVIEW(newStream->usage)) {
1861 isPreview = true;
1862 }
1863
Thierry Strudel3d639192016-09-09 11:52:26 -07001864 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1865 inputStream = newStream;
1866 }
1867
Emilian Peev7650c122017-01-19 08:24:33 -08001868 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1869 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001870 isJpeg = true;
1871 jpegSize.width = newStream->width;
1872 jpegSize.height = newStream->height;
1873 if (newStream->width > VIDEO_4K_WIDTH ||
1874 newStream->height > VIDEO_4K_HEIGHT)
1875 bJpegExceeds4K = true;
1876 }
1877
1878 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1879 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1880 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001881 // In HAL3 we can have multiple different video streams.
1882 // The variables video width and height are used below as
1883 // dimensions of the biggest of them
1884 if (videoWidth < newStream->width ||
1885 videoHeight < newStream->height) {
1886 videoWidth = newStream->width;
1887 videoHeight = newStream->height;
1888 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001889 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1890 (VIDEO_4K_HEIGHT <= newStream->height)) {
1891 m_bIs4KVideo = true;
1892 }
1893 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1894 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001895
Thierry Strudel3d639192016-09-09 11:52:26 -07001896 }
1897 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1898 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1899 switch (newStream->format) {
1900 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001901 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1902 depthPresent = true;
1903 break;
1904 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001905 stallStreamCnt++;
1906 if (isOnEncoder(maxViewfinderSize, newStream->width,
1907 newStream->height)) {
1908 numStreamsOnEncoder++;
1909 bJpegOnEncoder = true;
1910 }
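// If reaching this JPEG size from the active array would exceed the supported
// downscale factor, flag it as a small JPEG so the superset PP mask is applied later.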
1911 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1912 newStream->width);
1913 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1914 newStream->height);
1915 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1916 "FATAL: max_downscale_factor cannot be zero and so assert");
1917 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1918 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1919 LOGH("Setting small jpeg size flag to true");
1920 bSmallJpegSize = true;
1921 }
1922 break;
1923 case HAL_PIXEL_FORMAT_RAW10:
1924 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1925 case HAL_PIXEL_FORMAT_RAW16:
1926 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001927 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1928 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1929 pdStatCount++;
1930 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001931 break;
1932 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1933 processedStreamCnt++;
1934 if (isOnEncoder(maxViewfinderSize, newStream->width,
1935 newStream->height)) {
1936 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1937 !IS_USAGE_ZSL(newStream->usage)) {
1938 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1939 }
1940 numStreamsOnEncoder++;
1941 }
1942 break;
1943 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1944 processedStreamCnt++;
1945 if (isOnEncoder(maxViewfinderSize, newStream->width,
1946 newStream->height)) {
1947 // If Yuv888 size is not greater than 4K, set feature mask
1948 // to SUPERSET so that it support concurrent request on
1949 // YUV and JPEG.
1950 if (newStream->width <= VIDEO_4K_WIDTH &&
1951 newStream->height <= VIDEO_4K_HEIGHT) {
1952 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1953 }
1954 numStreamsOnEncoder++;
1955 numYuv888OnEncoder++;
1956 largeYuv888Size.width = newStream->width;
1957 largeYuv888Size.height = newStream->height;
1958 }
1959 break;
1960 default:
1961 processedStreamCnt++;
1962 if (isOnEncoder(maxViewfinderSize, newStream->width,
1963 newStream->height)) {
1964 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1965 numStreamsOnEncoder++;
1966 }
1967 break;
1968 }
1969
1970 }
1971 }
1972
1973 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1974 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1975 !m_bIsVideo) {
1976 m_bEisEnable = false;
1977 }
1978
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001979 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1980 pthread_mutex_unlock(&mMutex);
1981 return -EINVAL;
1982 }
1983
Thierry Strudel54dc9782017-02-15 12:12:10 -08001984 uint8_t forceEnableTnr = 0;
1985 char tnr_prop[PROPERTY_VALUE_MAX];
1986 memset(tnr_prop, 0, sizeof(tnr_prop));
1987 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1988 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1989
Thierry Strudel3d639192016-09-09 11:52:26 -07001990 /* Logic to enable/disable TNR based on specific config size/etc.*/
1991 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001992 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1993 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001994 else if (forceEnableTnr)
1995 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001996
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001997 char videoHdrProp[PROPERTY_VALUE_MAX];
1998 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1999 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2000 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2001
2002 if (hdr_mode_prop == 1 && m_bIsVideo &&
2003 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2004 m_bVideoHdrEnabled = true;
2005 else
2006 m_bVideoHdrEnabled = false;
2007
2008
Thierry Strudel3d639192016-09-09 11:52:26 -07002009 /* Check if num_streams is sane */
2010 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2011 rawStreamCnt > MAX_RAW_STREAMS ||
2012 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2013 LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2014 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2015 pthread_mutex_unlock(&mMutex);
2016 return -EINVAL;
2017 }
2018 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002019 if (isZsl && m_bIs4KVideo) {
2020 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002021 pthread_mutex_unlock(&mMutex);
2022 return -EINVAL;
2023 }
2024 /* Check if stream sizes are sane */
2025 if (numStreamsOnEncoder > 2) {
2026 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2027 pthread_mutex_unlock(&mMutex);
2028 return -EINVAL;
2029 } else if (1 < numStreamsOnEncoder){
2030 bUseCommonFeatureMask = true;
2031 LOGH("Multiple streams above max viewfinder size, common mask needed");
2032 }
2033
2034 /* Check if BLOB size is greater than 4k in 4k recording case */
2035 if (m_bIs4KVideo && bJpegExceeds4K) {
2036 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2037 pthread_mutex_unlock(&mMutex);
2038 return -EINVAL;
2039 }
2040
Emilian Peev7650c122017-01-19 08:24:33 -08002041 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2042 depthPresent) {
2043 LOGE("HAL doesn't support depth streams in HFR mode!");
2044 pthread_mutex_unlock(&mMutex);
2045 return -EINVAL;
2046 }
2047
Thierry Strudel3d639192016-09-09 11:52:26 -07002048 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2049 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2050 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2051 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2052 // configurations:
2053 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2054 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2055 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2056 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2057 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2058 __func__);
2059 pthread_mutex_unlock(&mMutex);
2060 return -EINVAL;
2061 }
2062
2063 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2064 // the YUV stream's size is greater than the JPEG size, set common
2065 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2066 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2067 jpegSize.width, jpegSize.height) &&
2068 largeYuv888Size.width > jpegSize.width &&
2069 largeYuv888Size.height > jpegSize.height) {
2070 bYuv888OverrideJpeg = true;
2071 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2072 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2073 }
2074
2075 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2076 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2077 commonFeatureMask);
2078 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2079 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2080
2081 rc = validateStreamDimensions(streamList);
2082 if (rc == NO_ERROR) {
2083 rc = validateStreamRotations(streamList);
2084 }
2085 if (rc != NO_ERROR) {
2086 LOGE("Invalid stream configuration requested!");
2087 pthread_mutex_unlock(&mMutex);
2088 return rc;
2089 }
2090
Emilian Peev0f3c3162017-03-15 12:57:46 +00002091 if (1 < pdStatCount) {
2092 LOGE("HAL doesn't support multiple PD streams");
2093 pthread_mutex_unlock(&mMutex);
2094 return -EINVAL;
2095 }
2096
2097 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2098 (1 == pdStatCount)) {
2099 LOGE("HAL doesn't support PD streams in HFR mode!");
2100 pthread_mutex_unlock(&mMutex);
2101 return -EINVAL;
2102 }
2103
Thierry Strudel3d639192016-09-09 11:52:26 -07002104 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2105 for (size_t i = 0; i < streamList->num_streams; i++) {
2106 camera3_stream_t *newStream = streamList->streams[i];
2107 LOGH("newStream type = %d, stream format = %d "
2108 "stream size : %d x %d, stream rotation = %d",
2109 newStream->stream_type, newStream->format,
2110 newStream->width, newStream->height, newStream->rotation);
2111 //if the stream is in the mStreamList validate it
2112 bool stream_exists = false;
2113 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2114 it != mStreamInfo.end(); it++) {
2115 if ((*it)->stream == newStream) {
2116 QCamera3ProcessingChannel *channel =
2117 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2118 stream_exists = true;
2119 if (channel)
2120 delete channel;
2121 (*it)->status = VALID;
2122 (*it)->stream->priv = NULL;
2123 (*it)->channel = NULL;
2124 }
2125 }
2126 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2127 //new stream
2128 stream_info_t* stream_info;
2129 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2130 if (!stream_info) {
2131 LOGE("Could not allocate stream info");
2132 rc = -ENOMEM;
2133 pthread_mutex_unlock(&mMutex);
2134 return rc;
2135 }
2136 stream_info->stream = newStream;
2137 stream_info->status = VALID;
2138 stream_info->channel = NULL;
2139 mStreamInfo.push_back(stream_info);
2140 }
2141 /* Covers Opaque ZSL and API1 F/W ZSL */
2142 if (IS_USAGE_ZSL(newStream->usage)
2143 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2144 if (zslStream != NULL) {
2145 LOGE("Multiple input/reprocess streams requested!");
2146 pthread_mutex_unlock(&mMutex);
2147 return BAD_VALUE;
2148 }
2149 zslStream = newStream;
2150 }
2151 /* Covers YUV reprocess */
2152 if (inputStream != NULL) {
2153 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2154 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2155 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2156 && inputStream->width == newStream->width
2157 && inputStream->height == newStream->height) {
2158 if (zslStream != NULL) {
2159 /* This scenario indicates multiple YUV streams with same size
2160 * as the input stream have been requested. Since the zsl stream handle
2161 * is solely used for the purpose of overriding the size of streams
2162 * which share h/w streams, we will just make a guess here as to
2163 * which of the streams is a ZSL stream; this will be refactored
2164 * once we make generic logic for streams sharing encoder output
2165 */
2166 LOGH("Warning, Multiple ip/reprocess streams requested!");
2167 }
2168 zslStream = newStream;
2169 }
2170 }
2171 }
2172
2173 /* If a zsl stream is set, we know that we have configured at least one input or
2174 bidirectional stream */
2175 if (NULL != zslStream) {
2176 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2177 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2178 mInputStreamInfo.format = zslStream->format;
2179 mInputStreamInfo.usage = zslStream->usage;
2180 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2181 mInputStreamInfo.dim.width,
2182 mInputStreamInfo.dim.height,
2183 mInputStreamInfo.format, mInputStreamInfo.usage);
2184 }
2185
2186 cleanAndSortStreamInfo();
2187 if (mMetadataChannel) {
2188 delete mMetadataChannel;
2189 mMetadataChannel = NULL;
2190 }
2191 if (mSupportChannel) {
2192 delete mSupportChannel;
2193 mSupportChannel = NULL;
2194 }
2195
2196 if (mAnalysisChannel) {
2197 delete mAnalysisChannel;
2198 mAnalysisChannel = NULL;
2199 }
2200
2201 if (mDummyBatchChannel) {
2202 delete mDummyBatchChannel;
2203 mDummyBatchChannel = NULL;
2204 }
2205
Emilian Peev7650c122017-01-19 08:24:33 -08002206 if (mDepthChannel) {
2207 mDepthChannel = NULL;
2208 }
2209
Thierry Strudel2896d122017-02-23 19:18:03 -08002210 char is_type_value[PROPERTY_VALUE_MAX];
2211 property_get("persist.camera.is_type", is_type_value, "4");
2212 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
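// persist.camera.is_type selects the image stabilization type; the EIS 3.0
// specific handling below (PPEISCORE, video buffer count) keys off this flag.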
2213
Binhao Line406f062017-05-03 14:39:44 -07002214 char property_value[PROPERTY_VALUE_MAX];
2215 property_get("persist.camera.gzoom.at", property_value, "0");
2216 int goog_zoom_at = atoi(property_value);
2217 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0);
2218 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0);
2219
2220 property_get("persist.camera.gzoom.4k", property_value, "0");
2221 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2222
Thierry Strudel3d639192016-09-09 11:52:26 -07002223 //Create metadata channel and initialize it
2224 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2225 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2226 gCamCapability[mCameraId]->color_arrangement);
2227 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2228 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002229 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002230 if (mMetadataChannel == NULL) {
2231 LOGE("failed to allocate metadata channel");
2232 rc = -ENOMEM;
2233 pthread_mutex_unlock(&mMutex);
2234 return rc;
2235 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002236 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002237 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2238 if (rc < 0) {
2239 LOGE("metadata channel initialization failed");
2240 delete mMetadataChannel;
2241 mMetadataChannel = NULL;
2242 pthread_mutex_unlock(&mMutex);
2243 return rc;
2244 }
2245
Thierry Strudel2896d122017-02-23 19:18:03 -08002246 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002247 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002248 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002249 // Keep track of preview/video stream indices.
2250 // There can be more than one preview stream, but only one video stream.
2251 int32_t video_stream_idx = -1;
2252 int32_t preview_stream_idx[streamList->num_streams];
2253 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002254 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2255 /* Allocate channel objects for the requested streams */
2256 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002257
Thierry Strudel3d639192016-09-09 11:52:26 -07002258 camera3_stream_t *newStream = streamList->streams[i];
2259 uint32_t stream_usage = newStream->usage;
2260 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2261 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2262 struct camera_info *p_info = NULL;
2263 pthread_mutex_lock(&gCamLock);
2264 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2265 pthread_mutex_unlock(&gCamLock);
2266 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2267 || IS_USAGE_ZSL(newStream->usage)) &&
2268 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002269 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002270 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002271 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2272 if (bUseCommonFeatureMask)
2273 zsl_ppmask = commonFeatureMask;
2274 else
2275 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002276 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002277 if (numStreamsOnEncoder > 0)
2278 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2279 else
2280 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002281 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002282 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002283 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002284 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002285 LOGH("Input stream configured, reprocess config");
2286 } else {
2287 //for non zsl streams find out the format
2288 switch (newStream->format) {
2289 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2290 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002291 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002292 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2293 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2294 /* add additional features to pp feature mask */
2295 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2296 mStreamConfigInfo.num_streams);
2297
2298 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2299 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2300 CAM_STREAM_TYPE_VIDEO;
2301 if (m_bTnrEnabled && m_bTnrVideo) {
2302 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2303 CAM_QCOM_FEATURE_CPP_TNR;
2304 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2305 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2306 ~CAM_QCOM_FEATURE_CDS;
2307 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002308 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2309 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2310 CAM_QTI_FEATURE_PPEISCORE;
2311 }
Binhao Line406f062017-05-03 14:39:44 -07002312 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2313 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2314 CAM_QCOM_FEATURE_GOOG_ZOOM;
2315 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002316 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002317 } else {
2318 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2319 CAM_STREAM_TYPE_PREVIEW;
2320 if (m_bTnrEnabled && m_bTnrPreview) {
2321 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2322 CAM_QCOM_FEATURE_CPP_TNR;
2323 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2324 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2325 ~CAM_QCOM_FEATURE_CDS;
2326 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002327 if(!m_bSwTnrPreview) {
2328 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2329 ~CAM_QTI_FEATURE_SW_TNR;
2330 }
Binhao Line406f062017-05-03 14:39:44 -07002331 if (is_goog_zoom_preview_enabled) {
2332 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2333 CAM_QCOM_FEATURE_GOOG_ZOOM;
2334 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002335 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002336 padding_info.width_padding = mSurfaceStridePadding;
2337 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002338 previewSize.width = (int32_t)newStream->width;
2339 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002340 }
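// For 90/270 degree stream rotation the backend stream is configured with
// swapped width and height.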
2341 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2342 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2343 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2344 newStream->height;
2345 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2346 newStream->width;
2347 }
2348 }
2349 break;
2350 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002351 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002352 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2353 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2354 if (bUseCommonFeatureMask)
2355 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2356 commonFeatureMask;
2357 else
2358 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2359 CAM_QCOM_FEATURE_NONE;
2360 } else {
2361 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2362 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2363 }
2364 break;
2365 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002366 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002367 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2368 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2369 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2370 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2371 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002372 /* Remove rotation if it is not supported
2373 for 4K LiveVideo snapshot case (online processing) */
2374 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2375 CAM_QCOM_FEATURE_ROTATION)) {
2376 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2377 &= ~CAM_QCOM_FEATURE_ROTATION;
2378 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002379 } else {
2380 if (bUseCommonFeatureMask &&
2381 isOnEncoder(maxViewfinderSize, newStream->width,
2382 newStream->height)) {
2383 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2384 } else {
2385 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2386 }
2387 }
2388 if (isZsl) {
2389 if (zslStream) {
2390 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2391 (int32_t)zslStream->width;
2392 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2393 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002394 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2395 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002396 } else {
2397 LOGE("Error, No ZSL stream identified");
2398 pthread_mutex_unlock(&mMutex);
2399 return -EINVAL;
2400 }
2401 } else if (m_bIs4KVideo) {
2402 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2403 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2404 } else if (bYuv888OverrideJpeg) {
2405 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2406 (int32_t)largeYuv888Size.width;
2407 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2408 (int32_t)largeYuv888Size.height;
2409 }
2410 break;
2411 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2412 case HAL_PIXEL_FORMAT_RAW16:
2413 case HAL_PIXEL_FORMAT_RAW10:
2414 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2415 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2416 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002417 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2418 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2419 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2420 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2421 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2422 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2423 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2424 gCamCapability[mCameraId]->dt[mPDIndex];
2425 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2426 gCamCapability[mCameraId]->vc[mPDIndex];
2427 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002428 break;
2429 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002430 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002431 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2432 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2433 break;
2434 }
2435 }
2436
2437 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2438 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2439 gCamCapability[mCameraId]->color_arrangement);
2440
2441 if (newStream->priv == NULL) {
2442 //New stream, construct channel
2443 switch (newStream->stream_type) {
2444 case CAMERA3_STREAM_INPUT:
2445 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//write access for in-place algorithms
2446 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2447 break;
2448 case CAMERA3_STREAM_BIDIRECTIONAL:
2449 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2450 GRALLOC_USAGE_HW_CAMERA_WRITE;
2451 break;
2452 case CAMERA3_STREAM_OUTPUT:
2453 /* For video encoding stream, set read/write rarely
2454 * flag so that they may be set to un-cached */
2455 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2456 newStream->usage |=
2457 (GRALLOC_USAGE_SW_READ_RARELY |
2458 GRALLOC_USAGE_SW_WRITE_RARELY |
2459 GRALLOC_USAGE_HW_CAMERA_WRITE);
2460 else if (IS_USAGE_ZSL(newStream->usage))
2461 {
2462 LOGD("ZSL usage flag skipping");
2463 }
2464 else if (newStream == zslStream
2465 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2466 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2467 } else
2468 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2469 break;
2470 default:
2471 LOGE("Invalid stream_type %d", newStream->stream_type);
2472 break;
2473 }
2474
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002475 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002476 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2477 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2478 QCamera3ProcessingChannel *channel = NULL;
2479 switch (newStream->format) {
2480 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2481 if ((newStream->usage &
2482 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2483 (streamList->operation_mode ==
2484 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2485 ) {
2486 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2487 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002488 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002489 this,
2490 newStream,
2491 (cam_stream_type_t)
2492 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2493 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2494 mMetadataChannel,
2495 0); //heap buffers are not required for HFR video channel
2496 if (channel == NULL) {
2497 LOGE("allocation of channel failed");
2498 pthread_mutex_unlock(&mMutex);
2499 return -ENOMEM;
2500 }
2501 //channel->getNumBuffers() will return 0 here so use
2502 //MAX_INFLIGHT_HFR_REQUESTS
2503 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2504 newStream->priv = channel;
2505 LOGI("num video buffers in HFR mode: %d",
2506 MAX_INFLIGHT_HFR_REQUESTS);
2507 } else {
2508 /* Copy stream contents in the HFR preview-only case to create a
2509 * dummy batch channel so that sensor streaming is in
2510 * HFR mode */
2511 if (!m_bIsVideo && (streamList->operation_mode ==
2512 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2513 mDummyBatchStream = *newStream;
2514 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002515 int bufferCount = MAX_INFLIGHT_REQUESTS;
2516 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2517 CAM_STREAM_TYPE_VIDEO) {
2518 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2519 bufferCount = MAX_VIDEO_BUFFERS;
2520 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002521 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2522 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002523 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002524 this,
2525 newStream,
2526 (cam_stream_type_t)
2527 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2528 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2529 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002530 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002531 if (channel == NULL) {
2532 LOGE("allocation of channel failed");
2533 pthread_mutex_unlock(&mMutex);
2534 return -ENOMEM;
2535 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002536 /* disable UBWC for preview, though supported,
2537 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002538 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002539 (previewSize.width == (int32_t)videoWidth)&&
2540 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002541 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002542 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002543 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002544 /* When goog_zoom is linked to the preview or video stream,
2545 * disable ubwc to the linked stream */
2546 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2547 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2548 channel->setUBWCEnabled(false);
2549 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002550 newStream->max_buffers = channel->getNumBuffers();
2551 newStream->priv = channel;
2552 }
2553 break;
2554 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2555 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2556 mChannelHandle,
2557 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002558 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002559 this,
2560 newStream,
2561 (cam_stream_type_t)
2562 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2563 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2564 mMetadataChannel);
2565 if (channel == NULL) {
2566 LOGE("allocation of YUV channel failed");
2567 pthread_mutex_unlock(&mMutex);
2568 return -ENOMEM;
2569 }
2570 newStream->max_buffers = channel->getNumBuffers();
2571 newStream->priv = channel;
2572 break;
2573 }
2574 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2575 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002576 case HAL_PIXEL_FORMAT_RAW10: {
2577 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2578 (HAL_DATASPACE_DEPTH != newStream->data_space))
2579 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002580 mRawChannel = new QCamera3RawChannel(
2581 mCameraHandle->camera_handle, mChannelHandle,
2582 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002583 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002584 this, newStream,
2585 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002586 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002587 if (mRawChannel == NULL) {
2588 LOGE("allocation of raw channel failed");
2589 pthread_mutex_unlock(&mMutex);
2590 return -ENOMEM;
2591 }
2592 newStream->max_buffers = mRawChannel->getNumBuffers();
2593 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2594 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002595 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002596 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002597 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2598 mDepthChannel = new QCamera3DepthChannel(
2599 mCameraHandle->camera_handle, mChannelHandle,
2600 mCameraHandle->ops, NULL, NULL, &padding_info,
2601 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2602 mMetadataChannel);
2603 if (NULL == mDepthChannel) {
2604 LOGE("Allocation of depth channel failed");
2605 pthread_mutex_unlock(&mMutex);
2606 return NO_MEMORY;
2607 }
2608 newStream->priv = mDepthChannel;
2609 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2610 } else {
2611 // Max live snapshot inflight buffer is 1. This is to mitigate
2612 // frame drop issues for video snapshot. The more buffers being
2613 // allocated, the more frame drops there are.
2614 mPictureChannel = new QCamera3PicChannel(
2615 mCameraHandle->camera_handle, mChannelHandle,
2616 mCameraHandle->ops, captureResultCb,
2617 setBufferErrorStatus, &padding_info, this, newStream,
2618 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2619 m_bIs4KVideo, isZsl, mMetadataChannel,
2620 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2621 if (mPictureChannel == NULL) {
2622 LOGE("allocation of channel failed");
2623 pthread_mutex_unlock(&mMutex);
2624 return -ENOMEM;
2625 }
2626 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2627 newStream->max_buffers = mPictureChannel->getNumBuffers();
2628 mPictureChannel->overrideYuvSize(
2629 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2630 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002631 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002632 break;
2633
2634 default:
2635 LOGE("not a supported format 0x%x", newStream->format);
2636 break;
2637 }
2638 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2639 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2640 } else {
2641 LOGE("Error, Unknown stream type");
2642 pthread_mutex_unlock(&mMutex);
2643 return -EINVAL;
2644 }
2645
2646 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
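// If the default format chosen for this stream type resolves to UBWC NV12,
// request UBWC allocation from gralloc via the private usage flag.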
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002647 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2648 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002649 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002650 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002651 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2652 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2653 }
2654 }
2655
2656 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2657 it != mStreamInfo.end(); it++) {
2658 if ((*it)->stream == newStream) {
2659 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2660 break;
2661 }
2662 }
2663 } else {
2664 // Channel already exists for this stream
2665 // Do nothing for now
2666 }
2667 padding_info = gCamCapability[mCameraId]->padding_info;
2668
Emilian Peev7650c122017-01-19 08:24:33 -08002669 /* Do not add entries for the input and depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002670 * since there are no real streams associated with them
2671 */
Emilian Peev7650c122017-01-19 08:24:33 -08002672 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002673 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2674 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002675 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002676 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002677 }
2678
Binhao Lincdb362a2017-04-20 13:31:54 -07002679 // By default, preview stream TNR is disabled.
2680 // Enable TNR to the preview stream if all conditions below are satisfied:
2681 // 1. video resolution <= 1080p (1920x1080).
2682 // 2. preview resolution == video resolution.
2683 // 3. video stream TNR is enabled.
2684 // 4. EIS 2.0 is selected.
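// Example (illustrative): a 1920x1080 preview paired with a 1920x1080 video stream,
// with video TNR on and EIS 2.0 selected, gets CAM_QCOM_FEATURE_CPP_TNR added to its
// feature mask (and CDS cleared); a 3840x2160 video leaves preview TNR disabled.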
2685 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2686 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2687 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2688 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2689 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2690 video_stream->width == preview_stream->width &&
2691 video_stream->height == preview_stream->height) {
2692 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2693 CAM_QCOM_FEATURE_CPP_TNR;
2694 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2695 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2696 ~CAM_QCOM_FEATURE_CDS;
2697 }
2698 }
2699
Thierry Strudel2896d122017-02-23 19:18:03 -08002700 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2701 onlyRaw = false;
2702 }
2703
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002704 // Create analysis stream all the time, even when h/w support is not available
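// Note: the analysis stream is a HAL-internal support channel that is never exposed to
// the framework; its dimension is matched to the preview size and its buffer count is
// forced to 0 below, so no analysis buffers appear in capture results.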
Thierry Strudel2896d122017-02-23 19:18:03 -08002705 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002706 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002707 cam_analysis_info_t analysisInfo;
2708 int32_t ret = NO_ERROR;
2709 ret = mCommon.getAnalysisInfo(
2710 FALSE,
2711 analysisFeatureMask,
2712 &analysisInfo);
2713 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002714 cam_color_filter_arrangement_t analysis_color_arrangement =
2715 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2716 CAM_FILTER_ARRANGEMENT_Y :
2717 gCamCapability[mCameraId]->color_arrangement);
2718 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2719 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002720 cam_dimension_t analysisDim;
2721 analysisDim = mCommon.getMatchingDimension(previewSize,
2722 analysisInfo.analysis_recommended_res);
2723
2724 mAnalysisChannel = new QCamera3SupportChannel(
2725 mCameraHandle->camera_handle,
2726 mChannelHandle,
2727 mCameraHandle->ops,
2728 &analysisInfo.analysis_padding_info,
2729 analysisFeatureMask,
2730 CAM_STREAM_TYPE_ANALYSIS,
2731 &analysisDim,
2732 (analysisInfo.analysis_format
2733 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2734 : CAM_FORMAT_YUV_420_NV21),
2735 analysisInfo.hw_analysis_supported,
2736 gCamCapability[mCameraId]->color_arrangement,
2737 this,
2738 0); // force buffer count to 0
2739 } else {
2740 LOGW("getAnalysisInfo failed, ret = %d", ret);
2741 }
2742 if (!mAnalysisChannel) {
2743 LOGW("Analysis channel cannot be created");
2744 }
2745 }
2746
Thierry Strudel3d639192016-09-09 11:52:26 -07002747 //RAW DUMP channel
2748 if (mEnableRawDump && isRawStreamRequested == false){
2749 cam_dimension_t rawDumpSize;
2750 rawDumpSize = getMaxRawSize(mCameraId);
2751 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2752 setPAAFSupport(rawDumpFeatureMask,
2753 CAM_STREAM_TYPE_RAW,
2754 gCamCapability[mCameraId]->color_arrangement);
2755 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2756 mChannelHandle,
2757 mCameraHandle->ops,
2758 rawDumpSize,
2759 &padding_info,
2760 this, rawDumpFeatureMask);
2761 if (!mRawDumpChannel) {
2762 LOGE("Raw Dump channel cannot be created");
2763 pthread_mutex_unlock(&mMutex);
2764 return -ENOMEM;
2765 }
2766 }
2767
Thierry Strudel3d639192016-09-09 11:52:26 -07002768 if (mAnalysisChannel) {
2769 cam_analysis_info_t analysisInfo;
2770 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2771 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2772 CAM_STREAM_TYPE_ANALYSIS;
2773 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2774 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002775 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002776 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2777 &analysisInfo);
2778 if (rc != NO_ERROR) {
2779 LOGE("getAnalysisInfo failed, ret = %d", rc);
2780 pthread_mutex_unlock(&mMutex);
2781 return rc;
2782 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002783 cam_color_filter_arrangement_t analysis_color_arrangement =
2784 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2785 CAM_FILTER_ARRANGEMENT_Y :
2786 gCamCapability[mCameraId]->color_arrangement);
2787 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2788 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2789 analysis_color_arrangement);
2790
Thierry Strudel3d639192016-09-09 11:52:26 -07002791 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002792 mCommon.getMatchingDimension(previewSize,
2793 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002794 mStreamConfigInfo.num_streams++;
2795 }
2796
Thierry Strudel2896d122017-02-23 19:18:03 -08002797 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002798 cam_analysis_info_t supportInfo;
2799 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2800 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2801 setPAAFSupport(callbackFeatureMask,
2802 CAM_STREAM_TYPE_CALLBACK,
2803 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002804 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002805 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002806 if (ret != NO_ERROR) {
2807 /* Ignore the error for Mono camera
2808 * because the PAAF bit mask is only set
2809 * for CAM_STREAM_TYPE_ANALYSIS stream type
2810 */
2811 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2812 LOGW("getAnalysisInfo failed, ret = %d", ret);
2813 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002814 }
2815 mSupportChannel = new QCamera3SupportChannel(
2816 mCameraHandle->camera_handle,
2817 mChannelHandle,
2818 mCameraHandle->ops,
2819 &gCamCapability[mCameraId]->padding_info,
2820 callbackFeatureMask,
2821 CAM_STREAM_TYPE_CALLBACK,
2822 &QCamera3SupportChannel::kDim,
2823 CAM_FORMAT_YUV_420_NV21,
2824 supportInfo.hw_analysis_supported,
2825 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002826 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002827 if (!mSupportChannel) {
2828 LOGE("dummy channel cannot be created");
2829 pthread_mutex_unlock(&mMutex);
2830 return -ENOMEM;
2831 }
2832 }
2833
2834 if (mSupportChannel) {
2835 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2836 QCamera3SupportChannel::kDim;
2837 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2838 CAM_STREAM_TYPE_CALLBACK;
2839 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2840 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2841 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2842 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2843 gCamCapability[mCameraId]->color_arrangement);
2844 mStreamConfigInfo.num_streams++;
2845 }
2846
2847 if (mRawDumpChannel) {
2848 cam_dimension_t rawSize;
2849 rawSize = getMaxRawSize(mCameraId);
2850 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2851 rawSize;
2852 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2853 CAM_STREAM_TYPE_RAW;
2854 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2855 CAM_QCOM_FEATURE_NONE;
2856 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2857 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2858 gCamCapability[mCameraId]->color_arrangement);
2859 mStreamConfigInfo.num_streams++;
2860 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002861
2862 if (mHdrPlusRawSrcChannel) {
2863 cam_dimension_t rawSize;
2864 rawSize = getMaxRawSize(mCameraId);
2865 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2866 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2867 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2868 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2869 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2870 gCamCapability[mCameraId]->color_arrangement);
2871 mStreamConfigInfo.num_streams++;
2872 }
2873
Thierry Strudel3d639192016-09-09 11:52:26 -07002874 /* In HFR mode, if video stream is not added, create a dummy channel so that
2875 * the ISP can run in batch mode even for the preview-only case. This channel is
2876 * never 'start'ed (no stream-on), it is only 'initialized' */
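// Illustrative example: a constrained high-speed (e.g. 120fps) session configured with
// only a preview stream still gets a CAM_STREAM_TYPE_VIDEO entry via mDummyBatchStream,
// which lets the ISP select a batched HFR sensor mode; no buffers ever flow on it.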
2877 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2878 !m_bIsVideo) {
2879 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2880 setPAAFSupport(dummyFeatureMask,
2881 CAM_STREAM_TYPE_VIDEO,
2882 gCamCapability[mCameraId]->color_arrangement);
2883 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2884 mChannelHandle,
2885 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002886 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002887 this,
2888 &mDummyBatchStream,
2889 CAM_STREAM_TYPE_VIDEO,
2890 dummyFeatureMask,
2891 mMetadataChannel);
2892 if (NULL == mDummyBatchChannel) {
2893 LOGE("creation of mDummyBatchChannel failed. "
2894 "Preview will use non-HFR sensor mode");
2895 }
2896 }
2897 if (mDummyBatchChannel) {
2898 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2899 mDummyBatchStream.width;
2900 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2901 mDummyBatchStream.height;
2902 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2903 CAM_STREAM_TYPE_VIDEO;
2904 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2905 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2906 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2907 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2908 gCamCapability[mCameraId]->color_arrangement);
2909 mStreamConfigInfo.num_streams++;
2910 }
2911
2912 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2913 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002914 m_bIs4KVideo ? 0 :
2915 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
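// Buffer count hint for the backend (assumption: 0 for 4K video lets the backend apply
// its own default); EIS 3.0 asks for the deeper MAX_VIDEO_BUFFERS queue, everything
// else uses MAX_INFLIGHT_REQUESTS.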
Thierry Strudel3d639192016-09-09 11:52:26 -07002916
2917 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2918 for (pendingRequestIterator i = mPendingRequestsList.begin();
2919 i != mPendingRequestsList.end();) {
2920 i = erasePendingRequest(i);
2921 }
2922 mPendingFrameDropList.clear();
2923 // Initialize/Reset the pending buffers list
2924 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2925 req.mPendingBufferList.clear();
2926 }
2927 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2928
Thierry Strudel3d639192016-09-09 11:52:26 -07002929 mCurJpegMeta.clear();
2930 //Get min frame duration for this stream configuration
2931 deriveMinFrameDuration();
2932
Chien-Yu Chenee335912017-02-09 17:53:20 -08002933 mFirstPreviewIntentSeen = false;
2934
2935 // Disable HDR+ if it's enabled
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002936 {
2937 Mutex::Autolock l(gHdrPlusClientLock);
2938 disableHdrPlusModeLocked();
2939 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002940
Thierry Strudel3d639192016-09-09 11:52:26 -07002941 // Update state
2942 mState = CONFIGURED;
2943
Shuzhen Wang3c077d72017-04-20 22:48:59 -07002944 mFirstMetadataCallback = true;
2945
Thierry Strudel3d639192016-09-09 11:52:26 -07002946 pthread_mutex_unlock(&mMutex);
2947
2948 return rc;
2949}
2950
2951/*===========================================================================
2952 * FUNCTION : validateCaptureRequest
2953 *
2954 * DESCRIPTION: validate a capture request from camera service
2955 *
2956 * PARAMETERS :
2957 * @request : request from framework to process
2958 *
2959 * RETURN :
2960 *
2961 *==========================================================================*/
2962int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002963 camera3_capture_request_t *request,
2964 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002965{
2966 ssize_t idx = 0;
2967 const camera3_stream_buffer_t *b;
2968 CameraMetadata meta;
2969
2970 /* Sanity check the request */
2971 if (request == NULL) {
2972 LOGE("NULL capture request");
2973 return BAD_VALUE;
2974 }
2975
2976 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2977 /*settings cannot be null for the first request*/
2978 return BAD_VALUE;
2979 }
2980
2981 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002982 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2983 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002984 LOGE("%s: Request %d: No output buffers provided!",
2985 __FUNCTION__, frameNumber);
2986 return BAD_VALUE;
2987 }
2988 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2989 LOGE("Number of buffers %d equals or exceeds maximum number of streams %d!",
2990 request->num_output_buffers, MAX_NUM_STREAMS);
2991 return BAD_VALUE;
2992 }
2993 if (request->input_buffer != NULL) {
2994 b = request->input_buffer;
2995 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2996 LOGE("Request %d: Buffer %ld: Status not OK!",
2997 frameNumber, (long)idx);
2998 return BAD_VALUE;
2999 }
3000 if (b->release_fence != -1) {
3001 LOGE("Request %d: Buffer %ld: Has a release fence!",
3002 frameNumber, (long)idx);
3003 return BAD_VALUE;
3004 }
3005 if (b->buffer == NULL) {
3006 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3007 frameNumber, (long)idx);
3008 return BAD_VALUE;
3009 }
3010 }
3011
3012 // Validate all buffers
3013 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003014 if (b == NULL) {
3015 return BAD_VALUE;
3016 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003017 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003018 QCamera3ProcessingChannel *channel =
3019 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3020 if (channel == NULL) {
3021 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3022 frameNumber, (long)idx);
3023 return BAD_VALUE;
3024 }
3025 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3026 LOGE("Request %d: Buffer %ld: Status not OK!",
3027 frameNumber, (long)idx);
3028 return BAD_VALUE;
3029 }
3030 if (b->release_fence != -1) {
3031 LOGE("Request %d: Buffer %ld: Has a release fence!",
3032 frameNumber, (long)idx);
3033 return BAD_VALUE;
3034 }
3035 if (b->buffer == NULL) {
3036 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3037 frameNumber, (long)idx);
3038 return BAD_VALUE;
3039 }
3040 if (*(b->buffer) == NULL) {
3041 LOGE("Request %d: Buffer %ld: NULL private handle!",
3042 frameNumber, (long)idx);
3043 return BAD_VALUE;
3044 }
3045 idx++;
3046 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003047 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003048 return NO_ERROR;
3049}
3050
3051/*===========================================================================
3052 * FUNCTION : deriveMinFrameDuration
3053 *
3054 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3055 * on currently configured streams.
3056 *
3057 * PARAMETERS : NONE
3058 *
3059 * RETURN : NONE
3060 *
3061 *==========================================================================*/
3062void QCamera3HardwareInterface::deriveMinFrameDuration()
3063{
3064 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3065
3066 maxJpegDim = 0;
3067 maxProcessedDim = 0;
3068 maxRawDim = 0;
3069
3070 // Figure out maximum jpeg, processed, and raw dimensions
3071 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3072 it != mStreamInfo.end(); it++) {
3073
3074 // Input stream doesn't have valid stream_type
3075 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3076 continue;
3077
3078 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3079 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3080 if (dimension > maxJpegDim)
3081 maxJpegDim = dimension;
3082 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3083 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3084 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3085 if (dimension > maxRawDim)
3086 maxRawDim = dimension;
3087 } else {
3088 if (dimension > maxProcessedDim)
3089 maxProcessedDim = dimension;
3090 }
3091 }
3092
3093 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3094 MAX_SIZES_CNT);
3095
3096 //Assume all jpeg dimensions are in processed dimensions.
3097 if (maxJpegDim > maxProcessedDim)
3098 maxProcessedDim = maxJpegDim;
3099 //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
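// Worked example (illustrative): with a 12 MP max processed/JPEG dimension and sensor
// RAW sizes of 12 MP and 16 MP, the loop below settles on the 12 MP RAW entry, and its
// minimum duration is picked further down.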
3100 if (maxProcessedDim > maxRawDim) {
3101 maxRawDim = INT32_MAX;
3102
3103 for (size_t i = 0; i < count; i++) {
3104 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3105 gCamCapability[mCameraId]->raw_dim[i].height;
3106 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3107 maxRawDim = dimension;
3108 }
3109 }
3110
3111 //Find minimum durations for processed, jpeg, and raw
3112 for (size_t i = 0; i < count; i++) {
3113 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3114 gCamCapability[mCameraId]->raw_dim[i].height) {
3115 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3116 break;
3117 }
3118 }
3119 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3120 for (size_t i = 0; i < count; i++) {
3121 if (maxProcessedDim ==
3122 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3123 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3124 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3125 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3126 break;
3127 }
3128 }
3129}
3130
3131/*===========================================================================
3132 * FUNCTION : getMinFrameDuration
3133 *
3134 * DESCRIPTION: get minimum frame duration based on the per-stream minimum frame
3135 * durations and the current request configuration.
3136 *
3137 * PARAMETERS : @request: request sent by the framework
3138 *
3139 * RETURN : min frame duration for a particular request
3140 *
3141 *==========================================================================*/
3142int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3143{
3144 bool hasJpegStream = false;
3145 bool hasRawStream = false;
3146 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3147 const camera3_stream_t *stream = request->output_buffers[i].stream;
3148 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3149 hasJpegStream = true;
3150 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3151 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3152 stream->format == HAL_PIXEL_FORMAT_RAW16)
3153 hasRawStream = true;
3154 }
3155
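// Illustrative example: with minimum durations of 33ms (raw), 16ms (processed) and
// 50ms (jpeg), a request without a BLOB buffer gets 33ms, while a request that
// includes a BLOB buffer gets 50ms.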
3156 if (!hasJpegStream)
3157 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3158 else
3159 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3160}
3161
3162/*===========================================================================
3163 * FUNCTION : handleBuffersDuringFlushLock
3164 *
3165 * DESCRIPTION: Account for buffers returned from back-end during flush
3166 * This function is executed while mMutex is held by the caller.
3167 *
3168 * PARAMETERS :
3169 * @buffer: image buffer for the callback
3170 *
3171 * RETURN :
3172 *==========================================================================*/
3173void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3174{
3175 bool buffer_found = false;
3176 for (List<PendingBuffersInRequest>::iterator req =
3177 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3178 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3179 for (List<PendingBufferInfo>::iterator i =
3180 req->mPendingBufferList.begin();
3181 i != req->mPendingBufferList.end(); i++) {
3182 if (i->buffer == buffer->buffer) {
3183 mPendingBuffersMap.numPendingBufsAtFlush--;
3184 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3185 buffer->buffer, req->frame_number,
3186 mPendingBuffersMap.numPendingBufsAtFlush);
3187 buffer_found = true;
3188 break;
3189 }
3190 }
3191 if (buffer_found) {
3192 break;
3193 }
3194 }
3195 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3196 //signal the flush()
3197 LOGD("All buffers returned to HAL. Continue flush");
3198 pthread_cond_signal(&mBuffersCond);
3199 }
3200}
3201
Thierry Strudel3d639192016-09-09 11:52:26 -07003202/*===========================================================================
3203 * FUNCTION : handleBatchMetadata
3204 *
3205 * DESCRIPTION: Handles metadata buffer callback in batch mode
3206 *
3207 * PARAMETERS : @metadata_buf: metadata buffer
3208 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3209 * the meta buf in this method
3210 *
3211 * RETURN :
3212 *
3213 *==========================================================================*/
3214void QCamera3HardwareInterface::handleBatchMetadata(
3215 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3216{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003217 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003218
3219 if (NULL == metadata_buf) {
3220 LOGE("metadata_buf is NULL");
3221 return;
3222 }
3223 /* In batch mode, the metadata will contain the frame number and timestamp of
3224 * the last frame in the batch. Eg: a batch containing buffers from request
3225 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3226 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3227 * multiple process_capture_results */
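// Illustrative walk-through: for a batch of requests 5..8 this callback carries frame
// number 8; the loop below re-emits the metadata once per request, patching in the
// inferred frame numbers 5, 6, 7, 8 and timestamps spaced by NSEC_PER_SEC/mHFRVideoFps.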
3228 metadata_buffer_t *metadata =
3229 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3230 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3231 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3232 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3233 uint32_t frame_number = 0, urgent_frame_number = 0;
3234 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3235 bool invalid_metadata = false;
3236 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3237 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003238 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003239
3240 int32_t *p_frame_number_valid =
3241 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3242 uint32_t *p_frame_number =
3243 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3244 int64_t *p_capture_time =
3245 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3246 int32_t *p_urgent_frame_number_valid =
3247 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3248 uint32_t *p_urgent_frame_number =
3249 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3250
3251 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3252 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3253 (NULL == p_urgent_frame_number)) {
3254 LOGE("Invalid metadata");
3255 invalid_metadata = true;
3256 } else {
3257 frame_number_valid = *p_frame_number_valid;
3258 last_frame_number = *p_frame_number;
3259 last_frame_capture_time = *p_capture_time;
3260 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3261 last_urgent_frame_number = *p_urgent_frame_number;
3262 }
3263
3264 /* In batchmode, when no video buffers are requested, set_parms are sent
3265 * for every capture_request. The difference between consecutive urgent
3266 * frame numbers and frame numbers should be used to interpolate the
3267 * corresponding frame numbers and time stamps */
3268 pthread_mutex_lock(&mMutex);
3269 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003270 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3271 if(idx < 0) {
3272 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3273 last_urgent_frame_number);
3274 mState = ERROR;
3275 pthread_mutex_unlock(&mMutex);
3276 return;
3277 }
3278 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003279 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3280 first_urgent_frame_number;
3281
3282 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3283 urgent_frame_number_valid,
3284 first_urgent_frame_number, last_urgent_frame_number);
3285 }
3286
3287 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003288 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3289 if(idx < 0) {
3290 LOGE("Invalid frame number received: %d. Irrecoverable error",
3291 last_frame_number);
3292 mState = ERROR;
3293 pthread_mutex_unlock(&mMutex);
3294 return;
3295 }
3296 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003297 frameNumDiff = last_frame_number + 1 -
3298 first_frame_number;
3299 mPendingBatchMap.removeItem(last_frame_number);
3300
3301 LOGD("frm: valid: %d frm_num: %d - %d",
3302 frame_number_valid,
3303 first_frame_number, last_frame_number);
3304
3305 }
3306 pthread_mutex_unlock(&mMutex);
3307
3308 if (urgent_frame_number_valid || frame_number_valid) {
3309 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3310 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3311 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3312 urgentFrameNumDiff, last_urgent_frame_number);
3313 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3314 LOGE("frameNumDiff: %d frameNum: %d",
3315 frameNumDiff, last_frame_number);
3316 }
3317
3318 for (size_t i = 0; i < loopCount; i++) {
3319 /* handleMetadataWithLock is called even for invalid_metadata for
3320 * pipeline depth calculation */
3321 if (!invalid_metadata) {
3322 /* Infer frame number. Batch metadata contains frame number of the
3323 * last frame */
3324 if (urgent_frame_number_valid) {
3325 if (i < urgentFrameNumDiff) {
3326 urgent_frame_number =
3327 first_urgent_frame_number + i;
3328 LOGD("inferred urgent frame_number: %d",
3329 urgent_frame_number);
3330 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3331 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3332 } else {
3333 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3334 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3335 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3336 }
3337 }
3338
3339 /* Infer frame number. Batch metadata contains frame number of the
3340 * last frame */
3341 if (frame_number_valid) {
3342 if (i < frameNumDiff) {
3343 frame_number = first_frame_number + i;
3344 LOGD("inferred frame_number: %d", frame_number);
3345 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3346 CAM_INTF_META_FRAME_NUMBER, frame_number);
3347 } else {
3348 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3349 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3350 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3351 }
3352 }
3353
3354 if (last_frame_capture_time) {
3355 //Infer timestamp
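// Worked example (illustrative): with mHFRVideoFps = 120 and loopCount = 4, inferred
// timestamps are spaced ~8.33ms apart, and the first frame's timestamp is
// last_frame_capture_time - 3 * (NSEC_PER_SEC / 120).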
3356 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003357 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003358 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003359 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003360 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3361 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3362 LOGD("batch capture_time: %lld, capture_time: %lld",
3363 last_frame_capture_time, capture_time);
3364 }
3365 }
3366 pthread_mutex_lock(&mMutex);
3367 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003368 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003369 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3370 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003371 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003372 pthread_mutex_unlock(&mMutex);
3373 }
3374
3375 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003376 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003377 mMetadataChannel->bufDone(metadata_buf);
3378 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003379 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003380 }
3381}
3382
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003383void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3384 camera3_error_msg_code_t errorCode)
3385{
3386 camera3_notify_msg_t notify_msg;
3387 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3388 notify_msg.type = CAMERA3_MSG_ERROR;
3389 notify_msg.message.error.error_code = errorCode;
3390 notify_msg.message.error.error_stream = NULL;
3391 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003392 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003393
3394 return;
3395}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003396
3397/*===========================================================================
3398 * FUNCTION : sendPartialMetadataWithLock
3399 *
3400 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3401 *
3402 * PARAMETERS : @metadata: metadata buffer
3403 * @requestIter: The iterator for the pending capture request for
3404 * which the partial result is being sent
3405 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3406 * last urgent metadata in a batch. Always true for non-batch mode
3407 *
3408 * RETURN :
3409 *
3410 *==========================================================================*/
3411
3412void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3413 metadata_buffer_t *metadata,
3414 const pendingRequestIterator requestIter,
3415 bool lastUrgentMetadataInBatch)
3416{
3417 camera3_capture_result_t result;
3418 memset(&result, 0, sizeof(camera3_capture_result_t));
3419
3420 requestIter->partial_result_cnt++;
3421
3422 // Extract 3A metadata
3423 result.result = translateCbUrgentMetadataToResultMetadata(
3424 metadata, lastUrgentMetadataInBatch);
3425 // Populate metadata result
3426 result.frame_number = requestIter->frame_number;
3427 result.num_output_buffers = 0;
3428 result.output_buffers = NULL;
3429 result.partial_result = requestIter->partial_result_cnt;
3430
3431 {
3432 Mutex::Autolock l(gHdrPlusClientLock);
3433 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3434 // Notify HDR+ client about the partial metadata.
3435 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3436 result.partial_result == PARTIAL_RESULT_COUNT);
3437 }
3438 }
3439
3440 orchestrateResult(&result);
3441 LOGD("urgent frame_number = %u", result.frame_number);
3442 free_camera_metadata((camera_metadata_t *)result.result);
3443}
3444
Thierry Strudel3d639192016-09-09 11:52:26 -07003445/*===========================================================================
3446 * FUNCTION : handleMetadataWithLock
3447 *
3448 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3449 *
3450 * PARAMETERS : @metadata_buf: metadata buffer
3451 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3452 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003453 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3454 * last urgent metadata in a batch. Always true for non-batch mode
3455 * @lastMetadataInBatch: Boolean to indicate whether this is the
3456 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003457 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3458 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003459 *
3460 * RETURN :
3461 *
3462 *==========================================================================*/
3463void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003464 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003465 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3466 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003467{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003468 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003469 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3470 //during flush do not send metadata from this thread
3471 LOGD("not sending metadata during flush or when mState is error");
3472 if (free_and_bufdone_meta_buf) {
3473 mMetadataChannel->bufDone(metadata_buf);
3474 free(metadata_buf);
3475 }
3476 return;
3477 }
3478
3479 //not in flush
3480 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3481 int32_t frame_number_valid, urgent_frame_number_valid;
3482 uint32_t frame_number, urgent_frame_number;
3483 int64_t capture_time;
3484 nsecs_t currentSysTime;
3485
3486 int32_t *p_frame_number_valid =
3487 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3488 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3489 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3490 int32_t *p_urgent_frame_number_valid =
3491 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3492 uint32_t *p_urgent_frame_number =
3493 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3494 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3495 metadata) {
3496 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3497 *p_frame_number_valid, *p_frame_number);
3498 }
3499
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003500 camera_metadata_t *resultMetadata = nullptr;
3501
Thierry Strudel3d639192016-09-09 11:52:26 -07003502 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3503 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3504 LOGE("Invalid metadata");
3505 if (free_and_bufdone_meta_buf) {
3506 mMetadataChannel->bufDone(metadata_buf);
3507 free(metadata_buf);
3508 }
3509 goto done_metadata;
3510 }
3511 frame_number_valid = *p_frame_number_valid;
3512 frame_number = *p_frame_number;
3513 capture_time = *p_capture_time;
3514 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3515 urgent_frame_number = *p_urgent_frame_number;
3516 currentSysTime = systemTime(CLOCK_MONOTONIC);
3517
3518 // Detect if buffers from any requests are overdue
3519 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003520 int64_t timeout;
3521 {
3522 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3523 // If there is a pending HDR+ request, the following requests may be blocked until the
3524 // HDR+ request is done. So allow a longer timeout.
3525 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3526 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3527 }
3528
3529 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003530 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003531 assert(missed.stream->priv);
3532 if (missed.stream->priv) {
3533 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3534 assert(ch->mStreams[0]);
3535 if (ch->mStreams[0]) {
3536 LOGE("Cancel missing frame = %d, buffer = %p,"
3537 "stream type = %d, stream format = %d",
3538 req.frame_number, missed.buffer,
3539 ch->mStreams[0]->getMyType(), missed.stream->format);
3540 ch->timeoutFrame(req.frame_number);
3541 }
3542 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003543 }
3544 }
3545 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003546 //For the very first metadata callback, regardless of whether it contains a valid
3547 //frame number, send the partial metadata for the jump-starting requests.
3548 //Note that this has to be done even if the metadata doesn't contain a valid
3549 //urgent frame number, because when only 1 request is ever submitted to the HAL,
3550 //there won't be a subsequent valid urgent frame number.
3551 if (mFirstMetadataCallback) {
3552 for (pendingRequestIterator i =
3553 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3554 if (i->bUseFirstPartial) {
3555 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3556 }
3557 }
3558 mFirstMetadataCallback = false;
3559 }
3560
Thierry Strudel3d639192016-09-09 11:52:26 -07003561 //Partial result on process_capture_result for timestamp
3562 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003563 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003564
3565 //Received an urgent frame number, handle it
3566 //using partial results
3567 for (pendingRequestIterator i =
3568 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3569 LOGD("Iterator Frame = %d urgent frame = %d",
3570 i->frame_number, urgent_frame_number);
3571
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003572 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003573 (i->partial_result_cnt == 0)) {
3574 LOGE("Error: HAL missed urgent metadata for frame number %d",
3575 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003576 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003577 }
3578
3579 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003580 i->partial_result_cnt == 0) {
3581 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003582 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3583 // Instant AEC settled for this frame.
3584 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3585 mInstantAECSettledFrameNumber = urgent_frame_number;
3586 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003587 break;
3588 }
3589 }
3590 }
3591
3592 if (!frame_number_valid) {
3593 LOGD("Not a valid normal frame number, used as SOF only");
3594 if (free_and_bufdone_meta_buf) {
3595 mMetadataChannel->bufDone(metadata_buf);
3596 free(metadata_buf);
3597 }
3598 goto done_metadata;
3599 }
3600 LOGH("valid frame_number = %u, capture_time = %lld",
3601 frame_number, capture_time);
3602
Emilian Peev7650c122017-01-19 08:24:33 -08003603 if (metadata->is_depth_data_valid) {
3604 handleDepthDataLocked(metadata->depth_data, frame_number);
3605 }
3606
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003607 // Check whether any stream buffer corresponding to this frame is dropped or not.
3608 // If dropped, send an ERROR_BUFFER for the corresponding stream.
3609 // Alternatively, if instant AEC is enabled, frames need to be dropped until AEC has settled.
3610 for (auto & pendingRequest : mPendingRequestsList) {
3611 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3612 mInstantAECSettledFrameNumber)) {
3613 camera3_notify_msg_t notify_msg = {};
3614 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003615 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003616 QCamera3ProcessingChannel *channel =
3617 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003618 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003619 if (p_cam_frame_drop) {
3620 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003621 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003622 // Got the stream ID for drop frame.
3623 dropFrame = true;
3624 break;
3625 }
3626 }
3627 } else {
3628 // This is instant AEC case.
3629 // For instant AEC, drop the stream until AEC is settled.
3630 dropFrame = true;
3631 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003632
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003633 if (dropFrame) {
3634 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3635 if (p_cam_frame_drop) {
3636 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003637 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003638 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003639 } else {
3640 // For instant AEC, inform frame drop and frame number
3641 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3642 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003643 pendingRequest.frame_number, streamID,
3644 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003645 }
3646 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003647 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003648 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003649 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003650 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003651 if (p_cam_frame_drop) {
3652 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003653 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003654 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003655 } else {
3656 // For instant AEC, inform frame drop and frame number
3657 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3658 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003659 pendingRequest.frame_number, streamID,
3660 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003661 }
3662 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003663 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003664 PendingFrameDrop.stream_ID = streamID;
3665 // Add the Frame drop info to mPendingFrameDropList
3666 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003667 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003668 }
3669 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003670 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003671
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003672 for (auto & pendingRequest : mPendingRequestsList) {
3673 // Find the pending request with the frame number.
3674 if (pendingRequest.frame_number == frame_number) {
3675 // Update the sensor timestamp.
3676 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003677
Thierry Strudel3d639192016-09-09 11:52:26 -07003678
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003679 /* Set the timestamp in display metadata so that clients aware of
3680 private_handle, such as VT, can use these unmodified timestamps.
3681 The camera framework is unaware of this timestamp and cannot change it */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003682 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003683
Thierry Strudel3d639192016-09-09 11:52:26 -07003684 // Find channel requiring metadata, meaning internal offline postprocess
3685 // is needed.
3686 //TODO: for now, we don't support two streams requiring metadata at the same time.
3687 // (because we are not making copies, and the metadata buffer is not reference counted.)
3688 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003689 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3690 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003691 if (iter->need_metadata) {
3692 internalPproc = true;
3693 QCamera3ProcessingChannel *channel =
3694 (QCamera3ProcessingChannel *)iter->stream->priv;
3695 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003696 if(p_is_metabuf_queued != NULL) {
3697 *p_is_metabuf_queued = true;
3698 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003699 break;
3700 }
3701 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003702 for (auto itr = pendingRequest.internalRequestList.begin();
3703 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003704 if (itr->need_metadata) {
3705 internalPproc = true;
3706 QCamera3ProcessingChannel *channel =
3707 (QCamera3ProcessingChannel *)itr->stream->priv;
3708 channel->queueReprocMetadata(metadata_buf);
3709 break;
3710 }
3711 }
3712
Thierry Strudel54dc9782017-02-15 12:12:10 -08003713 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003714
3715 bool *enableZsl = nullptr;
3716 if (gExposeEnableZslKey) {
3717 enableZsl = &pendingRequest.enableZsl;
3718 }
3719
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003720 resultMetadata = translateFromHalMetadata(metadata,
3721 pendingRequest.timestamp, pendingRequest.request_id,
3722 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3723 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003724 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003725 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003726 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003727 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003728 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003729 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003730
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003731 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003732
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003733 if (pendingRequest.blob_request) {
3734 //Dump tuning metadata if enabled and available
3735 char prop[PROPERTY_VALUE_MAX];
3736 memset(prop, 0, sizeof(prop));
3737 property_get("persist.camera.dumpmetadata", prop, "0");
3738 int32_t enabled = atoi(prop);
3739 if (enabled && metadata->is_tuning_params_valid) {
3740 dumpMetadataToFile(metadata->tuning_params,
3741 mMetaFrameCount,
3742 enabled,
3743 "Snapshot",
3744 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003745 }
3746 }
3747
3748 if (!internalPproc) {
3749 LOGD("couldn't find need_metadata for this metadata");
3750 // Return metadata buffer
3751 if (free_and_bufdone_meta_buf) {
3752 mMetadataChannel->bufDone(metadata_buf);
3753 free(metadata_buf);
3754 }
3755 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003756
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003757 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003758 }
3759 }
3760
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003761 // Try to send out shutter callbacks and capture results.
3762 handlePendingResultsWithLock(frame_number, resultMetadata);
3763 return;
3764
Thierry Strudel3d639192016-09-09 11:52:26 -07003765done_metadata:
3766 for (pendingRequestIterator i = mPendingRequestsList.begin();
3767 i != mPendingRequestsList.end() ;i++) {
3768 i->pipeline_depth++;
3769 }
3770 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3771 unblockRequestIfNecessary();
3772}
3773
3774/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003775 * FUNCTION : handleDepthDataLocked
3776 *
3777 * DESCRIPTION: Handles incoming depth data
3778 *
3779 * PARAMETERS : @depthData : Depth data
3780 * @frameNumber: Frame number of the incoming depth data
3781 *
3782 * RETURN :
3783 *
3784 *==========================================================================*/
3785void QCamera3HardwareInterface::handleDepthDataLocked(
3786 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3787 uint32_t currentFrameNumber;
3788 buffer_handle_t *depthBuffer;
3789
3790 if (nullptr == mDepthChannel) {
3791 LOGE("Depth channel not present!");
3792 return;
3793 }
3794
3795 camera3_stream_buffer_t resultBuffer =
3796 {.acquire_fence = -1,
3797 .release_fence = -1,
3798 .status = CAMERA3_BUFFER_STATUS_OK,
3799 .buffer = nullptr,
3800 .stream = mDepthChannel->getStream()};
3801 camera3_capture_result_t result =
3802 {.result = nullptr,
3803 .num_output_buffers = 1,
3804 .output_buffers = &resultBuffer,
3805 .partial_result = 0,
3806 .frame_number = 0};
3807
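// Drain queued depth buffers in frame-number order: buffers older than the incoming
// frame are returned with CAMERA3_BUFFER_STATUS_ERROR (their depth data never arrived),
// the matching frame gets populateDepthData(), and anything newer stays queued.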
3808 do {
3809 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3810 if (nullptr == depthBuffer) {
3811 break;
3812 }
3813
3814 result.frame_number = currentFrameNumber;
3815 resultBuffer.buffer = depthBuffer;
3816 if (currentFrameNumber == frameNumber) {
3817 int32_t rc = mDepthChannel->populateDepthData(depthData,
3818 frameNumber);
3819 if (NO_ERROR != rc) {
3820 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3821 } else {
3822 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3823 }
3824 } else if (currentFrameNumber > frameNumber) {
3825 break;
3826 } else {
3827 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3828 {{currentFrameNumber, mDepthChannel->getStream(),
3829 CAMERA3_MSG_ERROR_BUFFER}}};
3830 orchestrateNotify(&notify_msg);
3831
3832 LOGE("Depth buffer for frame number: %d is missing "
3833 "returning back!", currentFrameNumber);
3834 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3835 }
3836 mDepthChannel->unmapBuffer(currentFrameNumber);
3837
3838 orchestrateResult(&result);
3839 } while (currentFrameNumber < frameNumber);
3840}
3841
3842/*===========================================================================
3843 * FUNCTION : notifyErrorFoPendingDepthData
3844 *
3845 * DESCRIPTION: Returns error for any pending depth buffers
3846 *
3847 * PARAMETERS : depthCh - depth channel that needs to get flushed
3848 *
3849 * RETURN :
3850 *
3851 *==========================================================================*/
3852void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3853 QCamera3DepthChannel *depthCh) {
3854 uint32_t currentFrameNumber;
3855 buffer_handle_t *depthBuffer;
3856
3857 if (nullptr == depthCh) {
3858 return;
3859 }
3860
3861 camera3_notify_msg_t notify_msg =
3862 {.type = CAMERA3_MSG_ERROR,
3863 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3864 camera3_stream_buffer_t resultBuffer =
3865 {.acquire_fence = -1,
3866 .release_fence = -1,
3867 .buffer = nullptr,
3868 .stream = depthCh->getStream(),
3869 .status = CAMERA3_BUFFER_STATUS_ERROR};
3870 camera3_capture_result_t result =
3871 {.result = nullptr,
3872 .frame_number = 0,
3873 .num_output_buffers = 1,
3874 .partial_result = 0,
3875 .output_buffers = &resultBuffer};
3876
3877 while (nullptr !=
3878 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3879 depthCh->unmapBuffer(currentFrameNumber);
3880
3881 notify_msg.message.error.frame_number = currentFrameNumber;
3882 orchestrateNotify(&notify_msg);
3883
3884 resultBuffer.buffer = depthBuffer;
3885 result.frame_number = currentFrameNumber;
3886 orchestrateResult(&result);
3887 };
3888}
3889
3890/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003891 * FUNCTION : hdrPlusPerfLock
3892 *
3893 * DESCRIPTION: perf lock for HDR+ using custom intent
3894 *
3895 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3896 *
3897 * RETURN : None
3898 *
3899 *==========================================================================*/
3900void QCamera3HardwareInterface::hdrPlusPerfLock(
3901 mm_camera_super_buf_t *metadata_buf)
3902{
3903 if (NULL == metadata_buf) {
3904 LOGE("metadata_buf is NULL");
3905 return;
3906 }
3907 metadata_buffer_t *metadata =
3908 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3909 int32_t *p_frame_number_valid =
3910 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3911 uint32_t *p_frame_number =
3912 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3913
3914 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3915 LOGE("%s: Invalid metadata", __func__);
3916 return;
3917 }
3918
3919 //acquire perf lock for 5 sec after the last HDR+ frame is captured
3920 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3921 if ((p_frame_number != NULL) &&
3922 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003923 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003924 }
3925 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003926}
3927
3928/*===========================================================================
3929 * FUNCTION : handleInputBufferWithLock
3930 *
3931 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3932 *
3933 * PARAMETERS : @frame_number: frame number of the input buffer
3934 *
3935 * RETURN :
3936 *
3937 *==========================================================================*/
3938void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3939{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003940 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003941 pendingRequestIterator i = mPendingRequestsList.begin();
3942 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3943 i++;
3944 }
3945 if (i != mPendingRequestsList.end() && i->input_buffer) {
3946 //found the right request
3947 if (!i->shutter_notified) {
3948 CameraMetadata settings;
3949 camera3_notify_msg_t notify_msg;
3950 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3951 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3952 if(i->settings) {
3953 settings = i->settings;
3954 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3955 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3956 } else {
3957 LOGE("No timestamp in input settings! Using current one.");
3958 }
3959 } else {
3960 LOGE("Input settings missing!");
3961 }
3962
3963 notify_msg.type = CAMERA3_MSG_SHUTTER;
3964 notify_msg.message.shutter.frame_number = frame_number;
3965 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003966 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003967 i->shutter_notified = true;
3968 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3969 i->frame_number, notify_msg.message.shutter.timestamp);
3970 }
3971
3972 if (i->input_buffer->release_fence != -1) {
3973 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3974 close(i->input_buffer->release_fence);
3975 if (rc != OK) {
3976 LOGE("input buffer sync wait failed %d", rc);
3977 }
3978 }
3979
3980 camera3_capture_result result;
3981 memset(&result, 0, sizeof(camera3_capture_result));
3982 result.frame_number = frame_number;
3983 result.result = i->settings;
3984 result.input_buffer = i->input_buffer;
3985 result.partial_result = PARTIAL_RESULT_COUNT;
3986
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003987 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003988 LOGD("Input request metadata and input buffer frame_number = %u",
3989 i->frame_number);
3990 i = erasePendingRequest(i);
3991 } else {
3992 LOGE("Could not find input request for frame number %d", frame_number);
3993 }
3994}
3995
3996/*===========================================================================
3997 * FUNCTION : handleBufferWithLock
3998 *
3999 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4000 *
4001 * PARAMETERS : @buffer: image buffer for the callback
4002 * @frame_number: frame number of the image buffer
4003 *
4004 * RETURN :
4005 *
4006 *==========================================================================*/
4007void QCamera3HardwareInterface::handleBufferWithLock(
4008 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4009{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004010 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004011
4012 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4013 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4014 }
4015
Thierry Strudel3d639192016-09-09 11:52:26 -07004016 /* Nothing to be done during error state */
4017 if ((ERROR == mState) || (DEINIT == mState)) {
4018 return;
4019 }
4020 if (mFlushPerf) {
4021 handleBuffersDuringFlushLock(buffer);
4022 return;
4023 }
4024 //not in flush
4025 // If the frame number doesn't exist in the pending request list,
4026 // directly send the buffer to the frameworks, and update pending buffers map
4027 // Otherwise, book-keep the buffer.
4028 pendingRequestIterator i = mPendingRequestsList.begin();
4029 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4030 i++;
4031 }
4032 if (i == mPendingRequestsList.end()) {
4033        // Verify that all pending requests' frame numbers are greater
4034 for (pendingRequestIterator j = mPendingRequestsList.begin();
4035 j != mPendingRequestsList.end(); j++) {
4036 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
4037 LOGW("Error: pending live frame number %d is smaller than %d",
4038 j->frame_number, frame_number);
4039 }
4040 }
4041 camera3_capture_result_t result;
4042 memset(&result, 0, sizeof(camera3_capture_result_t));
4043 result.result = NULL;
4044 result.frame_number = frame_number;
4045 result.num_output_buffers = 1;
4046 result.partial_result = 0;
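        // If this buffer belongs to a frame marked as dropped, flag it with
        // CAMERA3_BUFFER_STATUS_ERROR before returning it to the framework.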
4047 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4048 m != mPendingFrameDropList.end(); m++) {
4049 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4050 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4051 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4052 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4053 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4054 frame_number, streamID);
4055 m = mPendingFrameDropList.erase(m);
4056 break;
4057 }
4058 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004059 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07004060 result.output_buffers = buffer;
4061 LOGH("result frame_number = %d, buffer = %p",
4062 frame_number, buffer->buffer);
4063
4064 mPendingBuffersMap.removeBuf(buffer->buffer);
4065
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004066 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004067 } else {
4068 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004069 if (i->input_buffer->release_fence != -1) {
4070 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
4071 close(i->input_buffer->release_fence);
4072 if (rc != OK) {
4073 LOGE("input buffer sync wait failed %d", rc);
4074 }
4075 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004076 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004077
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004078 // Put buffer into the pending request
4079 for (auto &requestedBuffer : i->buffers) {
4080 if (requestedBuffer.stream == buffer->stream) {
4081 if (requestedBuffer.buffer != nullptr) {
4082 LOGE("Error: buffer is already set");
4083 } else {
4084 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
4085 sizeof(camera3_stream_buffer_t));
4086 *(requestedBuffer.buffer) = *buffer;
4087 LOGH("cache buffer %p at result frame_number %u",
4088 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07004089 }
4090 }
4091 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004092
4093 if (i->input_buffer) {
4094 // For a reprocessing request, try to send out shutter callback and result metadata.
4095 handlePendingResultsWithLock(frame_number, nullptr);
4096 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004097 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004098
4099 if (mPreviewStarted == false) {
4100 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4101 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004102 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4103
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004104 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4105 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4106 mPreviewStarted = true;
4107
4108 // Set power hint for preview
4109 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4110 }
4111 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004112}
4113
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004114void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
4115 const camera_metadata_t *resultMetadata)
4116{
4117 // Find the pending request for this result metadata.
4118 auto requestIter = mPendingRequestsList.begin();
4119 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4120 requestIter++;
4121 }
4122
4123 if (requestIter == mPendingRequestsList.end()) {
4124 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4125 return;
4126 }
4127
4128 // Update the result metadata
4129 requestIter->resultMetadata = resultMetadata;
4130
4131 // Check what type of request this is.
4132 bool liveRequest = false;
4133 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004134 // HDR+ request doesn't have partial results.
4135 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004136 } else if (requestIter->input_buffer != nullptr) {
4137 // Reprocessing request result is the same as settings.
4138 requestIter->resultMetadata = requestIter->settings;
4139 // Reprocessing request doesn't have partial results.
4140 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4141 } else {
4142 liveRequest = true;
4143 requestIter->partial_result_cnt++;
4144 mPendingLiveRequest--;
4145
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004146 {
4147 Mutex::Autolock l(gHdrPlusClientLock);
4148 // For a live request, send the metadata to HDR+ client.
4149 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4150 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4151 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4152 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004153 }
4154 }
4155
4156 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4157 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
4158 bool readyToSend = true;
4159
4160 // Iterate through the pending requests to send out shutter callbacks and results that are
4161 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4162 // live requests that don't have result metadata yet.
4163 auto iter = mPendingRequestsList.begin();
4164 while (iter != mPendingRequestsList.end()) {
4165 // Check if current pending request is ready. If it's not ready, the following pending
4166 // requests are also not ready.
4167 if (readyToSend && iter->resultMetadata == nullptr) {
4168 readyToSend = false;
4169 }
4170
4171 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4172
4173 std::vector<camera3_stream_buffer_t> outputBuffers;
4174
4175 camera3_capture_result_t result = {};
4176 result.frame_number = iter->frame_number;
4177 result.result = iter->resultMetadata;
4178 result.partial_result = iter->partial_result_cnt;
4179
4180 // If this pending buffer has result metadata, we may be able to send out shutter callback
4181 // and result metadata.
4182 if (iter->resultMetadata != nullptr) {
4183 if (!readyToSend) {
4184 // If any of the previous pending request is not ready, this pending request is
4185 // also not ready to send in order to keep shutter callbacks and result metadata
4186 // in order.
4187 iter++;
4188 continue;
4189 }
4190
4191 // Invoke shutter callback if not yet.
4192 if (!iter->shutter_notified) {
4193 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4194
4195 // Find the timestamp in HDR+ result metadata
4196 camera_metadata_ro_entry_t entry;
4197 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4198 ANDROID_SENSOR_TIMESTAMP, &entry);
4199 if (res != OK) {
4200 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4201 __FUNCTION__, iter->frame_number, strerror(-res), res);
4202 } else {
4203 timestamp = entry.data.i64[0];
4204 }
4205
4206 camera3_notify_msg_t notify_msg = {};
4207 notify_msg.type = CAMERA3_MSG_SHUTTER;
4208 notify_msg.message.shutter.frame_number = iter->frame_number;
4209 notify_msg.message.shutter.timestamp = timestamp;
4210 orchestrateNotify(&notify_msg);
4211 iter->shutter_notified = true;
4212 }
4213
4214 result.input_buffer = iter->input_buffer;
4215
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004216 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4217 // If the result metadata belongs to a live request, notify errors for previous pending
4218 // live requests.
4219 mPendingLiveRequest--;
4220
4221 CameraMetadata dummyMetadata;
4222 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4223 result.result = dummyMetadata.release();
4224
4225 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004226
4227            // partial_result should be PARTIAL_RESULT_COUNT in case of
4228 // ERROR_RESULT.
4229 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4230 result.partial_result = PARTIAL_RESULT_COUNT;
4231
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004232 } else {
4233 iter++;
4234 continue;
4235 }
4236
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004237 // Prepare output buffer array
4238 for (auto bufferInfoIter = iter->buffers.begin();
4239 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4240 if (bufferInfoIter->buffer != nullptr) {
4241
4242 QCamera3Channel *channel =
4243 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4244 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4245
4246 // Check if this buffer is a dropped frame.
4247 auto frameDropIter = mPendingFrameDropList.begin();
4248 while (frameDropIter != mPendingFrameDropList.end()) {
4249 if((frameDropIter->stream_ID == streamID) &&
4250 (frameDropIter->frame_number == frameNumber)) {
4251 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4252 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4253 streamID);
4254 mPendingFrameDropList.erase(frameDropIter);
4255 break;
4256 } else {
4257 frameDropIter++;
4258 }
4259 }
4260
4261 // Check buffer error status
4262 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4263 bufferInfoIter->buffer->buffer);
4264 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4265
4266 outputBuffers.push_back(*(bufferInfoIter->buffer));
4267 free(bufferInfoIter->buffer);
4268 bufferInfoIter->buffer = NULL;
4269 }
4270 }
4271
4272 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4273 result.num_output_buffers = outputBuffers.size();
4274
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004275 orchestrateResult(&result);
4276
4277 // For reprocessing, result metadata is the same as settings so do not free it here to
4278 // avoid double free.
4279 if (result.result != iter->settings) {
4280 free_camera_metadata((camera_metadata_t *)result.result);
4281 }
4282 iter->resultMetadata = nullptr;
4283 iter = erasePendingRequest(iter);
4284 }
4285
4286 if (liveRequest) {
4287 for (auto &iter : mPendingRequestsList) {
4288 // Increment pipeline depth for the following pending requests.
4289 if (iter.frame_number > frameNumber) {
4290 iter.pipeline_depth++;
4291 }
4292 }
4293 }
4294
4295 unblockRequestIfNecessary();
4296}
4297
Thierry Strudel3d639192016-09-09 11:52:26 -07004298/*===========================================================================
4299 * FUNCTION : unblockRequestIfNecessary
4300 *
4301 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4302 * that mMutex is held when this function is called.
4303 *
4304 * PARAMETERS :
4305 *
4306 * RETURN :
4307 *
4308 *==========================================================================*/
4309void QCamera3HardwareInterface::unblockRequestIfNecessary()
4310{
4311 // Unblock process_capture_request
4312 pthread_cond_signal(&mRequestCond);
4313}
4314
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004315/*===========================================================================
4316 * FUNCTION : isHdrSnapshotRequest
4317 *
4318 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4319 *
4320 * PARAMETERS : camera3 request structure
4321 *
4322 * RETURN : boolean decision variable
4323 *
4324 *==========================================================================*/
4325bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4326{
4327 if (request == NULL) {
4328 LOGE("Invalid request handle");
4329 assert(0);
4330 return false;
4331 }
4332
4333 if (!mForceHdrSnapshot) {
4334 CameraMetadata frame_settings;
4335 frame_settings = request->settings;
4336
4337 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4338 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4339 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4340 return false;
4341 }
4342 } else {
4343 return false;
4344 }
4345
4346 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4347 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4348 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4349 return false;
4350 }
4351 } else {
4352 return false;
4353 }
4354 }
4355
4356 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4357 if (request->output_buffers[i].stream->format
4358 == HAL_PIXEL_FORMAT_BLOB) {
4359 return true;
4360 }
4361 }
4362
4363 return false;
4364}
4365/*===========================================================================
4366 * FUNCTION : orchestrateRequest
4367 *
4368 * DESCRIPTION: Orchestrates a capture request from camera service
4369 *
4370 * PARAMETERS :
4371 * @request : request from framework to process
4372 *
4373 * RETURN : Error status codes
4374 *
4375 *==========================================================================*/
4376int32_t QCamera3HardwareInterface::orchestrateRequest(
4377 camera3_capture_request_t *request)
4378{
4379
4380 uint32_t originalFrameNumber = request->frame_number;
4381 uint32_t originalOutputCount = request->num_output_buffers;
4382 const camera_metadata_t *original_settings = request->settings;
4383 List<InternalRequest> internallyRequestedStreams;
4384 List<InternalRequest> emptyInternalList;
4385
4386 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4387 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4388 uint32_t internalFrameNumber;
4389 CameraMetadata modified_meta;
4390
4391
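        // The HDR snapshot is expanded into a bracketed sequence of requests
        // driven entirely by the HAL: metering/settling and capture passes at
        // different exposure compensation steps, using internally generated
        // frame numbers. Only the request mapped to the original framework
        // frame number is reported back to camera service.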
4392 /* Add Blob channel to list of internally requested streams */
4393 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4394 if (request->output_buffers[i].stream->format
4395 == HAL_PIXEL_FORMAT_BLOB) {
4396 InternalRequest streamRequested;
4397 streamRequested.meteringOnly = 1;
4398 streamRequested.need_metadata = 0;
4399 streamRequested.stream = request->output_buffers[i].stream;
4400 internallyRequestedStreams.push_back(streamRequested);
4401 }
4402 }
4403 request->num_output_buffers = 0;
4404 auto itr = internallyRequestedStreams.begin();
4405
4406 /* Modify setting to set compensation */
4407 modified_meta = request->settings;
4408 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4409 uint8_t aeLock = 1;
4410 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4411 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4412 camera_metadata_t *modified_settings = modified_meta.release();
4413 request->settings = modified_settings;
4414
4415 /* Capture Settling & -2x frame */
4416 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4417 request->frame_number = internalFrameNumber;
4418 processCaptureRequest(request, internallyRequestedStreams);
4419
4420 request->num_output_buffers = originalOutputCount;
4421 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4422 request->frame_number = internalFrameNumber;
4423 processCaptureRequest(request, emptyInternalList);
4424 request->num_output_buffers = 0;
4425
4426 modified_meta = modified_settings;
4427 expCompensation = 0;
4428 aeLock = 1;
4429 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4430 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4431 modified_settings = modified_meta.release();
4432 request->settings = modified_settings;
4433
4434 /* Capture Settling & 0X frame */
4435
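        // Each exposure step below first issues a metering-only internal
        // request so AE can settle, then a full internal request that also
        // collects metadata.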
4436 itr = internallyRequestedStreams.begin();
4437 if (itr == internallyRequestedStreams.end()) {
4438 LOGE("Error Internally Requested Stream list is empty");
4439 assert(0);
4440 } else {
4441 itr->need_metadata = 0;
4442 itr->meteringOnly = 1;
4443 }
4444
4445 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4446 request->frame_number = internalFrameNumber;
4447 processCaptureRequest(request, internallyRequestedStreams);
4448
4449 itr = internallyRequestedStreams.begin();
4450 if (itr == internallyRequestedStreams.end()) {
4451 ALOGE("Error Internally Requested Stream list is empty");
4452 assert(0);
4453 } else {
4454 itr->need_metadata = 1;
4455 itr->meteringOnly = 0;
4456 }
4457
4458 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4459 request->frame_number = internalFrameNumber;
4460 processCaptureRequest(request, internallyRequestedStreams);
4461
4462 /* Capture 2X frame*/
4463 modified_meta = modified_settings;
4464 expCompensation = GB_HDR_2X_STEP_EV;
4465 aeLock = 1;
4466 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4467 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4468 modified_settings = modified_meta.release();
4469 request->settings = modified_settings;
4470
4471 itr = internallyRequestedStreams.begin();
4472 if (itr == internallyRequestedStreams.end()) {
4473 ALOGE("Error Internally Requested Stream list is empty");
4474 assert(0);
4475 } else {
4476 itr->need_metadata = 0;
4477 itr->meteringOnly = 1;
4478 }
4479 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4480 request->frame_number = internalFrameNumber;
4481 processCaptureRequest(request, internallyRequestedStreams);
4482
4483 itr = internallyRequestedStreams.begin();
4484 if (itr == internallyRequestedStreams.end()) {
4485 ALOGE("Error Internally Requested Stream list is empty");
4486 assert(0);
4487 } else {
4488 itr->need_metadata = 1;
4489 itr->meteringOnly = 0;
4490 }
4491
4492 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4493 request->frame_number = internalFrameNumber;
4494 processCaptureRequest(request, internallyRequestedStreams);
4495
4496
4497 /* Capture 2X on original streaming config*/
4498 internallyRequestedStreams.clear();
4499
4500 /* Restore original settings pointer */
4501 request->settings = original_settings;
4502 } else {
4503 uint32_t internalFrameNumber;
4504 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4505 request->frame_number = internalFrameNumber;
4506 return processCaptureRequest(request, internallyRequestedStreams);
4507 }
4508
4509 return NO_ERROR;
4510}
4511
4512/*===========================================================================
4513 * FUNCTION : orchestrateResult
4514 *
4515 * DESCRIPTION: Orchestrates a capture result to camera service
4516 *
4517 * PARAMETERS :
4518 * @request : request from framework to process
4519 *
4520 * RETURN :
4521 *
4522 *==========================================================================*/
4523void QCamera3HardwareInterface::orchestrateResult(
4524 camera3_capture_result_t *result)
4525{
4526 uint32_t frameworkFrameNumber;
4527 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4528 frameworkFrameNumber);
4529 if (rc != NO_ERROR) {
4530 LOGE("Cannot find translated frameworkFrameNumber");
4531 assert(0);
4532 } else {
4533 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004534 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004535 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004536 if (result->result != NULL) {
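                // If the result metadata carries ANDROID_SYNC_FRAME_NUMBER,
                // rewrite it with the framework frame number instead of the
                // internal one.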
Binhao Lin299ffc92017-04-27 11:22:47 -07004537 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4538 camera_metadata_entry_t entry;
4539 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4540 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004541 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004542 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4543 if (ret != OK)
4544 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004545 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004546 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004547 result->frame_number = frameworkFrameNumber;
4548 mCallbackOps->process_capture_result(mCallbackOps, result);
4549 }
4550 }
4551}
4552
4553/*===========================================================================
4554 * FUNCTION : orchestrateNotify
4555 *
4556 * DESCRIPTION: Orchestrates a notify to camera service
4557 *
4558 * PARAMETERS :
4559 * @request : request from framework to process
4560 *
4561 * RETURN :
4562 *
4563 *==========================================================================*/
4564void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4565{
4566 uint32_t frameworkFrameNumber;
4567 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004568 int32_t rc = NO_ERROR;
4569
4570 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004571 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004572
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004573 if (rc != NO_ERROR) {
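        // CAMERA3_MSG_ERROR_DEVICE is not tied to a specific request, so it is
        // forwarded with frame number 0 even when no translation exists.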
Thierry Strudel2896d122017-02-23 19:18:03 -08004574 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4575 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4576 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004577 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004578 LOGE("Cannot find translated frameworkFrameNumber");
4579 assert(0);
4580 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004581 }
4582 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004583
4584 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4585 LOGD("Internal Request drop the notifyCb");
4586 } else {
4587 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4588 mCallbackOps->notify(mCallbackOps, notify_msg);
4589 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004590}
4591
4592/*===========================================================================
4593 * FUNCTION : FrameNumberRegistry
4594 *
4595 * DESCRIPTION: Constructor
4596 *
4597 * PARAMETERS :
4598 *
4599 * RETURN :
4600 *
4601 *==========================================================================*/
4602FrameNumberRegistry::FrameNumberRegistry()
4603{
4604 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4605}
4606
4607/*===========================================================================
4608 * FUNCTION : ~FrameNumberRegistry
4609 *
4610 * DESCRIPTION: Destructor
4611 *
4612 * PARAMETERS :
4613 *
4614 * RETURN :
4615 *
4616 *==========================================================================*/
4617FrameNumberRegistry::~FrameNumberRegistry()
4618{
4619}
4620
4621/*===========================================================================
4622 * FUNCTION   : purgeOldEntriesLocked
4623 *
4624 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4625 *
4626 * PARAMETERS :
4627 *
4628 * RETURN : NONE
4629 *
4630 *==========================================================================*/
4631void FrameNumberRegistry::purgeOldEntriesLocked()
4632{
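    // Walk from the oldest entry and erase anything that has fallen outside
    // the LRU window of FRAME_REGISTER_LRU_SIZE internal frame numbers.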
4633 while (_register.begin() != _register.end()) {
4634 auto itr = _register.begin();
4635 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4636 _register.erase(itr);
4637 } else {
4638 return;
4639 }
4640 }
4641}
4642
4643/*===========================================================================
4644 * FUNCTION : allocStoreInternalFrameNumber
4645 *
4646 * DESCRIPTION: Method to record a framework request and associate a new
4647 *              internal frame number with it
4648 *
4649 * PARAMETERS :
4650 * @fFrameNumber: Identifier given by framework
4651 * @internalFN : Output parameter which will have the newly generated internal
4652 * entry
4653 *
4654 * RETURN : Error code
4655 *
4656 *==========================================================================*/
4657int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4658 uint32_t &internalFrameNumber)
4659{
4660 Mutex::Autolock lock(mRegistryLock);
4661 internalFrameNumber = _nextFreeInternalNumber++;
4662 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4663 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4664 purgeOldEntriesLocked();
4665 return NO_ERROR;
4666}
4667
4668/*===========================================================================
4669 * FUNCTION : generateStoreInternalFrameNumber
4670 *
4671 * DESCRIPTION: Method to associate a new internal request number independent
4672 *              of any association with framework requests
4673 *
4674 * PARAMETERS :
4675 * @internalFrame#: Output parameter which will have the newly generated internal
4676 *                  frame number
4677 *
4678 * RETURN : Error code
4679 *
4680 *==========================================================================*/
4681int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4682{
4683 Mutex::Autolock lock(mRegistryLock);
4684 internalFrameNumber = _nextFreeInternalNumber++;
4685 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4686 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4687 purgeOldEntriesLocked();
4688 return NO_ERROR;
4689}
4690
4691/*===========================================================================
4692 * FUNCTION : getFrameworkFrameNumber
4693 *
4694 * DESCRIPTION: Method to query the framework framenumber given an internal #
4695 *
4696 * PARAMETERS :
4697 * @internalFrame#: Internal reference
4698 * @frameworkframenumber: Output parameter holding framework frame entry
4699 *
4700 * RETURN : Error code
4701 *
4702 *==========================================================================*/
4703int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4704 uint32_t &frameworkFrameNumber)
4705{
4706 Mutex::Autolock lock(mRegistryLock);
4707 auto itr = _register.find(internalFrameNumber);
4708 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004709 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004710 return -ENOENT;
4711 }
4712
4713 frameworkFrameNumber = itr->second;
4714 purgeOldEntriesLocked();
4715 return NO_ERROR;
4716}
Thierry Strudel3d639192016-09-09 11:52:26 -07004717
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004718status_t QCamera3HardwareInterface::fillPbStreamConfig(
4719 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4720 QCamera3Channel *channel, uint32_t streamIndex) {
4721 if (config == nullptr) {
4722 LOGE("%s: config is null", __FUNCTION__);
4723 return BAD_VALUE;
4724 }
4725
4726 if (channel == nullptr) {
4727 LOGE("%s: channel is null", __FUNCTION__);
4728 return BAD_VALUE;
4729 }
4730
4731 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4732 if (stream == nullptr) {
4733 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4734 return NAME_NOT_FOUND;
4735 }
4736
4737 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4738 if (streamInfo == nullptr) {
4739 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4740 return NAME_NOT_FOUND;
4741 }
4742
4743 config->id = pbStreamId;
4744 config->image.width = streamInfo->dim.width;
4745 config->image.height = streamInfo->dim.height;
4746 config->image.padding = 0;
4747 config->image.format = pbStreamFormat;
4748
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004749 uint32_t totalPlaneSize = 0;
4750
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004751 // Fill plane information.
4752 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4753 pbcamera::PlaneConfiguration plane;
4754 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4755 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4756 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004757
4758 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004759 }
4760
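    // Report any bytes in the frame beyond the sum of the plane sizes as
    // padding.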
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004761 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004762 return OK;
4763}
4764
Thierry Strudel3d639192016-09-09 11:52:26 -07004765/*===========================================================================
4766 * FUNCTION : processCaptureRequest
4767 *
4768 * DESCRIPTION: process a capture request from camera service
4769 *
4770 * PARAMETERS :
4771 * @request : request from framework to process
4772 *
4773 * RETURN :
4774 *
4775 *==========================================================================*/
4776int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004777 camera3_capture_request_t *request,
4778 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004779{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004780 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004781 int rc = NO_ERROR;
4782 int32_t request_id;
4783 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004784 bool isVidBufRequested = false;
4785 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004786 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004787
4788 pthread_mutex_lock(&mMutex);
4789
4790 // Validate current state
4791 switch (mState) {
4792 case CONFIGURED:
4793 case STARTED:
4794 /* valid state */
4795 break;
4796
4797 case ERROR:
4798 pthread_mutex_unlock(&mMutex);
4799 handleCameraDeviceError();
4800 return -ENODEV;
4801
4802 default:
4803 LOGE("Invalid state %d", mState);
4804 pthread_mutex_unlock(&mMutex);
4805 return -ENODEV;
4806 }
4807
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004808 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004809 if (rc != NO_ERROR) {
4810 LOGE("incoming request is not valid");
4811 pthread_mutex_unlock(&mMutex);
4812 return rc;
4813 }
4814
4815 meta = request->settings;
4816
4817 // For first capture request, send capture intent, and
4818 // stream on all streams
4819 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004820 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004821 // send an unconfigure to the backend so that the isp
4822 // resources are deallocated
4823 if (!mFirstConfiguration) {
4824 cam_stream_size_info_t stream_config_info;
4825 int32_t hal_version = CAM_HAL_V3;
4826 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4827 stream_config_info.buffer_info.min_buffers =
4828 MIN_INFLIGHT_REQUESTS;
4829 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004830 m_bIs4KVideo ? 0 :
4831 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004832 clear_metadata_buffer(mParameters);
4833 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4834 CAM_INTF_PARM_HAL_VERSION, hal_version);
4835 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4836 CAM_INTF_META_STREAM_INFO, stream_config_info);
4837 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4838 mParameters);
4839 if (rc < 0) {
4840 LOGE("set_parms for unconfigure failed");
4841 pthread_mutex_unlock(&mMutex);
4842 return rc;
4843 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004844
Thierry Strudel3d639192016-09-09 11:52:26 -07004845 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004846 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004847 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004848 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004849 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004850 property_get("persist.camera.is_type", is_type_value, "4");
4851 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4852 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4853 property_get("persist.camera.is_type_preview", is_type_value, "4");
4854 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4855 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004856
4857 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4858 int32_t hal_version = CAM_HAL_V3;
4859 uint8_t captureIntent =
4860 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4861 mCaptureIntent = captureIntent;
4862 clear_metadata_buffer(mParameters);
4863 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4864 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4865 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004866 if (mFirstConfiguration) {
4867 // configure instant AEC
4868 // Instant AEC is a session based parameter and it is needed only
4869 // once per complete session after open camera.
4870 // i.e. This is set only once for the first capture request, after open camera.
4871 setInstantAEC(meta);
4872 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004873 uint8_t fwkVideoStabMode=0;
4874 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4875 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4876 }
4877
Xue Tuecac74e2017-04-17 13:58:15 -07004878 // If EIS setprop is enabled then only turn it on for video/preview
4879 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004880 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004881 int32_t vsMode;
4882 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4883 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4884 rc = BAD_VALUE;
4885 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004886 LOGD("setEis %d", setEis);
4887 bool eis3Supported = false;
4888 size_t count = IS_TYPE_MAX;
4889 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4890 for (size_t i = 0; i < count; i++) {
4891 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4892 eis3Supported = true;
4893 break;
4894 }
4895 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004896
4897 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004898 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004899 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4900 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004901 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4902 is_type = isTypePreview;
4903 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4904 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4905 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004906 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004907 } else {
4908 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004909 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004910 } else {
4911 is_type = IS_TYPE_NONE;
4912 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004913 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004914 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004915 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4916 }
4917 }
4918
4919 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4920 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4921
Thierry Strudel54dc9782017-02-15 12:12:10 -08004922 //Disable tintless only if the property is set to 0
4923 memset(prop, 0, sizeof(prop));
4924 property_get("persist.camera.tintless.enable", prop, "1");
4925 int32_t tintless_value = atoi(prop);
4926
Thierry Strudel3d639192016-09-09 11:52:26 -07004927 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4928 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004929
Thierry Strudel3d639192016-09-09 11:52:26 -07004930 //Disable CDS for HFR mode or if DIS/EIS is on.
4931 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4932 //after every configure_stream
4933 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4934 (m_bIsVideo)) {
4935 int32_t cds = CAM_CDS_MODE_OFF;
4936 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4937 CAM_INTF_PARM_CDS_MODE, cds))
4938 LOGE("Failed to disable CDS for HFR mode");
4939
4940 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004941
4942 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4943 uint8_t* use_av_timer = NULL;
4944
4945 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004946 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004947 use_av_timer = &m_debug_avtimer;
4948 }
4949 else{
4950 use_av_timer =
4951 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004952 if (use_av_timer) {
4953 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4954 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004955 }
4956
4957 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4958 rc = BAD_VALUE;
4959 }
4960 }
4961
Thierry Strudel3d639192016-09-09 11:52:26 -07004962 setMobicat();
4963
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004964 uint8_t nrMode = 0;
4965 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4966 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4967 }
4968
Thierry Strudel3d639192016-09-09 11:52:26 -07004969 /* Set fps and hfr mode while sending meta stream info so that sensor
4970 * can configure appropriate streaming mode */
4971 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004972 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4973 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004974 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4975 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004976 if (rc == NO_ERROR) {
4977 int32_t max_fps =
4978 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004979 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004980 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4981 }
4982 /* For HFR, more buffers are dequeued upfront to improve the performance */
4983 if (mBatchSize) {
4984 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4985 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4986 }
4987 }
4988 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004989 LOGE("setHalFpsRange failed");
4990 }
4991 }
4992 if (meta.exists(ANDROID_CONTROL_MODE)) {
4993 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4994 rc = extractSceneMode(meta, metaMode, mParameters);
4995 if (rc != NO_ERROR) {
4996 LOGE("extractSceneMode failed");
4997 }
4998 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004999 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005000
Thierry Strudel04e026f2016-10-10 11:27:36 -07005001 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5002 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5003 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5004 rc = setVideoHdrMode(mParameters, vhdr);
5005 if (rc != NO_ERROR) {
5006 LOGE("setVideoHDR is failed");
5007 }
5008 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005009
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005010 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005011 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005012 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005013 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5014 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5015 sensorModeFullFov)) {
5016 rc = BAD_VALUE;
5017 }
5018 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005019 //TODO: validate the arguments, HSV scenemode should have only the
5020 //advertised fps ranges
5021
5022 /*set the capture intent, hal version, tintless, stream info,
5023         *and DIS enable parameters to the backend*/
5024 LOGD("set_parms META_STREAM_INFO " );
5025 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005026 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5027 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005028 mStreamConfigInfo.type[i],
5029 mStreamConfigInfo.stream_sizes[i].width,
5030 mStreamConfigInfo.stream_sizes[i].height,
5031 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005032 mStreamConfigInfo.format[i],
5033 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005034 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005035
Thierry Strudel3d639192016-09-09 11:52:26 -07005036 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5037 mParameters);
5038 if (rc < 0) {
5039 LOGE("set_parms failed for hal version, stream info");
5040 }
5041
Chien-Yu Chenee335912017-02-09 17:53:20 -08005042 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5043 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005044 if (rc != NO_ERROR) {
5045 LOGE("Failed to get sensor output size");
5046 pthread_mutex_unlock(&mMutex);
5047 goto error_exit;
5048 }
5049
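        // Configure the crop region mapper to translate between the full
        // active-array coordinates and the active array of the selected
        // sensor mode.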
5050 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5051 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08005052 mSensorModeInfo.active_array_size.width,
5053 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005054
5055 /* Set batchmode before initializing channel. Since registerBuffer
5056 * internally initializes some of the channels, better set batchmode
5057 * even before first register buffer */
5058 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5059 it != mStreamInfo.end(); it++) {
5060 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5061 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5062 && mBatchSize) {
5063 rc = channel->setBatchSize(mBatchSize);
5064 //Disable per frame map unmap for HFR/batchmode case
5065 rc |= channel->setPerFrameMapUnmap(false);
5066 if (NO_ERROR != rc) {
5067 LOGE("Channel init failed %d", rc);
5068 pthread_mutex_unlock(&mMutex);
5069 goto error_exit;
5070 }
5071 }
5072 }
5073
5074 //First initialize all streams
5075 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5076 it != mStreamInfo.end(); it++) {
5077 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005078
5079 /* Initial value of NR mode is needed before stream on */
5080 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005081 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5082 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005083 setEis) {
5084 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5085 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5086 is_type = mStreamConfigInfo.is_type[i];
5087 break;
5088 }
5089 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005090 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005091 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005092 rc = channel->initialize(IS_TYPE_NONE);
5093 }
5094 if (NO_ERROR != rc) {
5095 LOGE("Channel initialization failed %d", rc);
5096 pthread_mutex_unlock(&mMutex);
5097 goto error_exit;
5098 }
5099 }
5100
5101 if (mRawDumpChannel) {
5102 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5103 if (rc != NO_ERROR) {
5104 LOGE("Error: Raw Dump Channel init failed");
5105 pthread_mutex_unlock(&mMutex);
5106 goto error_exit;
5107 }
5108 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005109 if (mHdrPlusRawSrcChannel) {
5110 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5111 if (rc != NO_ERROR) {
5112 LOGE("Error: HDR+ RAW Source Channel init failed");
5113 pthread_mutex_unlock(&mMutex);
5114 goto error_exit;
5115 }
5116 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005117 if (mSupportChannel) {
5118 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5119 if (rc < 0) {
5120 LOGE("Support channel initialization failed");
5121 pthread_mutex_unlock(&mMutex);
5122 goto error_exit;
5123 }
5124 }
5125 if (mAnalysisChannel) {
5126 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5127 if (rc < 0) {
5128 LOGE("Analysis channel initialization failed");
5129 pthread_mutex_unlock(&mMutex);
5130 goto error_exit;
5131 }
5132 }
5133 if (mDummyBatchChannel) {
5134 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5135 if (rc < 0) {
5136 LOGE("mDummyBatchChannel setBatchSize failed");
5137 pthread_mutex_unlock(&mMutex);
5138 goto error_exit;
5139 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005140 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005141 if (rc < 0) {
5142 LOGE("mDummyBatchChannel initialization failed");
5143 pthread_mutex_unlock(&mMutex);
5144 goto error_exit;
5145 }
5146 }
5147
5148 // Set bundle info
5149 rc = setBundleInfo();
5150 if (rc < 0) {
5151 LOGE("setBundleInfo failed %d", rc);
5152 pthread_mutex_unlock(&mMutex);
5153 goto error_exit;
5154 }
5155
5156 //update settings from app here
5157 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5158 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5159 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5160 }
5161 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5162 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5163 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5164 }
5165 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5166 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5167 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5168
5169 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5170 (mLinkedCameraId != mCameraId) ) {
5171 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5172 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005173 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005174 goto error_exit;
5175 }
5176 }
5177
5178 // add bundle related cameras
5179 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5180 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005181 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5182 &m_pDualCamCmdPtr->bundle_info;
5183 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005184 if (mIsDeviceLinked)
5185 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5186 else
5187 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5188
5189 pthread_mutex_lock(&gCamLock);
5190
5191 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5192 LOGE("Dualcam: Invalid Session Id ");
5193 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005194 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005195 goto error_exit;
5196 }
5197
5198 if (mIsMainCamera == 1) {
5199 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5200 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005201 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005202 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005203 // related session id should be session id of linked session
5204 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5205 } else {
5206 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5207 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005208 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005209 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005210 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5211 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005212 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005213 pthread_mutex_unlock(&gCamLock);
5214
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005215 rc = mCameraHandle->ops->set_dual_cam_cmd(
5216 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005217 if (rc < 0) {
5218 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005219 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005220 goto error_exit;
5221 }
5222 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005223 goto no_error;
5224error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005225 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005226 return rc;
5227no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005228 mWokenUpByDaemon = false;
5229 mPendingLiveRequest = 0;
5230 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005231 }
5232
5233 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005234 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005235
5236 if (mFlushPerf) {
5237 //we cannot accept any requests during flush
5238 LOGE("process_capture_request cannot proceed during flush");
5239 pthread_mutex_unlock(&mMutex);
5240 return NO_ERROR; //should return an error
5241 }
5242
5243 if (meta.exists(ANDROID_REQUEST_ID)) {
5244 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5245 mCurrentRequestId = request_id;
5246 LOGD("Received request with id: %d", request_id);
5247 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5248 LOGE("Unable to find request id field, \
5249 & no previous id available");
5250 pthread_mutex_unlock(&mMutex);
5251 return NAME_NOT_FOUND;
5252 } else {
5253 LOGD("Re-using old request id");
5254 request_id = mCurrentRequestId;
5255 }
5256
5257 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5258 request->num_output_buffers,
5259 request->input_buffer,
5260 frameNumber);
5261 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005262 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005263 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005264 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005265 uint32_t snapshotStreamId = 0;
5266 for (size_t i = 0; i < request->num_output_buffers; i++) {
5267 const camera3_stream_buffer_t& output = request->output_buffers[i];
5268 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5269
Emilian Peev7650c122017-01-19 08:24:33 -08005270 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5271 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005272 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005273 blob_request = 1;
5274 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5275 }
5276
5277 if (output.acquire_fence != -1) {
5278 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5279 close(output.acquire_fence);
5280 if (rc != OK) {
5281 LOGE("sync wait failed %d", rc);
5282 pthread_mutex_unlock(&mMutex);
5283 return rc;
5284 }
5285 }
5286
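        // Depth streams are only noted here; they are not added to the
        // backend stream request list.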
Emilian Peev0f3c3162017-03-15 12:57:46 +00005287 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5288 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005289 depthRequestPresent = true;
5290 continue;
5291 }
5292
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005293 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005294 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005295
5296 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5297 isVidBufRequested = true;
5298 }
5299 }
5300
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005301 //FIXME: Add checks in validateCaptureRequest to ensure there are no duplicate streams
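    // Internally requested streams (e.g. metering-only captures) must also be
    // included in the stream list sent to the backend for this request.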
5302 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5303 itr++) {
5304 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5305 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5306 channel->getStreamID(channel->getStreamTypeMask());
5307
5308 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5309 isVidBufRequested = true;
5310 }
5311 }
5312
Thierry Strudel3d639192016-09-09 11:52:26 -07005313 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005314 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005315 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005316 }
5317 if (blob_request && mRawDumpChannel) {
5318 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005319 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005320 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005321 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005322 }
5323
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005324 {
5325 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5326 // Request a RAW buffer if
5327 // 1. mHdrPlusRawSrcChannel is valid.
5328 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5329 // 3. There is no pending HDR+ request.
5330 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5331 mHdrPlusPendingRequests.size() == 0) {
5332 streamsArray.stream_request[streamsArray.num_streams].streamID =
5333 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5334 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5335 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005336 }
5337
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005338 //extract capture intent
5339 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5340 mCaptureIntent =
5341 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5342 }
5343
5344 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5345 mCacMode =
5346 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5347 }
5348
5349 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005350 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005351
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005352 {
5353 Mutex::Autolock l(gHdrPlusClientLock);
5354 // If this request has a still capture intent, try to submit an HDR+ request.
5355 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5356 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5357 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5358 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005359 }
5360
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005361 if (hdrPlusRequest) {
5362 // For a HDR+ request, just set the frame parameters.
5363 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5364 if (rc < 0) {
5365 LOGE("fail to set frame parameters");
5366 pthread_mutex_unlock(&mMutex);
5367 return rc;
5368 }
5369 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005370 /* Parse the settings:
5371 * - For every request in NORMAL MODE
5372 * - For every request in HFR mode during preview only case
5373 * - For first request of every batch in HFR mode during video
5374 * recording. In batchmode the same settings except frame number is
5375 * repeated in each request of the batch.
5376 */
5377 if (!mBatchSize ||
5378 (mBatchSize && !isVidBufRequested) ||
5379 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005380 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005381 if (rc < 0) {
5382 LOGE("fail to set frame parameters");
5383 pthread_mutex_unlock(&mMutex);
5384 return rc;
5385 }
5386 }
 5387 /* For batch mode HFR, setFrameParameters is not called for every
 5388 * request; only the frame number of the latest request is parsed.
 5389 * Keep track of the first and last frame numbers in a batch so that
 5390 * metadata for all frame numbers of the batch can be duplicated in
 5391 * handleBatchMetadata */
5392 if (mBatchSize) {
5393 if (!mToBeQueuedVidBufs) {
5394 //start of the batch
5395 mFirstFrameNumberInBatch = request->frame_number;
5396 }
5397 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5398 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5399 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005400 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005401 return BAD_VALUE;
5402 }
5403 }
5404 if (mNeedSensorRestart) {
5405 /* Unlock the mutex as restartSensor waits on the channels to be
5406 * stopped, which in turn calls stream callback functions -
5407 * handleBufferWithLock and handleMetadataWithLock */
5408 pthread_mutex_unlock(&mMutex);
5409 rc = dynamicUpdateMetaStreamInfo();
5410 if (rc != NO_ERROR) {
5411 LOGE("Restarting the sensor failed");
5412 return BAD_VALUE;
5413 }
5414 mNeedSensorRestart = false;
5415 pthread_mutex_lock(&mMutex);
5416 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005417 if(mResetInstantAEC) {
5418 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5419 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5420 mResetInstantAEC = false;
5421 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005422 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005423 if (request->input_buffer->acquire_fence != -1) {
5424 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5425 close(request->input_buffer->acquire_fence);
5426 if (rc != OK) {
5427 LOGE("input buffer sync wait failed %d", rc);
5428 pthread_mutex_unlock(&mMutex);
5429 return rc;
5430 }
5431 }
5432 }
5433
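    // Remember the most recent frame number that carried a CUSTOM capture intent.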
5434 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5435 mLastCustIntentFrmNum = frameNumber;
5436 }
5437 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005438 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005439 pendingRequestIterator latestRequest;
5440 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005441 pendingRequest.num_buffers = depthRequestPresent ?
5442 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005443 pendingRequest.request_id = request_id;
5444 pendingRequest.blob_request = blob_request;
5445 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005446 if (request->input_buffer) {
5447 pendingRequest.input_buffer =
5448 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5449 *(pendingRequest.input_buffer) = *(request->input_buffer);
5450 pInputBuffer = pendingRequest.input_buffer;
5451 } else {
5452 pendingRequest.input_buffer = NULL;
5453 pInputBuffer = NULL;
5454 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005455 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005456
5457 pendingRequest.pipeline_depth = 0;
5458 pendingRequest.partial_result_cnt = 0;
5459 extractJpegMetadata(mCurJpegMeta, request);
5460 pendingRequest.jpegMetadata = mCurJpegMeta;
5461 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5462 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005463 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005464 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5465 mHybridAeEnable =
5466 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5467 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005468
5469 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5470 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005471 /* DevCamDebug metadata processCaptureRequest */
5472 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5473 mDevCamDebugMetaEnable =
5474 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5475 }
5476 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5477 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005478
5479 //extract CAC info
5480 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5481 mCacMode =
5482 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5483 }
5484 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005485 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005486
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005487 // extract enableZsl info
5488 if (gExposeEnableZslKey) {
5489 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5490 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5491 mZslEnabled = pendingRequest.enableZsl;
5492 } else {
5493 pendingRequest.enableZsl = mZslEnabled;
5494 }
5495 }
5496
Thierry Strudel3d639192016-09-09 11:52:26 -07005497 PendingBuffersInRequest bufsForCurRequest;
5498 bufsForCurRequest.frame_number = frameNumber;
5499 // Mark current timestamp for the new request
5500 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005501 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005502
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005503 if (hdrPlusRequest) {
5504 // Save settings for this request.
5505 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5506 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5507
5508 // Add to pending HDR+ request queue.
5509 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5510 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5511
5512 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5513 }
5514
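    // Track every non-depth output buffer of this request in the pending request
    // and pending-buffers lists; depth blob buffers are handled separately by the
    // depth channel.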
Thierry Strudel3d639192016-09-09 11:52:26 -07005515 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005516 if ((request->output_buffers[i].stream->data_space ==
5517 HAL_DATASPACE_DEPTH) &&
5518 (HAL_PIXEL_FORMAT_BLOB ==
5519 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005520 continue;
5521 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005522 RequestedBufferInfo requestedBuf;
5523 memset(&requestedBuf, 0, sizeof(requestedBuf));
5524 requestedBuf.stream = request->output_buffers[i].stream;
5525 requestedBuf.buffer = NULL;
5526 pendingRequest.buffers.push_back(requestedBuf);
5527
5528 // Add to buffer handle the pending buffers list
5529 PendingBufferInfo bufferInfo;
5530 bufferInfo.buffer = request->output_buffers[i].buffer;
5531 bufferInfo.stream = request->output_buffers[i].stream;
5532 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5533 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5534 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5535 frameNumber, bufferInfo.buffer,
5536 channel->getStreamTypeMask(), bufferInfo.stream->format);
5537 }
5538 // Add this request packet into mPendingBuffersMap
5539 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5540 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5541 mPendingBuffersMap.get_num_overall_buffers());
5542
5543 latestRequest = mPendingRequestsList.insert(
5544 mPendingRequestsList.end(), pendingRequest);
5545 if(mFlush) {
5546 LOGI("mFlush is true");
5547 pthread_mutex_unlock(&mMutex);
5548 return NO_ERROR;
5549 }
5550
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005551 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5552 // channel.
5553 if (!hdrPlusRequest) {
5554 int indexUsed;
5555 // Notify metadata channel we receive a request
5556 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005557
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005558 if(request->input_buffer != NULL){
5559 LOGD("Input request, frame_number %d", frameNumber);
5560 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5561 if (NO_ERROR != rc) {
5562 LOGE("fail to set reproc parameters");
5563 pthread_mutex_unlock(&mMutex);
5564 return rc;
5565 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005566 }
5567
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005568 // Call request on other streams
5569 uint32_t streams_need_metadata = 0;
5570 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5571 for (size_t i = 0; i < request->num_output_buffers; i++) {
5572 const camera3_stream_buffer_t& output = request->output_buffers[i];
5573 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5574
5575 if (channel == NULL) {
5576 LOGW("invalid channel pointer for stream");
5577 continue;
5578 }
5579
5580 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5581 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5582 output.buffer, request->input_buffer, frameNumber);
5583 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005584 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005585 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5586 if (rc < 0) {
5587 LOGE("Fail to request on picture channel");
5588 pthread_mutex_unlock(&mMutex);
5589 return rc;
5590 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005591 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005592 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5593 assert(NULL != mDepthChannel);
5594 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005595
Emilian Peev7650c122017-01-19 08:24:33 -08005596 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5597 if (rc < 0) {
5598 LOGE("Fail to map on depth buffer");
5599 pthread_mutex_unlock(&mMutex);
5600 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005601 }
Emilian Peev7650c122017-01-19 08:24:33 -08005602 } else {
5603 LOGD("snapshot request with buffer %p, frame_number %d",
5604 output.buffer, frameNumber);
5605 if (!request->settings) {
5606 rc = channel->request(output.buffer, frameNumber,
5607 NULL, mPrevParameters, indexUsed);
5608 } else {
5609 rc = channel->request(output.buffer, frameNumber,
5610 NULL, mParameters, indexUsed);
5611 }
5612 if (rc < 0) {
5613 LOGE("Fail to request on picture channel");
5614 pthread_mutex_unlock(&mMutex);
5615 return rc;
5616 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005617
Emilian Peev7650c122017-01-19 08:24:33 -08005618 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5619 uint32_t j = 0;
5620 for (j = 0; j < streamsArray.num_streams; j++) {
5621 if (streamsArray.stream_request[j].streamID == streamId) {
5622 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5623 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5624 else
5625 streamsArray.stream_request[j].buf_index = indexUsed;
5626 break;
5627 }
5628 }
5629 if (j == streamsArray.num_streams) {
5630 LOGE("Did not find matching stream to update index");
5631 assert(0);
5632 }
5633
5634 pendingBufferIter->need_metadata = true;
5635 streams_need_metadata++;
5636 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005637 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005638 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5639 bool needMetadata = false;
5640 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5641 rc = yuvChannel->request(output.buffer, frameNumber,
5642 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5643 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005644 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005645 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005646 pthread_mutex_unlock(&mMutex);
5647 return rc;
5648 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005649
5650 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5651 uint32_t j = 0;
5652 for (j = 0; j < streamsArray.num_streams; j++) {
5653 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005654 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5655 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5656 else
5657 streamsArray.stream_request[j].buf_index = indexUsed;
5658 break;
5659 }
5660 }
5661 if (j == streamsArray.num_streams) {
5662 LOGE("Did not find matching stream to update index");
5663 assert(0);
5664 }
5665
5666 pendingBufferIter->need_metadata = needMetadata;
5667 if (needMetadata)
5668 streams_need_metadata += 1;
5669 LOGD("calling YUV channel request, need_metadata is %d",
5670 needMetadata);
5671 } else {
5672 LOGD("request with buffer %p, frame_number %d",
5673 output.buffer, frameNumber);
5674
5675 rc = channel->request(output.buffer, frameNumber, indexUsed);
5676
5677 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5678 uint32_t j = 0;
5679 for (j = 0; j < streamsArray.num_streams; j++) {
5680 if (streamsArray.stream_request[j].streamID == streamId) {
5681 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5682 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5683 else
5684 streamsArray.stream_request[j].buf_index = indexUsed;
5685 break;
5686 }
5687 }
5688 if (j == streamsArray.num_streams) {
5689 LOGE("Did not find matching stream to update index");
5690 assert(0);
5691 }
5692
5693 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5694 && mBatchSize) {
5695 mToBeQueuedVidBufs++;
5696 if (mToBeQueuedVidBufs == mBatchSize) {
5697 channel->queueBatchBuf();
5698 }
5699 }
5700 if (rc < 0) {
5701 LOGE("request failed");
5702 pthread_mutex_unlock(&mMutex);
5703 return rc;
5704 }
5705 }
5706 pendingBufferIter++;
5707 }
5708
5709 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5710 itr++) {
5711 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5712
5713 if (channel == NULL) {
5714 LOGE("invalid channel pointer for stream");
5715 assert(0);
5716 return BAD_VALUE;
5717 }
5718
5719 InternalRequest requestedStream;
5720 requestedStream = (*itr);
5721
5722
5723 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5724 LOGD("snapshot request internally input buffer %p, frame_number %d",
5725 request->input_buffer, frameNumber);
5726 if(request->input_buffer != NULL){
5727 rc = channel->request(NULL, frameNumber,
5728 pInputBuffer, &mReprocMeta, indexUsed, true,
5729 requestedStream.meteringOnly);
5730 if (rc < 0) {
5731 LOGE("Fail to request on picture channel");
5732 pthread_mutex_unlock(&mMutex);
5733 return rc;
5734 }
5735 } else {
5736 LOGD("snapshot request with frame_number %d", frameNumber);
5737 if (!request->settings) {
5738 rc = channel->request(NULL, frameNumber,
5739 NULL, mPrevParameters, indexUsed, true,
5740 requestedStream.meteringOnly);
5741 } else {
5742 rc = channel->request(NULL, frameNumber,
5743 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5744 }
5745 if (rc < 0) {
5746 LOGE("Fail to request on picture channel");
5747 pthread_mutex_unlock(&mMutex);
5748 return rc;
5749 }
5750
5751 if ((*itr).meteringOnly != 1) {
5752 requestedStream.need_metadata = 1;
5753 streams_need_metadata++;
5754 }
5755 }
5756
5757 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5758 uint32_t j = 0;
5759 for (j = 0; j < streamsArray.num_streams; j++) {
5760 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005761 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5762 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5763 else
5764 streamsArray.stream_request[j].buf_index = indexUsed;
5765 break;
5766 }
5767 }
5768 if (j == streamsArray.num_streams) {
5769 LOGE("Did not find matching stream to update index");
5770 assert(0);
5771 }
5772
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005773 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005774 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005775 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005776 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005777 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005778 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005779 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005780
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005781 //If 2 streams have need_metadata set to true, fail the request, unless
5782 //we copy/reference count the metadata buffer
5783 if (streams_need_metadata > 1) {
5784 LOGE("not supporting request in which two streams requires"
5785 " 2 HAL metadata for reprocessing");
5786 pthread_mutex_unlock(&mMutex);
5787 return -EINVAL;
5788 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005789
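        // Enable PDAF data in the backend only when this request includes a depth (blob) stream.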
Emilian Peev7650c122017-01-19 08:24:33 -08005790 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5791 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5792 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5793 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5794 pthread_mutex_unlock(&mMutex);
5795 return BAD_VALUE;
5796 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005797 if (request->input_buffer == NULL) {
5798 /* Set the parameters to backend:
5799 * - For every request in NORMAL MODE
5800 * - For every request in HFR mode during preview only case
5801 * - Once every batch in HFR mode during video recording
5802 */
5803 if (!mBatchSize ||
5804 (mBatchSize && !isVidBufRequested) ||
5805 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5806 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5807 mBatchSize, isVidBufRequested,
5808 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005809
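            // Batch is complete: merge this request's streams into mBatchedStreamsArray
            // so the backend receives the union of stream IDs requested across the batch.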
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005810 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5811 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5812 uint32_t m = 0;
5813 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5814 if (streamsArray.stream_request[k].streamID ==
5815 mBatchedStreamsArray.stream_request[m].streamID)
5816 break;
5817 }
5818 if (m == mBatchedStreamsArray.num_streams) {
5819 mBatchedStreamsArray.stream_request\
5820 [mBatchedStreamsArray.num_streams].streamID =
5821 streamsArray.stream_request[k].streamID;
5822 mBatchedStreamsArray.stream_request\
5823 [mBatchedStreamsArray.num_streams].buf_index =
5824 streamsArray.stream_request[k].buf_index;
5825 mBatchedStreamsArray.num_streams =
5826 mBatchedStreamsArray.num_streams + 1;
5827 }
5828 }
5829 streamsArray = mBatchedStreamsArray;
5830 }
5831 /* Update stream id of all the requested buffers */
5832 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5833 streamsArray)) {
5834 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005835 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005836 return BAD_VALUE;
5837 }
5838
5839 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5840 mParameters);
5841 if (rc < 0) {
5842 LOGE("set_parms failed");
5843 }
 5844 /* reset to zero because the batch has been queued */
5845 mToBeQueuedVidBufs = 0;
5846 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5847 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5848 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
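            // Batch not yet full: accumulate this request's streams into
            // mBatchedStreamsArray for the upcoming batch submission.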
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005849 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5850 uint32_t m = 0;
5851 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5852 if (streamsArray.stream_request[k].streamID ==
5853 mBatchedStreamsArray.stream_request[m].streamID)
5854 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005855 }
5856 if (m == mBatchedStreamsArray.num_streams) {
5857 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5858 streamID = streamsArray.stream_request[k].streamID;
5859 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5860 buf_index = streamsArray.stream_request[k].buf_index;
5861 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5862 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005863 }
5864 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005865 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005866
5867 // Start all streams after the first setting is sent, so that the
5868 // setting can be applied sooner: (0 + apply_delay)th frame.
5869 if (mState == CONFIGURED && mChannelHandle) {
5870 //Then start them.
5871 LOGH("Start META Channel");
5872 rc = mMetadataChannel->start();
5873 if (rc < 0) {
5874 LOGE("META channel start failed");
5875 pthread_mutex_unlock(&mMutex);
5876 return rc;
5877 }
5878
5879 if (mAnalysisChannel) {
5880 rc = mAnalysisChannel->start();
5881 if (rc < 0) {
5882 LOGE("Analysis channel start failed");
5883 mMetadataChannel->stop();
5884 pthread_mutex_unlock(&mMutex);
5885 return rc;
5886 }
5887 }
5888
5889 if (mSupportChannel) {
5890 rc = mSupportChannel->start();
5891 if (rc < 0) {
5892 LOGE("Support channel start failed");
5893 mMetadataChannel->stop();
 5894 /* Although support and analysis are mutually exclusive today,
 5895 add it in any case for future proofing */
5896 if (mAnalysisChannel) {
5897 mAnalysisChannel->stop();
5898 }
5899 pthread_mutex_unlock(&mMutex);
5900 return rc;
5901 }
5902 }
5903 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5904 it != mStreamInfo.end(); it++) {
5905 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5906 LOGH("Start Processing Channel mask=%d",
5907 channel->getStreamTypeMask());
5908 rc = channel->start();
5909 if (rc < 0) {
5910 LOGE("channel start failed");
5911 pthread_mutex_unlock(&mMutex);
5912 return rc;
5913 }
5914 }
5915
5916 if (mRawDumpChannel) {
5917 LOGD("Starting raw dump stream");
5918 rc = mRawDumpChannel->start();
5919 if (rc != NO_ERROR) {
5920 LOGE("Error Starting Raw Dump Channel");
5921 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5922 it != mStreamInfo.end(); it++) {
5923 QCamera3Channel *channel =
5924 (QCamera3Channel *)(*it)->stream->priv;
5925 LOGH("Stopping Processing Channel mask=%d",
5926 channel->getStreamTypeMask());
5927 channel->stop();
5928 }
5929 if (mSupportChannel)
5930 mSupportChannel->stop();
5931 if (mAnalysisChannel) {
5932 mAnalysisChannel->stop();
5933 }
5934 mMetadataChannel->stop();
5935 pthread_mutex_unlock(&mMutex);
5936 return rc;
5937 }
5938 }
5939
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005940 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005941 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005942 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005943 if (rc != NO_ERROR) {
5944 LOGE("start_channel failed %d", rc);
5945 pthread_mutex_unlock(&mMutex);
5946 return rc;
5947 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005948
5949 {
5950 // Configure Easel for stream on.
5951 Mutex::Autolock l(gHdrPlusClientLock);
5952 if (EaselManagerClientOpened) {
5953 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
5954 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk);
5955 if (rc != OK) {
5956 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5957 mCameraId, mSensorModeInfo.op_pixel_clk);
5958 pthread_mutex_unlock(&mMutex);
5959 return rc;
5960 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005961 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005962 }
5963 }
5964
5965 // Start sensor streaming.
5966 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5967 mChannelHandle);
5968 if (rc != NO_ERROR) {
5969 LOGE("start_sensor_stream_on failed %d", rc);
5970 pthread_mutex_unlock(&mMutex);
5971 return rc;
5972 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005973 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005974 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005975 }
5976
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005977 // Enable HDR+ mode for the first PREVIEW_INTENT request.
5978 {
5979 Mutex::Autolock l(gHdrPlusClientLock);
5980 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5981 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5982 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5983 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5984 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5985 rc = enableHdrPlusModeLocked();
5986 if (rc != OK) {
5987 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
5988 pthread_mutex_unlock(&mMutex);
5989 return rc;
5990 }
5991
5992 mFirstPreviewIntentSeen = true;
5993 }
5994 }
5995
Thierry Strudel3d639192016-09-09 11:52:26 -07005996 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5997
5998 mState = STARTED;
 5999 // Timed wait: block until in-flight requests drop below the limit or the timeout fires
6000 struct timespec ts;
6001 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006002 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006003 if (rc < 0) {
6004 isValidTimeout = 0;
6005 LOGE("Error reading the real time clock!!");
6006 }
6007 else {
 6008 // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006009 int64_t timeout = 5;
6010 {
6011 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6012 // If there is a pending HDR+ request, the following requests may be blocked until the
6013 // HDR+ request is done. So allow a longer timeout.
6014 if (mHdrPlusPendingRequests.size() > 0) {
6015 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6016 }
6017 }
6018 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006019 }
6020 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006021 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006022 (mState != ERROR) && (mState != DEINIT)) {
6023 if (!isValidTimeout) {
6024 LOGD("Blocking on conditional wait");
6025 pthread_cond_wait(&mRequestCond, &mMutex);
6026 }
6027 else {
6028 LOGD("Blocking on timed conditional wait");
6029 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6030 if (rc == ETIMEDOUT) {
6031 rc = -ENODEV;
6032 LOGE("Unblocked on timeout!!!!");
6033 break;
6034 }
6035 }
6036 LOGD("Unblocked");
6037 if (mWokenUpByDaemon) {
6038 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006039 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006040 break;
6041 }
6042 }
6043 pthread_mutex_unlock(&mMutex);
6044
6045 return rc;
6046}
6047
6048/*===========================================================================
6049 * FUNCTION : dump
6050 *
 6051 * DESCRIPTION: Dump HAL3 state (pending requests, pending buffers, frame drops) to fd
 6052 *
 6053 * PARAMETERS :
 6054 *   @fd : file descriptor to write the dump to
 6055 *
 6056 * RETURN     : None
6057 *==========================================================================*/
6058void QCamera3HardwareInterface::dump(int fd)
6059{
6060 pthread_mutex_lock(&mMutex);
6061 dprintf(fd, "\n Camera HAL3 information Begin \n");
6062
6063 dprintf(fd, "\nNumber of pending requests: %zu \n",
6064 mPendingRequestsList.size());
6065 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6066 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6067 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6068 for(pendingRequestIterator i = mPendingRequestsList.begin();
6069 i != mPendingRequestsList.end(); i++) {
6070 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6071 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6072 i->input_buffer);
6073 }
6074 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6075 mPendingBuffersMap.get_num_overall_buffers());
6076 dprintf(fd, "-------+------------------\n");
6077 dprintf(fd, " Frame | Stream type mask \n");
6078 dprintf(fd, "-------+------------------\n");
6079 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6080 for(auto &j : req.mPendingBufferList) {
6081 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6082 dprintf(fd, " %5d | %11d \n",
6083 req.frame_number, channel->getStreamTypeMask());
6084 }
6085 }
6086 dprintf(fd, "-------+------------------\n");
6087
6088 dprintf(fd, "\nPending frame drop list: %zu\n",
6089 mPendingFrameDropList.size());
6090 dprintf(fd, "-------+-----------\n");
6091 dprintf(fd, " Frame | Stream ID \n");
6092 dprintf(fd, "-------+-----------\n");
6093 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6094 i != mPendingFrameDropList.end(); i++) {
6095 dprintf(fd, " %5d | %9d \n",
6096 i->frame_number, i->stream_ID);
6097 }
6098 dprintf(fd, "-------+-----------\n");
6099
6100 dprintf(fd, "\n Camera HAL3 information End \n");
6101
6102 /* use dumpsys media.camera as trigger to send update debug level event */
6103 mUpdateDebugLevel = true;
6104 pthread_mutex_unlock(&mMutex);
6105 return;
6106}
6107
6108/*===========================================================================
6109 * FUNCTION : flush
6110 *
6111 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6112 * conditionally restarts channels
6113 *
6114 * PARAMETERS :
6115 * @ restartChannels: re-start all channels
6116 *
6117 *
6118 * RETURN :
6119 * 0 on success
6120 * Error code on failure
6121 *==========================================================================*/
6122int QCamera3HardwareInterface::flush(bool restartChannels)
6123{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006124 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006125 int32_t rc = NO_ERROR;
6126
6127 LOGD("Unblocking Process Capture Request");
6128 pthread_mutex_lock(&mMutex);
6129 mFlush = true;
6130 pthread_mutex_unlock(&mMutex);
6131
6132 rc = stopAllChannels();
6133 // unlink of dualcam
6134 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006135 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6136 &m_pDualCamCmdPtr->bundle_info;
6137 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006138 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6139 pthread_mutex_lock(&gCamLock);
6140
6141 if (mIsMainCamera == 1) {
6142 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6143 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006144 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006145 // related session id should be session id of linked session
6146 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6147 } else {
6148 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6149 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006150 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006151 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6152 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006153 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006154 pthread_mutex_unlock(&gCamLock);
6155
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006156 rc = mCameraHandle->ops->set_dual_cam_cmd(
6157 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006158 if (rc < 0) {
6159 LOGE("Dualcam: Unlink failed, but still proceed to close");
6160 }
6161 }
6162
6163 if (rc < 0) {
6164 LOGE("stopAllChannels failed");
6165 return rc;
6166 }
6167 if (mChannelHandle) {
6168 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6169 mChannelHandle);
6170 }
6171
6172 // Reset bundle info
6173 rc = setBundleInfo();
6174 if (rc < 0) {
6175 LOGE("setBundleInfo failed %d", rc);
6176 return rc;
6177 }
6178
6179 // Mutex Lock
6180 pthread_mutex_lock(&mMutex);
6181
6182 // Unblock process_capture_request
6183 mPendingLiveRequest = 0;
6184 pthread_cond_signal(&mRequestCond);
6185
6186 rc = notifyErrorForPendingRequests();
6187 if (rc < 0) {
6188 LOGE("notifyErrorForPendingRequests failed");
6189 pthread_mutex_unlock(&mMutex);
6190 return rc;
6191 }
6192
6193 mFlush = false;
6194
6195 // Start the Streams/Channels
6196 if (restartChannels) {
6197 rc = startAllChannels();
6198 if (rc < 0) {
6199 LOGE("startAllChannels failed");
6200 pthread_mutex_unlock(&mMutex);
6201 return rc;
6202 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006203 if (mChannelHandle) {
6204 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006205 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006206 if (rc < 0) {
6207 LOGE("start_channel failed");
6208 pthread_mutex_unlock(&mMutex);
6209 return rc;
6210 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006211 }
6212 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006213 pthread_mutex_unlock(&mMutex);
6214
6215 return 0;
6216}
6217
6218/*===========================================================================
6219 * FUNCTION : flushPerf
6220 *
6221 * DESCRIPTION: This is the performance optimization version of flush that does
6222 * not use stream off, rather flushes the system
6223 *
6224 * PARAMETERS :
6225 *
6226 *
6227 * RETURN : 0 : success
6228 * -EINVAL: input is malformed (device is not valid)
6229 * -ENODEV: if the device has encountered a serious error
6230 *==========================================================================*/
6231int QCamera3HardwareInterface::flushPerf()
6232{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006233 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006234 int32_t rc = 0;
6235 struct timespec timeout;
6236 bool timed_wait = false;
6237
6238 pthread_mutex_lock(&mMutex);
6239 mFlushPerf = true;
6240 mPendingBuffersMap.numPendingBufsAtFlush =
6241 mPendingBuffersMap.get_num_overall_buffers();
6242 LOGD("Calling flush. Wait for %d buffers to return",
6243 mPendingBuffersMap.numPendingBufsAtFlush);
6244
6245 /* send the flush event to the backend */
6246 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6247 if (rc < 0) {
6248 LOGE("Error in flush: IOCTL failure");
6249 mFlushPerf = false;
6250 pthread_mutex_unlock(&mMutex);
6251 return -ENODEV;
6252 }
6253
6254 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6255 LOGD("No pending buffers in HAL, return flush");
6256 mFlushPerf = false;
6257 pthread_mutex_unlock(&mMutex);
6258 return rc;
6259 }
6260
6261 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006262 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006263 if (rc < 0) {
6264 LOGE("Error reading the real time clock, cannot use timed wait");
6265 } else {
6266 timeout.tv_sec += FLUSH_TIMEOUT;
6267 timed_wait = true;
6268 }
6269
6270 //Block on conditional variable
6271 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6272 LOGD("Waiting on mBuffersCond");
6273 if (!timed_wait) {
6274 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6275 if (rc != 0) {
6276 LOGE("pthread_cond_wait failed due to rc = %s",
6277 strerror(rc));
6278 break;
6279 }
6280 } else {
6281 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6282 if (rc != 0) {
6283 LOGE("pthread_cond_timedwait failed due to rc = %s",
6284 strerror(rc));
6285 break;
6286 }
6287 }
6288 }
6289 if (rc != 0) {
6290 mFlushPerf = false;
6291 pthread_mutex_unlock(&mMutex);
6292 return -ENODEV;
6293 }
6294
6295 LOGD("Received buffers, now safe to return them");
6296
6297 //make sure the channels handle flush
6298 //currently only required for the picture channel to release snapshot resources
6299 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6300 it != mStreamInfo.end(); it++) {
6301 QCamera3Channel *channel = (*it)->channel;
6302 if (channel) {
6303 rc = channel->flush();
6304 if (rc) {
6305 LOGE("Flushing the channels failed with error %d", rc);
6306 // even though the channel flush failed we need to continue and
6307 // return the buffers we have to the framework, however the return
6308 // value will be an error
6309 rc = -ENODEV;
6310 }
6311 }
6312 }
6313
6314 /* notify the frameworks and send errored results */
6315 rc = notifyErrorForPendingRequests();
6316 if (rc < 0) {
6317 LOGE("notifyErrorForPendingRequests failed");
6318 pthread_mutex_unlock(&mMutex);
6319 return rc;
6320 }
6321
6322 //unblock process_capture_request
6323 mPendingLiveRequest = 0;
6324 unblockRequestIfNecessary();
6325
6326 mFlushPerf = false;
6327 pthread_mutex_unlock(&mMutex);
6328 LOGD ("Flush Operation complete. rc = %d", rc);
6329 return rc;
6330}
6331
6332/*===========================================================================
6333 * FUNCTION : handleCameraDeviceError
6334 *
6335 * DESCRIPTION: This function calls internal flush and notifies the error to
6336 * framework and updates the state variable.
6337 *
6338 * PARAMETERS : None
6339 *
6340 * RETURN : NO_ERROR on Success
6341 * Error code on failure
6342 *==========================================================================*/
6343int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6344{
6345 int32_t rc = NO_ERROR;
6346
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006347 {
6348 Mutex::Autolock lock(mFlushLock);
6349 pthread_mutex_lock(&mMutex);
6350 if (mState != ERROR) {
6351 //if mState != ERROR, nothing to be done
6352 pthread_mutex_unlock(&mMutex);
6353 return NO_ERROR;
6354 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006355 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006356
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006357 rc = flush(false /* restart channels */);
6358 if (NO_ERROR != rc) {
6359 LOGE("internal flush to handle mState = ERROR failed");
6360 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006361
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006362 pthread_mutex_lock(&mMutex);
6363 mState = DEINIT;
6364 pthread_mutex_unlock(&mMutex);
6365 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006366
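    // Notify the framework of the fatal device error once the internal flush is done.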
6367 camera3_notify_msg_t notify_msg;
6368 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6369 notify_msg.type = CAMERA3_MSG_ERROR;
6370 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6371 notify_msg.message.error.error_stream = NULL;
6372 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006373 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006374
6375 return rc;
6376}
6377
6378/*===========================================================================
6379 * FUNCTION : captureResultCb
6380 *
6381 * DESCRIPTION: Callback handler for all capture result
6382 * (streams, as well as metadata)
6383 *
6384 * PARAMETERS :
6385 * @metadata : metadata information
6386 * @buffer : actual gralloc buffer to be returned to frameworks.
6387 * NULL if metadata.
6388 *
6389 * RETURN : NONE
6390 *==========================================================================*/
6391void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6392 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6393{
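    // Route the callback: metadata goes to the batched or per-frame metadata
    // handler; input and output buffers go to their respective handlers.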
6394 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006395 pthread_mutex_lock(&mMutex);
6396 uint8_t batchSize = mBatchSize;
6397 pthread_mutex_unlock(&mMutex);
6398 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006399 handleBatchMetadata(metadata_buf,
6400 true /* free_and_bufdone_meta_buf */);
6401 } else { /* mBatchSize = 0 */
6402 hdrPlusPerfLock(metadata_buf);
6403 pthread_mutex_lock(&mMutex);
6404 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006405 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006406 true /* last urgent frame of batch metadata */,
6407 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006408 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006409 pthread_mutex_unlock(&mMutex);
6410 }
6411 } else if (isInputBuffer) {
6412 pthread_mutex_lock(&mMutex);
6413 handleInputBufferWithLock(frame_number);
6414 pthread_mutex_unlock(&mMutex);
6415 } else {
6416 pthread_mutex_lock(&mMutex);
6417 handleBufferWithLock(buffer, frame_number);
6418 pthread_mutex_unlock(&mMutex);
6419 }
6420 return;
6421}
6422
6423/*===========================================================================
6424 * FUNCTION : getReprocessibleOutputStreamId
6425 *
6426 * DESCRIPTION: Get source output stream id for the input reprocess stream
6427 * based on size and format, which would be the largest
6428 * output stream if an input stream exists.
6429 *
6430 * PARAMETERS :
6431 * @id : return the stream id if found
6432 *
6433 * RETURN : int32_t type of status
6434 * NO_ERROR -- success
 6435 *              non-zero failure code
6436 *==========================================================================*/
6437int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6438{
6439 /* check if any output or bidirectional stream with the same size and format
6440 and return that stream */
6441 if ((mInputStreamInfo.dim.width > 0) &&
6442 (mInputStreamInfo.dim.height > 0)) {
6443 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6444 it != mStreamInfo.end(); it++) {
6445
6446 camera3_stream_t *stream = (*it)->stream;
6447 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6448 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6449 (stream->format == mInputStreamInfo.format)) {
6450 // Usage flag for an input stream and the source output stream
6451 // may be different.
6452 LOGD("Found reprocessible output stream! %p", *it);
6453 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6454 stream->usage, mInputStreamInfo.usage);
6455
6456 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6457 if (channel != NULL && channel->mStreams[0]) {
6458 id = channel->mStreams[0]->getMyServerID();
6459 return NO_ERROR;
6460 }
6461 }
6462 }
6463 } else {
6464 LOGD("No input stream, so no reprocessible output stream");
6465 }
6466 return NAME_NOT_FOUND;
6467}
6468
6469/*===========================================================================
6470 * FUNCTION : lookupFwkName
6471 *
6472 * DESCRIPTION: In case the enum is not same in fwk and backend
6473 * make sure the parameter is correctly propogated
6474 *
6475 * PARAMETERS :
6476 * @arr : map between the two enums
6477 * @len : len of the map
6478 * @hal_name : name of the hal_parm to map
6479 *
6480 * RETURN : int type of status
6481 * fwk_name -- success
 6482 *              non-zero failure code
6483 *==========================================================================*/
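// Illustrative usage (map and variable names here are hypothetical, not from this
// file): int fwkVal = lookupFwkName(SOME_MODES_MAP, mapLen, halVal); a return of
// NAME_NOT_FOUND simply means the backend value has no framework equivalent.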
6484template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6485 size_t len, halType hal_name)
6486{
6487
6488 for (size_t i = 0; i < len; i++) {
6489 if (arr[i].hal_name == hal_name) {
6490 return arr[i].fwk_name;
6491 }
6492 }
6493
 6494 /* Not being able to find a matching framework type is not necessarily
 6495 * an error case. This happens when mm-camera supports more attributes
 6496 * than the framework does */
6497 LOGH("Cannot find matching framework type");
6498 return NAME_NOT_FOUND;
6499}
6500
6501/*===========================================================================
6502 * FUNCTION : lookupHalName
6503 *
 6504 * DESCRIPTION: In case the enum is not the same in fwk and backend,
 6505 *              make sure the parameter is correctly propagated
6506 *
6507 * PARAMETERS :
6508 * @arr : map between the two enums
6509 * @len : len of the map
 6510 *   @fwk_name : name of the fwk_parm to map
6511 *
6512 * RETURN : int32_t type of status
6513 * hal_name -- success
 6514 *              non-zero failure code
6515 *==========================================================================*/
6516template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6517 size_t len, fwkType fwk_name)
6518{
6519 for (size_t i = 0; i < len; i++) {
6520 if (arr[i].fwk_name == fwk_name) {
6521 return arr[i].hal_name;
6522 }
6523 }
6524
6525 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6526 return NAME_NOT_FOUND;
6527}
6528
6529/*===========================================================================
6530 * FUNCTION : lookupProp
6531 *
6532 * DESCRIPTION: lookup a value by its name
6533 *
6534 * PARAMETERS :
6535 * @arr : map between the two enums
6536 * @len : size of the map
6537 * @name : name to be looked up
6538 *
6539 * RETURN : Value if found
6540 * CAM_CDS_MODE_MAX if not found
6541 *==========================================================================*/
6542template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6543 size_t len, const char *name)
6544{
6545 if (name) {
6546 for (size_t i = 0; i < len; i++) {
6547 if (!strcmp(arr[i].desc, name)) {
6548 return arr[i].val;
6549 }
6550 }
6551 }
6552 return CAM_CDS_MODE_MAX;
6553}
6554
6555/*===========================================================================
 6556 * FUNCTION   : translateFromHalMetadata
 *
 6557 * DESCRIPTION: Translate metadata from HAL format to framework format
6558 *
6559 * PARAMETERS :
6560 * @metadata : metadata information from callback
6561 * @timestamp: metadata buffer timestamp
6562 * @request_id: request id
6563 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006564 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006565 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6566 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006567 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006568 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6569 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006570 *
6571 * RETURN : camera_metadata_t*
6572 * metadata in a format specified by fwk
6573 *==========================================================================*/
6574camera_metadata_t*
6575QCamera3HardwareInterface::translateFromHalMetadata(
6576 metadata_buffer_t *metadata,
6577 nsecs_t timestamp,
6578 int32_t request_id,
6579 const CameraMetadata& jpegMetadata,
6580 uint8_t pipeline_depth,
6581 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006582 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006583 /* DevCamDebug metadata translateFromHalMetadata argument */
6584 uint8_t DevCamDebug_meta_enable,
6585 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006586 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006587 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006588 bool lastMetadataInBatch,
6589 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006590{
6591 CameraMetadata camMetadata;
6592 camera_metadata_t *resultMetadata;
6593
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006594 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006595 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6596 * Timestamp is needed because it's used for shutter notify calculation.
6597 * */
6598 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6599 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006600 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006601 }
6602
Thierry Strudel3d639192016-09-09 11:52:26 -07006603 if (jpegMetadata.entryCount())
6604 camMetadata.append(jpegMetadata);
6605
6606 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6607 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6608 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6609 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006610 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006611 if (mBatchSize == 0) {
6612 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6613 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6614 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006615
Samuel Ha68ba5172016-12-15 18:41:12 -08006616 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
 6617 // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6618 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6619 // DevCamDebug metadata translateFromHalMetadata AF
6620 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6621 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6622 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6623 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6624 }
6625 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6626 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6627 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6628 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6629 }
6630 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6631 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6632 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6633 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6634 }
6635 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6636 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6637 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6638 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6639 }
6640 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6641 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6642 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6643 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6644 }
6645 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6646 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6647 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6648 *DevCamDebug_af_monitor_pdaf_target_pos;
6649 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6650 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6651 }
6652 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6653 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6654 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6655 *DevCamDebug_af_monitor_pdaf_confidence;
6656 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6657 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6658 }
6659 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6660 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6661 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6662 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6663 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6664 }
6665 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6666 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6667 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6668 *DevCamDebug_af_monitor_tof_target_pos;
6669 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6670 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6671 }
6672 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6673 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6674 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6675 *DevCamDebug_af_monitor_tof_confidence;
6676 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6677 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6678 }
6679 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6680 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6681 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6682 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6683 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6684 }
6685 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6686 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6687 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6688 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6689 &fwk_DevCamDebug_af_monitor_type_select, 1);
6690 }
6691 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6692 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6693 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6694 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6695 &fwk_DevCamDebug_af_monitor_refocus, 1);
6696 }
6697 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6698 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6699 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6700 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6701 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6702 }
6703 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6704 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6705 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6706 *DevCamDebug_af_search_pdaf_target_pos;
6707 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6708 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6709 }
6710 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6711 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6712 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6713 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6714 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6715 }
6716 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6717 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6718 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6719 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6720 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6721 }
6722 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6723 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6724 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6725 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6726 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6727 }
6728 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6729 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6730 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6731 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6732 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6733 }
6734 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6735 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6736 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6737 *DevCamDebug_af_search_tof_target_pos;
6738 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6739 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6740 }
6741 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6742 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6743 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6744 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6745 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6746 }
6747 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6748 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6749 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6750 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6751 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6752 }
6753 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6754 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6755 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6756 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6757 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6758 }
6759 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6760 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6761 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6762 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6763 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6764 }
6765 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6766 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6767 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6768 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6769 &fwk_DevCamDebug_af_search_type_select, 1);
6770 }
6771 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6772 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6773 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6774 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6775 &fwk_DevCamDebug_af_search_next_pos, 1);
6776 }
6777 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6778 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6779 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6780 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6781 &fwk_DevCamDebug_af_search_target_pos, 1);
6782 }
6783 // DevCamDebug metadata translateFromHalMetadata AEC
6784 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6785 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6786 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6787 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6788 }
6789 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6790 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6791 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6792 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6793 }
6794 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6795 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6796 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6797 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6798 }
6799 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6800 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6801 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6802 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6803 }
6804 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6805 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6806 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6807 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6808 }
6809 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6810 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6811 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6812 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6813 }
6814 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6815 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6816 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6817 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6818 }
6819 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6820 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6821 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6822 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6823 }
Samuel Ha34229982017-02-17 13:51:11 -08006824 // DevCamDebug metadata translateFromHalMetadata zzHDR
6825 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6826 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6827 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6828 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6829 }
6830 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6831 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006832 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006833 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6834 }
6835 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6836 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6837 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6838 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6839 }
6840 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6841 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006842 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006843 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6844 }
6845 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6846 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6847 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6848 *DevCamDebug_aec_hdr_sensitivity_ratio;
6849 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6850 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6851 }
6852 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6853 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6854 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6855 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6856 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6857 }
6858 // DevCamDebug metadata translateFromHalMetadata ADRC
6859 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6860 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6861 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6862 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6863 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6864 }
6865 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6866 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6867 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6868 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6869 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6870 }
6871 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6872 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6873 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6874 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6875 }
6876 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6877 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6878 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6879 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6880 }
6881 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6882 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6883 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6884 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6885 }
6886 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6887 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6888 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6889 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6890 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006891 // DevCamDebug metadata translateFromHalMetadata AWB
6892 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6893 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6894 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6895 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6896 }
6897 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6898 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6899 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6900 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6901 }
6902 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6903 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6904 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6905 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6906 }
6907 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6908 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6909 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6910 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6911 }
6912 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6913 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6914 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6915 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6916 }
6917 }
6918 // atrace_end(ATRACE_TAG_ALWAYS);
6919
Thierry Strudel3d639192016-09-09 11:52:26 -07006920 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6921 int64_t fwk_frame_number = *frame_number;
6922 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6923 }
6924
6925 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6926 int32_t fps_range[2];
6927 fps_range[0] = (int32_t)float_range->min_fps;
6928 fps_range[1] = (int32_t)float_range->max_fps;
6929 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6930 fps_range, 2);
6931 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6932 fps_range[0], fps_range[1]);
6933 }
6934
6935 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6936 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6937 }
6938
6939 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6940 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6941 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6942 *sceneMode);
6943 if (NAME_NOT_FOUND != val) {
6944 uint8_t fwkSceneMode = (uint8_t)val;
6945 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6946 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6947 fwkSceneMode);
6948 }
6949 }
6950
6951 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6952 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6953 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6954 }
6955
6956 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6957 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6958 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6959 }
6960
6961 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6962 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6963 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6964 }
6965
6966 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6967 CAM_INTF_META_EDGE_MODE, metadata) {
6968 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6969 }
6970
6971 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6972 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6973 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6974 }
6975
6976 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6977 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6978 }
6979
6980 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6981 if (0 <= *flashState) {
6982 uint8_t fwk_flashState = (uint8_t) *flashState;
6983 if (!gCamCapability[mCameraId]->flash_available) {
6984 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6985 }
6986 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6987 }
6988 }
6989
6990 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6991 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6992 if (NAME_NOT_FOUND != val) {
6993 uint8_t fwk_flashMode = (uint8_t)val;
6994 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6995 }
6996 }
6997
6998 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6999 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7000 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7001 }
7002
7003 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7004 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7005 }
7006
7007 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7008 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7009 }
7010
7011 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7012 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7013 }
7014
7015 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7016 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7017 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7018 }
7019
7020 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7021 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7022 LOGD("fwk_videoStab = %d", fwk_videoStab);
7023 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7024 } else {
7025 // Regardless of whether video stabilization is supported, CTS expects the EIS result to be
7026 // non-NULL, so hard-code the video stabilization result to OFF mode.
7027 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7028 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007029 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007030 }
7031
7032 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7033 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7034 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7035 }
7036
7037 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7038 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7039 }
7040
Thierry Strudel3d639192016-09-09 11:52:26 -07007041 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7042 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007043 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007044
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007045 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7046 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007047
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007048 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007049 blackLevelAppliedPattern->cam_black_level[0],
7050 blackLevelAppliedPattern->cam_black_level[1],
7051 blackLevelAppliedPattern->cam_black_level[2],
7052 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007053 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7054 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007055
7056#ifndef USE_HAL_3_3
7057 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307058 // Convert the internal 14-bit depth to the sensor's 10-bit raw depth
Zhijun Heb753c672016-06-15 14:50:48 -07007059 // space.
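// (Each CFA black level is scaled down by 16 = 2^(14 - 10) below.)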
Jason Lee4f3d96e2017-02-28 19:24:14 +05307060 fwk_blackLevelInd[0] /= 16.0;
7061 fwk_blackLevelInd[1] /= 16.0;
7062 fwk_blackLevelInd[2] /= 16.0;
7063 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007064 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7065 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007066#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007067 }
7068
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007069#ifndef USE_HAL_3_3
7070 // Fixed whitelevel is used by ISP/Sensor
7071 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7072 &gCamCapability[mCameraId]->white_level, 1);
7073#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007074
7075 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7076 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7077 int32_t scalerCropRegion[4];
7078 scalerCropRegion[0] = hScalerCropRegion->left;
7079 scalerCropRegion[1] = hScalerCropRegion->top;
7080 scalerCropRegion[2] = hScalerCropRegion->width;
7081 scalerCropRegion[3] = hScalerCropRegion->height;
7082
7083 // Adjust crop region from sensor output coordinate system to active
7084 // array coordinate system.
7085 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7086 scalerCropRegion[2], scalerCropRegion[3]);
7087
7088 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7089 }
7090
7091 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7092 LOGD("sensorExpTime = %lld", *sensorExpTime);
7093 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7094 }
7095
7096 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7097 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7098 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7099 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7100 }
7101
7102 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7103 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7104 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7105 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7106 sensorRollingShutterSkew, 1);
7107 }
7108
7109 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7110 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7111 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7112
7113 //calculate the noise profile based on sensitivity
7114 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7115 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7116 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
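// ANDROID_SENSOR_NOISE_PROFILE expects one (S, O) pair per color channel, interleaved as
// S0, O0, S1, O1, ...; the same computed pair is reused for every channel here.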
7117 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7118 noise_profile[i] = noise_profile_S;
7119 noise_profile[i+1] = noise_profile_O;
7120 }
7121 LOGD("noise model entry (S, O) is (%f, %f)",
7122 noise_profile_S, noise_profile_O);
7123 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7124 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7125 }
7126
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007127#ifndef USE_HAL_3_3
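// POST_RAW_SENSITIVITY_BOOST is the ISP digital sensitivity scaled by the post-stats
// sensitivity factor; 100 means no boost.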
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007128 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007129 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007130 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007131 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007132 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7133 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7134 }
7135 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007136#endif
7137
Thierry Strudel3d639192016-09-09 11:52:26 -07007138 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7139 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7140 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7141 }
7142
7143 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7144 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7145 *faceDetectMode);
7146 if (NAME_NOT_FOUND != val) {
7147 uint8_t fwk_faceDetectMode = (uint8_t)val;
7148 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7149
7150 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7151 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7152 CAM_INTF_META_FACE_DETECTION, metadata) {
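// Clamp the reported face count to MAX_ROI so the fixed-size result arrays below are not overrun.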
7153 uint8_t numFaces = MIN(
7154 faceDetectionInfo->num_faces_detected, MAX_ROI);
7155 int32_t faceIds[MAX_ROI];
7156 uint8_t faceScores[MAX_ROI];
7157 int32_t faceRectangles[MAX_ROI * 4];
7158 int32_t faceLandmarks[MAX_ROI * 6];
7159 size_t j = 0, k = 0;
7160
7161 for (size_t i = 0; i < numFaces; i++) {
7162 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7163 // Adjust crop region from sensor output coordinate system to active
7164 // array coordinate system.
7165 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7166 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7167 rect.width, rect.height);
7168
7169 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7170 faceRectangles+j, -1);
7171
Jason Lee8ce36fa2017-04-19 19:40:37 -07007172 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7173 "bottom-right (%d, %d)",
7174 faceDetectionInfo->frame_id, i,
7175 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7176 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7177
Thierry Strudel3d639192016-09-09 11:52:26 -07007178 j+= 4;
7179 }
7180 if (numFaces <= 0) {
7181 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7182 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7183 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7184 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7185 }
7186
7187 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7188 numFaces);
7189 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7190 faceRectangles, numFaces * 4U);
7191 if (fwk_faceDetectMode ==
7192 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7193 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7194 CAM_INTF_META_FACE_LANDMARK, metadata) {
7195
7196 for (size_t i = 0; i < numFaces; i++) {
7197 // Map the co-ordinate sensor output coordinate system to active
7198 // array coordinate system.
7199 mCropRegionMapper.toActiveArray(
7200 landmarks->face_landmarks[i].left_eye_center.x,
7201 landmarks->face_landmarks[i].left_eye_center.y);
7202 mCropRegionMapper.toActiveArray(
7203 landmarks->face_landmarks[i].right_eye_center.x,
7204 landmarks->face_landmarks[i].right_eye_center.y);
7205 mCropRegionMapper.toActiveArray(
7206 landmarks->face_landmarks[i].mouth_center.x,
7207 landmarks->face_landmarks[i].mouth_center.y);
7208
7209 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007210
7211 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7212 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7213 faceDetectionInfo->frame_id, i,
7214 faceLandmarks[k + LEFT_EYE_X],
7215 faceLandmarks[k + LEFT_EYE_Y],
7216 faceLandmarks[k + RIGHT_EYE_X],
7217 faceLandmarks[k + RIGHT_EYE_Y],
7218 faceLandmarks[k + MOUTH_X],
7219 faceLandmarks[k + MOUTH_Y]);
7220
Thierry Strudel04e026f2016-10-10 11:27:36 -07007221 k+= TOTAL_LANDMARK_INDICES;
7222 }
7223 } else {
7224 for (size_t i = 0; i < numFaces; i++) {
7225 setInvalidLandmarks(faceLandmarks+k);
7226 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007227 }
7228 }
7229
Jason Lee49619db2017-04-13 12:07:22 -07007230 for (size_t i = 0; i < numFaces; i++) {
7231 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7232
7233 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7234 faceDetectionInfo->frame_id, i, faceIds[i]);
7235 }
7236
Thierry Strudel3d639192016-09-09 11:52:26 -07007237 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7238 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7239 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007240 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007241 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7242 CAM_INTF_META_FACE_BLINK, metadata) {
7243 uint8_t detected[MAX_ROI];
7244 uint8_t degree[MAX_ROI * 2];
7245 for (size_t i = 0; i < numFaces; i++) {
7246 detected[i] = blinks->blink[i].blink_detected;
7247 degree[2 * i] = blinks->blink[i].left_blink;
7248 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007249
Jason Lee49619db2017-04-13 12:07:22 -07007250 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7251 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7252 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7253 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007254 }
7255 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7256 detected, numFaces);
7257 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7258 degree, numFaces * 2);
7259 }
7260 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7261 CAM_INTF_META_FACE_SMILE, metadata) {
7262 uint8_t degree[MAX_ROI];
7263 uint8_t confidence[MAX_ROI];
7264 for (size_t i = 0; i < numFaces; i++) {
7265 degree[i] = smiles->smile[i].smile_degree;
7266 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007267
Jason Lee49619db2017-04-13 12:07:22 -07007268 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7269 "smile_degree=%d, smile_score=%d",
7270 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007271 }
7272 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7273 degree, numFaces);
7274 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7275 confidence, numFaces);
7276 }
7277 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7278 CAM_INTF_META_FACE_GAZE, metadata) {
7279 int8_t angle[MAX_ROI];
7280 int32_t direction[MAX_ROI * 3];
7281 int8_t degree[MAX_ROI * 2];
7282 for (size_t i = 0; i < numFaces; i++) {
7283 angle[i] = gazes->gaze[i].gaze_angle;
7284 direction[3 * i] = gazes->gaze[i].updown_dir;
7285 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7286 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7287 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7288 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007289
7290 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7291 "updown_dir=%d, leftright_dir=%d, roll_dir=%d, "
7292 "left_right_gaze=%d, top_bottom_gaze=%d",
7293 faceDetectionInfo->frame_id, i, angle[i],
7294 direction[3 * i], direction[3 * i + 1],
7295 direction[3 * i + 2],
7296 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007297 }
7298 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7299 (uint8_t *)angle, numFaces);
7300 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7301 direction, numFaces * 3);
7302 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7303 (uint8_t *)degree, numFaces * 2);
7304 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007305 }
7306 }
7307 }
7308 }
7309
7310 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7311 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007312 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007313 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007314 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007315
Shuzhen Wang14415f52016-11-16 18:26:18 -08007316 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7317 histogramBins = *histBins;
7318 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7319 }
7320
7321 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007322 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7323 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007324 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007325
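// Pick the histogram buffer matching the reported channel; Y, ALL, R and any unknown
// channel fall back to the red-channel statistics.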
7326 switch (stats_data->type) {
7327 case CAM_HISTOGRAM_TYPE_BAYER:
7328 switch (stats_data->bayer_stats.data_type) {
7329 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007330 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7331 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007332 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007333 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7334 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007335 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007336 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7337 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007338 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007339 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007340 case CAM_STATS_CHANNEL_R:
7341 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007342 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7343 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007344 }
7345 break;
7346 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007347 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007348 break;
7349 }
7350
Shuzhen Wang14415f52016-11-16 18:26:18 -08007351 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007352 }
7353 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007354 }
7355
7356 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7357 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7358 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7359 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7360 }
7361
7362 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7363 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7364 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7365 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7366 }
7367
7368 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7369 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7370 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7371 CAM_MAX_SHADING_MAP_HEIGHT);
7372 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7373 CAM_MAX_SHADING_MAP_WIDTH);
7374 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7375 lensShadingMap->lens_shading, 4U * map_width * map_height);
7376 }
7377
7378 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7379 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7380 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7381 }
7382
7383 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7384 //Populate CAM_INTF_META_TONEMAP_CURVES
7385 /* ch0 = G, ch 1 = B, ch 2 = R*/
7386 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7387 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7388 tonemap->tonemap_points_cnt,
7389 CAM_MAX_TONEMAP_CURVE_SIZE);
7390 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7391 }
7392
7393 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7394 &tonemap->curves[0].tonemap_points[0][0],
7395 tonemap->tonemap_points_cnt * 2);
7396
7397 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7398 &tonemap->curves[1].tonemap_points[0][0],
7399 tonemap->tonemap_points_cnt * 2);
7400
7401 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7402 &tonemap->curves[2].tonemap_points[0][0],
7403 tonemap->tonemap_points_cnt * 2);
7404 }
7405
7406 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7407 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7408 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7409 CC_GAIN_MAX);
7410 }
7411
7412 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7413 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7414 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7415 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7416 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7417 }
7418
7419 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7420 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7421 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7422 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7423 toneCurve->tonemap_points_cnt,
7424 CAM_MAX_TONEMAP_CURVE_SIZE);
7425 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7426 }
7427 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7428 (float*)toneCurve->curve.tonemap_points,
7429 toneCurve->tonemap_points_cnt * 2);
7430 }
7431
7432 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7433 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7434 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7435 predColorCorrectionGains->gains, 4);
7436 }
7437
7438 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7439 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7440 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7441 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7442 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7443 }
7444
7445 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7446 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7447 }
7448
7449 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7450 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7451 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7452 }
7453
7454 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7455 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7456 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7457 }
7458
7459 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7460 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7461 *effectMode);
7462 if (NAME_NOT_FOUND != val) {
7463 uint8_t fwk_effectMode = (uint8_t)val;
7464 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7465 }
7466 }
7467
7468 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7469 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7470 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7471 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7472 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7473 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7474 }
7475 int32_t fwk_testPatternData[4];
7476 fwk_testPatternData[0] = testPatternData->r;
7477 fwk_testPatternData[3] = testPatternData->b;
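// Indices 1 and 2 hold the two green channels; their order depends on the sensor's CFA arrangement.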
7478 switch (gCamCapability[mCameraId]->color_arrangement) {
7479 case CAM_FILTER_ARRANGEMENT_RGGB:
7480 case CAM_FILTER_ARRANGEMENT_GRBG:
7481 fwk_testPatternData[1] = testPatternData->gr;
7482 fwk_testPatternData[2] = testPatternData->gb;
7483 break;
7484 case CAM_FILTER_ARRANGEMENT_GBRG:
7485 case CAM_FILTER_ARRANGEMENT_BGGR:
7486 fwk_testPatternData[2] = testPatternData->gr;
7487 fwk_testPatternData[1] = testPatternData->gb;
7488 break;
7489 default:
7490 LOGE("color arrangement %d is not supported",
7491 gCamCapability[mCameraId]->color_arrangement);
7492 break;
7493 }
7494 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7495 }
7496
7497 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7498 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7499 }
7500
7501 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7502 String8 str((const char *)gps_methods);
7503 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7504 }
7505
7506 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7507 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7508 }
7509
7510 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7511 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7512 }
7513
7514 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7515 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7516 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7517 }
7518
7519 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7520 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7521 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7522 }
7523
7524 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7525 int32_t fwk_thumb_size[2];
7526 fwk_thumb_size[0] = thumb_size->width;
7527 fwk_thumb_size[1] = thumb_size->height;
7528 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7529 }
7530
7531 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7532 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7533 privateData,
7534 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7535 }
7536
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007537 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007538 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007539 meteringMode, 1);
7540 }
7541
Thierry Strudel54dc9782017-02-15 12:12:10 -08007542 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7543 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7544 LOGD("hdr_scene_data: %d %f\n",
7545 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7546 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7547 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7548 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7549 &isHdr, 1);
7550 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7551 &isHdrConfidence, 1);
7552 }
7553
7554
7555
Thierry Strudel3d639192016-09-09 11:52:26 -07007556 if (metadata->is_tuning_params_valid) {
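// Tuning blob layout packed below: six uint32 header words (data version, then the
// sensor/VFE/CPP/CAC/mod3 data sizes, with mod3 forced to 0), followed by the sensor,
// VFE, CPP and CAC payloads, each clamped to its TUNING_*_MAX size.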
7557 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7558 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7559 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7560
7561
7562 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7563 sizeof(uint32_t));
7564 data += sizeof(uint32_t);
7565
7566 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7567 sizeof(uint32_t));
7568 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7569 data += sizeof(uint32_t);
7570
7571 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7572 sizeof(uint32_t));
7573 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7574 data += sizeof(uint32_t);
7575
7576 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7577 sizeof(uint32_t));
7578 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7579 data += sizeof(uint32_t);
7580
7581 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7582 sizeof(uint32_t));
7583 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7584 data += sizeof(uint32_t);
7585
7586 metadata->tuning_params.tuning_mod3_data_size = 0;
7587 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7588 sizeof(uint32_t));
7589 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7590 data += sizeof(uint32_t);
7591
7592 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7593 TUNING_SENSOR_DATA_MAX);
7594 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7595 count);
7596 data += count;
7597
7598 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7599 TUNING_VFE_DATA_MAX);
7600 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7601 count);
7602 data += count;
7603
7604 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7605 TUNING_CPP_DATA_MAX);
7606 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7607 count);
7608 data += count;
7609
7610 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7611 TUNING_CAC_DATA_MAX);
7612 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7613 count);
7614 data += count;
7615
7616 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7617 (int32_t *)(void *)tuning_meta_data_blob,
7618 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7619 }
7620
7621 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7622 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7623 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7624 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7625 NEUTRAL_COL_POINTS);
7626 }
7627
7628 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7629 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7630 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7631 }
7632
7633 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7634 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7635 // Adjust crop region from sensor output coordinate system to active
7636 // array coordinate system.
7637 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7638 hAeRegions->rect.width, hAeRegions->rect.height);
7639
7640 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7641 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7642 REGIONS_TUPLE_COUNT);
7643 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7644 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7645 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7646 hAeRegions->rect.height);
7647 }
7648
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007649 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7650 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7651 if (NAME_NOT_FOUND != val) {
7652 uint8_t fwkAfMode = (uint8_t)val;
7653 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7654 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7655 } else {
7656 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7657 val);
7658 }
7659 }
7660
Thierry Strudel3d639192016-09-09 11:52:26 -07007661 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7662 uint8_t fwk_afState = (uint8_t) *afState;
7663 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007664 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007665 }
7666
7667 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7668 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7669 }
7670
7671 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7672 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7673 }
7674
7675 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7676 uint8_t fwk_lensState = *lensState;
7677 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7678 }
7679
Thierry Strudel3d639192016-09-09 11:52:26 -07007680
7681 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007682 uint32_t ab_mode = *hal_ab_mode;
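// The framework exposes only a single AUTO antibanding mode, so fold the 50Hz/60Hz
// auto variants into plain AUTO before the lookup.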
7683 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7684 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7685 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7686 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007687 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007688 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007689 if (NAME_NOT_FOUND != val) {
7690 uint8_t fwk_ab_mode = (uint8_t)val;
7691 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7692 }
7693 }
7694
7695 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7696 int val = lookupFwkName(SCENE_MODES_MAP,
7697 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7698 if (NAME_NOT_FOUND != val) {
7699 uint8_t fwkBestshotMode = (uint8_t)val;
7700 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7701 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7702 } else {
7703 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7704 }
7705 }
7706
7707 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7708 uint8_t fwk_mode = (uint8_t) *mode;
7709 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7710 }
7711
7712 /* Constant metadata values to be updated */
7713 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7714 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7715
7716 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7717 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7718
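// The hot pixel map mode is OFF, so publish an empty (zero-length) hot pixel map.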
7719 int32_t hotPixelMap[2];
7720 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7721
7722 // CDS
7723 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7724 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7725 }
7726
Thierry Strudel04e026f2016-10-10 11:27:36 -07007727 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
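// Map the sensor HDR state to the vendor video-HDR tag and record on/off transitions
// in mCurrFeatureState (logged as PROFILE_META_HDR_TOGGLED).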
7728 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007729 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007730 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7731 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7732 } else {
7733 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7734 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007735
7736 if(fwk_hdr != curr_hdr_state) {
7737 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7738 if(fwk_hdr)
7739 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7740 else
7741 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7742 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007743 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7744 }
7745
Thierry Strudel54dc9782017-02-15 12:12:10 -08007746 //binning correction
7747 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7748 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7749 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7750 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7751 }
7752
Thierry Strudel04e026f2016-10-10 11:27:36 -07007753 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007754 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007755 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7756 int8_t is_ir_on = 0;
7757
7758 is_ir_on = (fwk_ir > 0) ? 1 : 0;
7759 if(is_ir_on != curr_ir_state) {
7760 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7761 if(is_ir_on)
7762 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7763 else
7764 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7765 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007766 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007767 }
7768
Thierry Strudel269c81a2016-10-12 12:13:59 -07007769 // AEC SPEED
7770 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7771 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7772 }
7773
7774 // AWB SPEED
7775 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7776 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7777 }
7778
Thierry Strudel3d639192016-09-09 11:52:26 -07007779 // TNR
7780 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7781 uint8_t tnr_enable = tnr->denoise_enable;
7782 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007783 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7784 int8_t is_tnr_on = 0;
7785
7786 is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7787 if(is_tnr_on != curr_tnr_state) {
7788 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7789 if(is_tnr_on)
7790 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7791 else
7792 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7793 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007794
7795 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7796 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7797 }
7798
7799 // Reprocess crop data
7800 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7801 uint8_t cnt = crop_data->num_of_streams;
7802 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7803 // mm-qcamera-daemon only posts crop_data for streams
7804 // not linked to pproc, so the absence of valid crop metadata is not
7805 // necessarily an error.
7806 LOGD("No valid crop metadata entries");
7807 } else {
7808 uint32_t reproc_stream_id;
7809 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7810 LOGD("No reprocessible stream found, ignore crop data");
7811 } else {
7812 int rc = NO_ERROR;
7813 Vector<int32_t> roi_map;
7814 int32_t *crop = new int32_t[cnt*4];
7815 if (NULL == crop) {
7816 rc = NO_MEMORY;
7817 }
7818 if (NO_ERROR == rc) {
7819 int32_t streams_found = 0;
7820 for (size_t i = 0; i < cnt; i++) {
7821 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7822 if (pprocDone) {
7823 // HAL already does internal reprocessing,
7824 // either via reprocessing before JPEG encoding,
7825 // or offline postprocessing for pproc bypass case.
7826 crop[0] = 0;
7827 crop[1] = 0;
7828 crop[2] = mInputStreamInfo.dim.width;
7829 crop[3] = mInputStreamInfo.dim.height;
7830 } else {
7831 crop[0] = crop_data->crop_info[i].crop.left;
7832 crop[1] = crop_data->crop_info[i].crop.top;
7833 crop[2] = crop_data->crop_info[i].crop.width;
7834 crop[3] = crop_data->crop_info[i].crop.height;
7835 }
7836 roi_map.add(crop_data->crop_info[i].roi_map.left);
7837 roi_map.add(crop_data->crop_info[i].roi_map.top);
7838 roi_map.add(crop_data->crop_info[i].roi_map.width);
7839 roi_map.add(crop_data->crop_info[i].roi_map.height);
7840 streams_found++;
7841 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7842 crop[0], crop[1], crop[2], crop[3]);
7843 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7844 crop_data->crop_info[i].roi_map.left,
7845 crop_data->crop_info[i].roi_map.top,
7846 crop_data->crop_info[i].roi_map.width,
7847 crop_data->crop_info[i].roi_map.height);
7848 break;
7849
7850 }
7851 }
7852 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7853 &streams_found, 1);
7854 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7855 crop, (size_t)(streams_found * 4));
7856 if (roi_map.array()) {
7857 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7858 roi_map.array(), roi_map.size());
7859 }
7860 }
7861 if (crop) {
7862 delete [] crop;
7863 }
7864 }
7865 }
7866 }
7867
7868 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
 7869        // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
 7870        // so hardcode the CAC result to OFF mode.
7871 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7872 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7873 } else {
7874 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7875 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7876 *cacMode);
7877 if (NAME_NOT_FOUND != val) {
7878 uint8_t resultCacMode = (uint8_t)val;
 7879                // Check whether the CAC result from the callback equals the framework-set CAC mode.
 7880                // If not, report the CAC mode that came in the corresponding request.
7881 if (fwk_cacMode != resultCacMode) {
7882 resultCacMode = fwk_cacMode;
7883 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007884 //Check if CAC is disabled by property
7885 if (m_cacModeDisabled) {
7886 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7887 }
7888
Thierry Strudel3d639192016-09-09 11:52:26 -07007889 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7890 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7891 } else {
7892 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7893 }
7894 }
7895 }
7896
7897 // Post blob of cam_cds_data through vendor tag.
7898 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7899 uint8_t cnt = cdsInfo->num_of_streams;
7900 cam_cds_data_t cdsDataOverride;
7901 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7902 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7903 cdsDataOverride.num_of_streams = 1;
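        // Only one CDS entry is reported to the framework: the override below carries the
        // session-wide enable flag plus the CDS state of the reprocessible output stream,
        // if one is found.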
7904 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7905 uint32_t reproc_stream_id;
7906 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7907 LOGD("No reprocessible stream found, ignore cds data");
7908 } else {
7909 for (size_t i = 0; i < cnt; i++) {
7910 if (cdsInfo->cds_info[i].stream_id ==
7911 reproc_stream_id) {
7912 cdsDataOverride.cds_info[0].cds_enable =
7913 cdsInfo->cds_info[i].cds_enable;
7914 break;
7915 }
7916 }
7917 }
7918 } else {
7919 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7920 }
7921 camMetadata.update(QCAMERA3_CDS_INFO,
7922 (uint8_t *)&cdsDataOverride,
7923 sizeof(cam_cds_data_t));
7924 }
7925
7926 // Ldaf calibration data
7927 if (!mLdafCalibExist) {
7928 IF_META_AVAILABLE(uint32_t, ldafCalib,
7929 CAM_INTF_META_LDAF_EXIF, metadata) {
7930 mLdafCalibExist = true;
7931 mLdafCalib[0] = ldafCalib[0];
7932 mLdafCalib[1] = ldafCalib[1];
7933 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7934 ldafCalib[0], ldafCalib[1]);
7935 }
7936 }
7937
Thierry Strudel54dc9782017-02-15 12:12:10 -08007938 // EXIF debug data through vendor tag
7939 /*
7940 * Mobicat Mask can assume 3 values:
7941 * 1 refers to Mobicat data,
7942 * 2 refers to Stats Debug and Exif Debug Data
7943 * 3 refers to Mobicat and Stats Debug Data
7944 * We want to make sure that we are sending Exif debug data
7945 * only when Mobicat Mask is 2.
7946 */
7947 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7948 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7949 (uint8_t *)(void *)mExifParams.debug_params,
7950 sizeof(mm_jpeg_debug_exif_params_t));
7951 }
7952
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007953 // Reprocess and DDM debug data through vendor tag
7954 cam_reprocess_info_t repro_info;
7955 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007956 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7957 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007958 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007959 }
7960 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7961 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007962 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007963 }
7964 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7965 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007966 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007967 }
7968 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7969 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007970 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007971 }
7972 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7973 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007974 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007975 }
7976 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007977 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007978 }
7979 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7980 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007981 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007982 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007983 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7984 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7985 }
7986 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7987 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7988 }
7989 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7990 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007991
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007992 // INSTANT AEC MODE
7993 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7994 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7995 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7996 }
7997
Shuzhen Wange763e802016-03-31 10:24:29 -07007998 // AF scene change
7999 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8000 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8001 }
8002
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008003 // Enable ZSL
8004 if (enableZsl != nullptr) {
8005 uint8_t value = *enableZsl ?
8006 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8007 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8008 }
8009
Thierry Strudel3d639192016-09-09 11:52:26 -07008010 resultMetadata = camMetadata.release();
8011 return resultMetadata;
8012}
8013
8014/*===========================================================================
8015 * FUNCTION : saveExifParams
8016 *
 8017 * DESCRIPTION: Cache 3A/statistics EXIF debug parameters from the metadata callback into mExifParams
8018 *
8019 * PARAMETERS :
8020 * @metadata : metadata information from callback
8021 *
8022 * RETURN : none
8023 *
8024 *==========================================================================*/
8025void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8026{
8027 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8028 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8029 if (mExifParams.debug_params) {
8030 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8031 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8032 }
8033 }
8034 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8035 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8036 if (mExifParams.debug_params) {
8037 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8038 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8039 }
8040 }
8041 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8042 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8043 if (mExifParams.debug_params) {
8044 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8045 mExifParams.debug_params->af_debug_params_valid = TRUE;
8046 }
8047 }
8048 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8049 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8050 if (mExifParams.debug_params) {
8051 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8052 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8053 }
8054 }
8055 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8056 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8057 if (mExifParams.debug_params) {
8058 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8059 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8060 }
8061 }
8062 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8063 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8064 if (mExifParams.debug_params) {
8065 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8066 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8067 }
8068 }
8069 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8070 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8071 if (mExifParams.debug_params) {
8072 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8073 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8074 }
8075 }
8076 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8077 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8078 if (mExifParams.debug_params) {
8079 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8080 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8081 }
8082 }
8083}
8084
8085/*===========================================================================
8086 * FUNCTION : get3AExifParams
8087 *
 8088 * DESCRIPTION: Return the cached 3A EXIF parameters (mExifParams)
8089 *
8090 * PARAMETERS : none
8091 *
8092 *
8093 * RETURN : mm_jpeg_exif_params_t
8094 *
8095 *==========================================================================*/
8096mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8097{
8098 return mExifParams;
8099}
8100
8101/*===========================================================================
8102 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8103 *
 8104 * DESCRIPTION: Translate urgent (partial) metadata from the backend into framework result metadata
8105 *
8106 * PARAMETERS :
8107 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008108 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8109 * urgent metadata in a batch. Always true for
8110 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008111 *
8112 * RETURN : camera_metadata_t*
8113 * metadata in a format specified by fwk
8114 *==========================================================================*/
8115camera_metadata_t*
8116QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008117 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008118{
8119 CameraMetadata camMetadata;
8120 camera_metadata_t *resultMetadata;
8121
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008122 if (!lastUrgentMetadataInBatch) {
8123 /* In batch mode, use empty metadata if this is not the last in batch
8124 */
8125 resultMetadata = allocate_camera_metadata(0, 0);
8126 return resultMetadata;
8127 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008128
8129 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8130 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8131 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8132 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8133 }
8134
8135 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8136 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8137 &aecTrigger->trigger, 1);
8138 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8139 &aecTrigger->trigger_id, 1);
8140 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8141 aecTrigger->trigger);
8142 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8143 aecTrigger->trigger_id);
8144 }
8145
8146 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8147 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8148 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8149 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8150 }
8151
Thierry Strudel3d639192016-09-09 11:52:26 -07008152 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8153 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8154 &af_trigger->trigger, 1);
8155 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8156 af_trigger->trigger);
8157 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8158 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8159 af_trigger->trigger_id);
8160 }
8161
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008162 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8163 /*af regions*/
8164 int32_t afRegions[REGIONS_TUPLE_COUNT];
 8165        // Adjust AF regions from the sensor output coordinate system to the
 8166        // active array coordinate system.
8167 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8168 hAfRegions->rect.width, hAfRegions->rect.height);
8169
8170 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8171 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8172 REGIONS_TUPLE_COUNT);
8173 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8174 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8175 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8176 hAfRegions->rect.height);
8177 }
8178
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008179 // AF region confidence
8180 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8181 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8182 }
8183
Thierry Strudel3d639192016-09-09 11:52:26 -07008184 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8185 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8186 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8187 if (NAME_NOT_FOUND != val) {
8188 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8189 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8190 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8191 } else {
8192 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8193 }
8194 }
8195
8196 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8197 uint32_t aeMode = CAM_AE_MODE_MAX;
8198 int32_t flashMode = CAM_FLASH_MODE_MAX;
8199 int32_t redeye = -1;
8200 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8201 aeMode = *pAeMode;
8202 }
8203 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8204 flashMode = *pFlashMode;
8205 }
8206 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8207 redeye = *pRedeye;
8208 }
8209
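    // Deduce ANDROID_CONTROL_AE_MODE from the values gathered above, in priority order:
    // red-eye reduction, then flash auto/on, then plain AE on/off, then external flash.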
8210 if (1 == redeye) {
8211 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8212 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8213 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8214 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8215 flashMode);
8216 if (NAME_NOT_FOUND != val) {
8217 fwk_aeMode = (uint8_t)val;
8218 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8219 } else {
8220 LOGE("Unsupported flash mode %d", flashMode);
8221 }
8222 } else if (aeMode == CAM_AE_MODE_ON) {
8223 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8224 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8225 } else if (aeMode == CAM_AE_MODE_OFF) {
8226 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8227 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008228 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8229 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8230 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008231 } else {
8232 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8233 "flashMode:%d, aeMode:%u!!!",
8234 redeye, flashMode, aeMode);
8235 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008236 if (mInstantAEC) {
 8237        // Increment frame index count until a bound is reached for instant AEC.
8238 mInstantAecFrameIdxCount++;
8239 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8240 CAM_INTF_META_AEC_INFO, metadata) {
8241 LOGH("ae_params->settled = %d",ae_params->settled);
8242 // If AEC settled, or if number of frames reached bound value,
8243 // should reset instant AEC.
8244 if (ae_params->settled ||
8245 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8246 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8247 mInstantAEC = false;
8248 mResetInstantAEC = true;
8249 mInstantAecFrameIdxCount = 0;
8250 }
8251 }
8252 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008253 resultMetadata = camMetadata.release();
8254 return resultMetadata;
8255}
8256
8257/*===========================================================================
8258 * FUNCTION : dumpMetadataToFile
8259 *
8260 * DESCRIPTION: Dumps tuning metadata to file system
8261 *
8262 * PARAMETERS :
8263 * @meta : tuning metadata
8264 * @dumpFrameCount : current dump frame count
8265 * @enabled : Enable mask
8266 *
8267 *==========================================================================*/
8268void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8269 uint32_t &dumpFrameCount,
8270 bool enabled,
8271 const char *type,
8272 uint32_t frameNumber)
8273{
8274 //Some sanity checks
8275 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8276 LOGE("Tuning sensor data size bigger than expected %d: %d",
8277 meta.tuning_sensor_data_size,
8278 TUNING_SENSOR_DATA_MAX);
8279 return;
8280 }
8281
8282 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8283 LOGE("Tuning VFE data size bigger than expected %d: %d",
8284 meta.tuning_vfe_data_size,
8285 TUNING_VFE_DATA_MAX);
8286 return;
8287 }
8288
8289 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8290 LOGE("Tuning CPP data size bigger than expected %d: %d",
8291 meta.tuning_cpp_data_size,
8292 TUNING_CPP_DATA_MAX);
8293 return;
8294 }
8295
8296 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8297 LOGE("Tuning CAC data size bigger than expected %d: %d",
8298 meta.tuning_cac_data_size,
8299 TUNING_CAC_DATA_MAX);
8300 return;
8301 }
8302 //
8303
8304 if(enabled){
8305 char timeBuf[FILENAME_MAX];
8306 char buf[FILENAME_MAX];
8307 memset(buf, 0, sizeof(buf));
8308 memset(timeBuf, 0, sizeof(timeBuf));
8309 time_t current_time;
8310 struct tm * timeinfo;
8311 time (&current_time);
8312 timeinfo = localtime (&current_time);
8313 if (timeinfo != NULL) {
8314 strftime (timeBuf, sizeof(timeBuf),
8315 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8316 }
8317 String8 filePath(timeBuf);
8318 snprintf(buf,
8319 sizeof(buf),
8320 "%dm_%s_%d.bin",
8321 dumpFrameCount,
8322 type,
8323 frameNumber);
8324 filePath.append(buf);
8325 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8326 if (file_fd >= 0) {
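            // A note on the dump layout written below: six uint32 header fields
            // (data version, sensor/VFE/CPP/CAC data sizes, and mod3 size forced to 0)
            // followed by the sensor, VFE, CPP and CAC data segments.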
8327 ssize_t written_len = 0;
8328 meta.tuning_data_version = TUNING_DATA_VERSION;
8329 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8330 written_len += write(file_fd, data, sizeof(uint32_t));
8331 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8332 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8333 written_len += write(file_fd, data, sizeof(uint32_t));
8334 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8335 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8336 written_len += write(file_fd, data, sizeof(uint32_t));
8337 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8338 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8339 written_len += write(file_fd, data, sizeof(uint32_t));
8340 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8341 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8342 written_len += write(file_fd, data, sizeof(uint32_t));
8343 meta.tuning_mod3_data_size = 0;
8344 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8345 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8346 written_len += write(file_fd, data, sizeof(uint32_t));
8347 size_t total_size = meta.tuning_sensor_data_size;
8348 data = (void *)((uint8_t *)&meta.data);
8349 written_len += write(file_fd, data, total_size);
8350 total_size = meta.tuning_vfe_data_size;
8351 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8352 written_len += write(file_fd, data, total_size);
8353 total_size = meta.tuning_cpp_data_size;
8354 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8355 written_len += write(file_fd, data, total_size);
8356 total_size = meta.tuning_cac_data_size;
8357 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8358 written_len += write(file_fd, data, total_size);
8359 close(file_fd);
8360 }else {
8361 LOGE("fail to open file for metadata dumping");
8362 }
8363 }
8364}
8365
8366/*===========================================================================
8367 * FUNCTION : cleanAndSortStreamInfo
8368 *
 8369 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
 8370 *              and sort them such that raw streams are at the end of the list.
 8371 *              This is a workaround for a camera daemon constraint.
8372 *
8373 * PARAMETERS : None
8374 *
8375 *==========================================================================*/
8376void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8377{
8378 List<stream_info_t *> newStreamInfo;
8379
8380 /*clean up invalid streams*/
8381 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8382 it != mStreamInfo.end();) {
8383 if(((*it)->status) == INVALID){
8384 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8385 delete channel;
8386 free(*it);
8387 it = mStreamInfo.erase(it);
8388 } else {
8389 it++;
8390 }
8391 }
8392
8393 // Move preview/video/callback/snapshot streams into newList
8394 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8395 it != mStreamInfo.end();) {
8396 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8397 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8398 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8399 newStreamInfo.push_back(*it);
8400 it = mStreamInfo.erase(it);
8401 } else
8402 it++;
8403 }
8404 // Move raw streams into newList
8405 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8406 it != mStreamInfo.end();) {
8407 newStreamInfo.push_back(*it);
8408 it = mStreamInfo.erase(it);
8409 }
8410
8411 mStreamInfo = newStreamInfo;
8412}
8413
8414/*===========================================================================
8415 * FUNCTION : extractJpegMetadata
8416 *
8417 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
 8418 *              JPEG metadata is cached in HAL, and returned as part of capture
8419 * result when metadata is returned from camera daemon.
8420 *
8421 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8422 * @request: capture request
8423 *
8424 *==========================================================================*/
8425void QCamera3HardwareInterface::extractJpegMetadata(
8426 CameraMetadata& jpegMetadata,
8427 const camera3_capture_request_t *request)
8428{
8429 CameraMetadata frame_settings;
8430 frame_settings = request->settings;
8431
8432 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8433 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8434 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8435 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8436
8437 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8438 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8439 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8440 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8441
8442 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8443 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8444 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8445 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8446
8447 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8448 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8449 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8450 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8451
8452 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8453 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8454 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8455 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8456
8457 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8458 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8459 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8460 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8461
8462 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8463 int32_t thumbnail_size[2];
8464 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8465 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8466 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8467 int32_t orientation =
8468 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008469 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008470 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8471 int32_t temp;
8472 temp = thumbnail_size[0];
8473 thumbnail_size[0] = thumbnail_size[1];
8474 thumbnail_size[1] = temp;
8475 }
8476 }
8477 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8478 thumbnail_size,
8479 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8480 }
8481
8482}
8483
8484/*===========================================================================
8485 * FUNCTION : convertToRegions
8486 *
8487 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8488 *
8489 * PARAMETERS :
8490 * @rect : cam_rect_t struct to convert
8491 * @region : int32_t destination array
8492 * @weight : if we are converting from cam_area_t, weight is valid
8493 * else weight = -1
8494 *
8495 *==========================================================================*/
8496void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8497 int32_t *region, int weight)
8498{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008499 region[FACE_LEFT] = rect.left;
8500 region[FACE_TOP] = rect.top;
8501 region[FACE_RIGHT] = rect.left + rect.width;
8502 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008503 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008504 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008505 }
8506}
8507
8508/*===========================================================================
8509 * FUNCTION : convertFromRegions
8510 *
 8511 * DESCRIPTION: helper method to convert a framework region tag into cam_area_t
 8512 *
 8513 * PARAMETERS :
 8514 *   @roi            : cam_area_t destination to populate
 8515 *   @frame_settings : capture request settings containing the region tag
 8516 *   @tag            : metadata tag whose data is laid out as
 8517 *                     [xMin, yMin, xMax, yMax, weight]
8518 *
8519 *==========================================================================*/
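// For illustration: a tag whose data is [100, 200, 300, 400, 1] yields
// roi.rect = {left=100, top=200, width=200, height=200} and roi.weight = 1.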
8520void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008521 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008522{
Thierry Strudel3d639192016-09-09 11:52:26 -07008523 int32_t x_min = frame_settings.find(tag).data.i32[0];
8524 int32_t y_min = frame_settings.find(tag).data.i32[1];
8525 int32_t x_max = frame_settings.find(tag).data.i32[2];
8526 int32_t y_max = frame_settings.find(tag).data.i32[3];
8527 roi.weight = frame_settings.find(tag).data.i32[4];
8528 roi.rect.left = x_min;
8529 roi.rect.top = y_min;
8530 roi.rect.width = x_max - x_min;
8531 roi.rect.height = y_max - y_min;
8532}
8533
8534/*===========================================================================
8535 * FUNCTION : resetIfNeededROI
8536 *
8537 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8538 * crop region
8539 *
8540 * PARAMETERS :
8541 * @roi : cam_area_t struct to resize
8542 * @scalerCropRegion : cam_crop_region_t region to compare against
8543 *
8544 *
8545 *==========================================================================*/
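// For illustration: with scalerCropRegion = {left=100, top=100, width=800, height=600}
// and roi->rect = {left=50, top=50, width=200, height=200}, the roi overlaps the crop
// region, so it is clamped to {left=100, top=100, width=150, height=150} and the
// function returns true; a roi lying entirely outside the crop region returns false.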
8546bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8547 const cam_crop_region_t* scalerCropRegion)
8548{
8549 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8550 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8551 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8552 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8553
 8554    /* According to the spec, weight = 0 indicates that the roi should be disabled.
 8555     * Without this check, the validation below (whether the roi lies inside the
 8556     * scaler crop region) would fail, the roi would not be reset, and the
 8557     * algorithm would keep using a stale roi window.
 8558     */
8559 if (roi->weight == 0) {
8560 return true;
8561 }
8562
8563 if ((roi_x_max < scalerCropRegion->left) ||
8564 // right edge of roi window is left of scalar crop's left edge
8565 (roi_y_max < scalerCropRegion->top) ||
8566 // bottom edge of roi window is above scalar crop's top edge
8567 (roi->rect.left > crop_x_max) ||
8568 // left edge of roi window is beyond(right) of scalar crop's right edge
8569 (roi->rect.top > crop_y_max)){
 8570        // top edge of roi window is beyond (below) scalar crop's bottom edge
8571 return false;
8572 }
8573 if (roi->rect.left < scalerCropRegion->left) {
8574 roi->rect.left = scalerCropRegion->left;
8575 }
8576 if (roi->rect.top < scalerCropRegion->top) {
8577 roi->rect.top = scalerCropRegion->top;
8578 }
8579 if (roi_x_max > crop_x_max) {
8580 roi_x_max = crop_x_max;
8581 }
8582 if (roi_y_max > crop_y_max) {
8583 roi_y_max = crop_y_max;
8584 }
8585 roi->rect.width = roi_x_max - roi->rect.left;
8586 roi->rect.height = roi_y_max - roi->rect.top;
8587 return true;
8588}
8589
8590/*===========================================================================
8591 * FUNCTION : convertLandmarks
8592 *
8593 * DESCRIPTION: helper method to extract the landmarks from face detection info
8594 *
8595 * PARAMETERS :
8596 * @landmark_data : input landmark data to be converted
8597 * @landmarks : int32_t destination array
8598 *
8599 *
8600 *==========================================================================*/
8601void QCamera3HardwareInterface::convertLandmarks(
8602 cam_face_landmarks_info_t landmark_data,
8603 int32_t *landmarks)
8604{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008605 if (landmark_data.is_left_eye_valid) {
8606 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8607 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8608 } else {
8609 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8610 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8611 }
8612
8613 if (landmark_data.is_right_eye_valid) {
8614 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8615 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8616 } else {
8617 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8618 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8619 }
8620
8621 if (landmark_data.is_mouth_valid) {
8622 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8623 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8624 } else {
8625 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8626 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8627 }
8628}
8629
8630/*===========================================================================
8631 * FUNCTION : setInvalidLandmarks
8632 *
8633 * DESCRIPTION: helper method to set invalid landmarks
8634 *
8635 * PARAMETERS :
8636 * @landmarks : int32_t destination array
8637 *
8638 *
8639 *==========================================================================*/
8640void QCamera3HardwareInterface::setInvalidLandmarks(
8641 int32_t *landmarks)
8642{
8643 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8644 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8645 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8646 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8647 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8648 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008649}
8650
8651#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008652
8653/*===========================================================================
8654 * FUNCTION : getCapabilities
8655 *
8656 * DESCRIPTION: query camera capability from back-end
8657 *
8658 * PARAMETERS :
8659 * @ops : mm-interface ops structure
8660 * @cam_handle : camera handle for which we need capability
8661 *
8662 * RETURN : ptr type of capability structure
8663 * capability for success
8664 * NULL for failure
8665 *==========================================================================*/
8666cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8667 uint32_t cam_handle)
8668{
8669 int rc = NO_ERROR;
8670 QCamera3HeapMemory *capabilityHeap = NULL;
8671 cam_capability_t *cap_ptr = NULL;
8672
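    // Flow: allocate and map a shared capability buffer, ask the backend to fill it via
    // query_capability(), copy the result into a heap-allocated cam_capability_t, then
    // unwind the map/allocate steps through the goto labels below.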
8673 if (ops == NULL) {
8674 LOGE("Invalid arguments");
8675 return NULL;
8676 }
8677
8678 capabilityHeap = new QCamera3HeapMemory(1);
8679 if (capabilityHeap == NULL) {
8680 LOGE("creation of capabilityHeap failed");
8681 return NULL;
8682 }
8683
8684 /* Allocate memory for capability buffer */
8685 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8686 if(rc != OK) {
 8687        LOGE("No memory for capability");
8688 goto allocate_failed;
8689 }
8690
8691 /* Map memory for capability buffer */
8692 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8693
8694 rc = ops->map_buf(cam_handle,
8695 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8696 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8697 if(rc < 0) {
8698 LOGE("failed to map capability buffer");
8699 rc = FAILED_TRANSACTION;
8700 goto map_failed;
8701 }
8702
8703 /* Query Capability */
8704 rc = ops->query_capability(cam_handle);
8705 if(rc < 0) {
8706 LOGE("failed to query capability");
8707 rc = FAILED_TRANSACTION;
8708 goto query_failed;
8709 }
8710
8711 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8712 if (cap_ptr == NULL) {
8713 LOGE("out of memory");
8714 rc = NO_MEMORY;
8715 goto query_failed;
8716 }
8717
8718 memset(cap_ptr, 0, sizeof(cam_capability_t));
8719 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8720
8721 int index;
8722 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8723 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8724 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8725 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8726 }
8727
8728query_failed:
8729 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8730map_failed:
8731 capabilityHeap->deallocate();
8732allocate_failed:
8733 delete capabilityHeap;
8734
8735 if (rc != NO_ERROR) {
8736 return NULL;
8737 } else {
8738 return cap_ptr;
8739 }
8740}
8741
Thierry Strudel3d639192016-09-09 11:52:26 -07008742/*===========================================================================
8743 * FUNCTION : initCapabilities
8744 *
8745 * DESCRIPTION: initialize camera capabilities in static data struct
8746 *
8747 * PARAMETERS :
8748 * @cameraId : camera Id
8749 *
8750 * RETURN : int32_t type of status
8751 * NO_ERROR -- success
 8752 *              non-zero failure code
8753 *==========================================================================*/
8754int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8755{
8756 int rc = 0;
8757 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008758 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008759
8760 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8761 if (rc) {
8762 LOGE("camera_open failed. rc = %d", rc);
8763 goto open_failed;
8764 }
8765 if (!cameraHandle) {
8766 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8767 goto open_failed;
8768 }
8769
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008770 handle = get_main_camera_handle(cameraHandle->camera_handle);
8771 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8772 if (gCamCapability[cameraId] == NULL) {
8773 rc = FAILED_TRANSACTION;
8774 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008775 }
8776
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008777 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008778 if (is_dual_camera_by_idx(cameraId)) {
8779 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8780 gCamCapability[cameraId]->aux_cam_cap =
8781 getCapabilities(cameraHandle->ops, handle);
8782 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8783 rc = FAILED_TRANSACTION;
8784 free(gCamCapability[cameraId]);
8785 goto failed_op;
8786 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008787
8788 // Copy the main camera capability to main_cam_cap struct
8789 gCamCapability[cameraId]->main_cam_cap =
8790 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8791 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8792 LOGE("out of memory");
8793 rc = NO_MEMORY;
8794 goto failed_op;
8795 }
8796 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8797 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008798 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008799failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008800 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8801 cameraHandle = NULL;
8802open_failed:
8803 return rc;
8804}
8805
8806/*==========================================================================
8807 * FUNCTION : get3Aversion
8808 *
8809 * DESCRIPTION: get the Q3A S/W version
8810 *
8811 * PARAMETERS :
8812 * @sw_version: Reference of Q3A structure which will hold version info upon
8813 * return
8814 *
8815 * RETURN : None
8816 *
8817 *==========================================================================*/
8818void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8819{
8820 if(gCamCapability[mCameraId])
8821 sw_version = gCamCapability[mCameraId]->q3a_version;
8822 else
8823 LOGE("Capability structure NULL!");
8824}
8825
8826
8827/*===========================================================================
8828 * FUNCTION : initParameters
8829 *
8830 * DESCRIPTION: initialize camera parameters
8831 *
8832 * PARAMETERS :
8833 *
8834 * RETURN : int32_t type of status
8835 * NO_ERROR -- success
 8836 *              non-zero failure code
8837 *==========================================================================*/
8838int QCamera3HardwareInterface::initParameters()
8839{
8840 int rc = 0;
8841
8842 //Allocate Set Param Buffer
8843 mParamHeap = new QCamera3HeapMemory(1);
8844 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8845 if(rc != OK) {
8846 rc = NO_MEMORY;
8847 LOGE("Failed to allocate SETPARM Heap memory");
8848 delete mParamHeap;
8849 mParamHeap = NULL;
8850 return rc;
8851 }
8852
8853 //Map memory for parameters buffer
8854 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8855 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8856 mParamHeap->getFd(0),
8857 sizeof(metadata_buffer_t),
8858 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8859 if(rc < 0) {
8860 LOGE("failed to map SETPARM buffer");
8861 rc = FAILED_TRANSACTION;
8862 mParamHeap->deallocate();
8863 delete mParamHeap;
8864 mParamHeap = NULL;
8865 return rc;
8866 }
8867
8868 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8869
8870 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8871 return rc;
8872}
8873
8874/*===========================================================================
8875 * FUNCTION : deinitParameters
8876 *
8877 * DESCRIPTION: de-initialize camera parameters
8878 *
8879 * PARAMETERS :
8880 *
8881 * RETURN : NONE
8882 *==========================================================================*/
8883void QCamera3HardwareInterface::deinitParameters()
8884{
8885 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8886 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8887
8888 mParamHeap->deallocate();
8889 delete mParamHeap;
8890 mParamHeap = NULL;
8891
8892 mParameters = NULL;
8893
8894 free(mPrevParameters);
8895 mPrevParameters = NULL;
8896}
8897
8898/*===========================================================================
8899 * FUNCTION : calcMaxJpegSize
8900 *
8901 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8902 *
8903 * PARAMETERS :
8904 *
8905 * RETURN : max_jpeg_size
8906 *==========================================================================*/
8907size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8908{
8909 size_t max_jpeg_size = 0;
8910 size_t temp_width, temp_height;
8911 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8912 MAX_SIZES_CNT);
8913 for (size_t i = 0; i < count; i++) {
8914 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8915 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8916 if (temp_width * temp_height > max_jpeg_size ) {
8917 max_jpeg_size = temp_width * temp_height;
8918 }
8919 }
8920 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8921 return max_jpeg_size;
8922}
8923
8924/*===========================================================================
8925 * FUNCTION : getMaxRawSize
8926 *
8927 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8928 *
8929 * PARAMETERS :
8930 *
8931 * RETURN : Largest supported Raw Dimension
8932 *==========================================================================*/
8933cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8934{
8935 int max_width = 0;
8936 cam_dimension_t maxRawSize;
8937
8938 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8939 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8940 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8941 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8942 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8943 }
8944 }
8945 return maxRawSize;
8946}
8947
8948
8949/*===========================================================================
8950 * FUNCTION : calcMaxJpegDim
8951 *
8952 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8953 *
8954 * PARAMETERS :
8955 *
8956 * RETURN : max_jpeg_dim
8957 *==========================================================================*/
8958cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8959{
8960 cam_dimension_t max_jpeg_dim;
8961 cam_dimension_t curr_jpeg_dim;
8962 max_jpeg_dim.width = 0;
8963 max_jpeg_dim.height = 0;
8964 curr_jpeg_dim.width = 0;
8965 curr_jpeg_dim.height = 0;
8966 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8967 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8968 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8969 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8970 max_jpeg_dim.width * max_jpeg_dim.height ) {
8971 max_jpeg_dim.width = curr_jpeg_dim.width;
8972 max_jpeg_dim.height = curr_jpeg_dim.height;
8973 }
8974 }
8975 return max_jpeg_dim;
8976}
8977
8978/*===========================================================================
8979 * FUNCTION : addStreamConfig
8980 *
8981 * DESCRIPTION: adds the stream configuration to the array
8982 *
8983 * PARAMETERS :
8984 * @available_stream_configs : pointer to stream configuration array
8985 * @scalar_format : scalar format
8986 * @dim : configuration dimension
8987 * @config_type : input or output configuration type
8988 *
8989 * RETURN : NONE
8990 *==========================================================================*/
8991void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8992 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8993{
8994 available_stream_configs.add(scalar_format);
8995 available_stream_configs.add(dim.width);
8996 available_stream_configs.add(dim.height);
8997 available_stream_configs.add(config_type);
8998}
8999
9000/*===========================================================================
 9001 * FUNCTION   : supportBurstCapture
9002 *
9003 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9004 *
9005 * PARAMETERS :
9006 * @cameraId : camera Id
9007 *
9008 * RETURN : true if camera supports BURST_CAPTURE
9009 * false otherwise
9010 *==========================================================================*/
9011bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9012{
9013 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9014 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9015 const int32_t highResWidth = 3264;
9016 const int32_t highResHeight = 2448;
9017
9018 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9019 // Maximum resolution images cannot be captured at >= 10fps
9020 // -> not supporting BURST_CAPTURE
9021 return false;
9022 }
9023
9024 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9025 // Maximum resolution images can be captured at >= 20fps
9026 // --> supporting BURST_CAPTURE
9027 return true;
9028 }
9029
9030 // Find the smallest highRes resolution, or largest resolution if there is none
9031 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9032 MAX_SIZES_CNT);
9033 size_t highRes = 0;
9034 while ((highRes + 1 < totalCnt) &&
9035 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9036 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9037 highResWidth * highResHeight)) {
9038 highRes++;
9039 }
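    // Note: the search above assumes picture_sizes_tbl is sorted from largest to smallest
    // resolution (consistent with picture_min_duration[0] being treated as the maximum
    // resolution entry earlier in this function).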
9040 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9041 return true;
9042 } else {
9043 return false;
9044 }
9045}
9046
9047/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009048 * FUNCTION : getPDStatIndex
9049 *
9050 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9051 *
9052 * PARAMETERS :
9053 * @caps : camera capabilities
9054 *
9055 * RETURN : int32_t type
9056 * non-negative - on success
9057 * -1 - on failure
9058 *==========================================================================*/
9059int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9060 if (nullptr == caps) {
9061 return -1;
9062 }
9063
9064 uint32_t metaRawCount = caps->meta_raw_channel_count;
9065 int32_t ret = -1;
9066 for (size_t i = 0; i < metaRawCount; i++) {
9067 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9068 ret = i;
9069 break;
9070 }
9071 }
9072
9073 return ret;
9074}
9075
9076/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009077 * FUNCTION : initStaticMetadata
9078 *
9079 * DESCRIPTION: initialize the static metadata
9080 *
9081 * PARAMETERS :
9082 * @cameraId : camera Id
9083 *
9084 * RETURN : int32_t type of status
9085 * 0 -- success
9086 * non-zero failure code
9087 *==========================================================================*/
9088int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9089{
9090 int rc = 0;
9091 CameraMetadata staticInfo;
9092 size_t count = 0;
9093 bool limitedDevice = false;
9094 char prop[PROPERTY_VALUE_MAX];
9095 bool supportBurst = false;
9096
9097 supportBurst = supportBurstCapture(cameraId);
9098
 9099    /* If the sensor is a YUV sensor (no raw support), or if per-frame control is not
 9100     * guaranteed, or if the min fps at max resolution is less than 20 fps, it is
 9101     * advertised as a limited device */
9102 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9103 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9104 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9105 !supportBurst;
9106
9107 uint8_t supportedHwLvl = limitedDevice ?
9108 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009109#ifndef USE_HAL_3_3
9110 // LEVEL_3 - This device will support level 3.
9111 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9112#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009113 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009114#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009115
9116 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9117 &supportedHwLvl, 1);
9118
9119 bool facingBack = false;
9120 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9121 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9122 facingBack = true;
9123 }
9124 /*HAL 3 only*/
9125 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9126 &gCamCapability[cameraId]->min_focus_distance, 1);
9127
9128 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9129 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9130
9131 /*should be using focal lengths but sensor doesn't provide that info now*/
9132 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9133 &gCamCapability[cameraId]->focal_length,
9134 1);
9135
9136 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9137 gCamCapability[cameraId]->apertures,
9138 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9139
9140 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9141 gCamCapability[cameraId]->filter_densities,
9142 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9143
9144
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009145 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9146 size_t mode_count =
9147 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9148 for (size_t i = 0; i < mode_count; i++) {
9149 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9150 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009151 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009152 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009153
9154 int32_t lens_shading_map_size[] = {
9155 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9156 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9157 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9158 lens_shading_map_size,
9159 sizeof(lens_shading_map_size)/sizeof(int32_t));
9160
9161 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9162 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9163
9164 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9165 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9166
9167 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9168 &gCamCapability[cameraId]->max_frame_duration, 1);
9169
9170 camera_metadata_rational baseGainFactor = {
9171 gCamCapability[cameraId]->base_gain_factor.numerator,
9172 gCamCapability[cameraId]->base_gain_factor.denominator};
9173 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9174 &baseGainFactor, 1);
9175
9176 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9177 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9178
9179 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9180 gCamCapability[cameraId]->pixel_array_size.height};
9181 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9182 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9183
9184 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9185 gCamCapability[cameraId]->active_array_size.top,
9186 gCamCapability[cameraId]->active_array_size.width,
9187 gCamCapability[cameraId]->active_array_size.height};
9188 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9189 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9190
9191 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9192 &gCamCapability[cameraId]->white_level, 1);
9193
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009194 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9195 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9196 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009197 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009198 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009199
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009200#ifndef USE_HAL_3_3
9201 bool hasBlackRegions = false;
9202 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9203 LOGW("black_region_count: %d is bounded to %d",
9204 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9205 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9206 }
9207 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9208 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9209 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9210 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9211 }
9212 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9213 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9214 hasBlackRegions = true;
9215 }
9216#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009217 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9218 &gCamCapability[cameraId]->flash_charge_duration, 1);
9219
9220 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9221 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9222
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009223 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9224 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9225 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009226 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9227 &timestampSource, 1);
9228
Thierry Strudel54dc9782017-02-15 12:12:10 -08009229 //update histogram vendor data
9230 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009231 &gCamCapability[cameraId]->histogram_size, 1);
9232
Thierry Strudel54dc9782017-02-15 12:12:10 -08009233 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009234 &gCamCapability[cameraId]->max_histogram_count, 1);
9235
Shuzhen Wang14415f52016-11-16 18:26:18 -08009236    //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9237    //so that the app can request fewer bins than the maximum supported.
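    //As a worked sketch (assuming, hypothetically, max_histogram_count == 256 and
    //MIN_CAM_HISTOGRAM_STATS_SIZE == 64), the loop below would advertise {256, 128, 64}:
    //each entry halves the previous one until the minimum stats size is reached or the
    //value becomes odd.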
9238 std::vector<int32_t> histBins;
9239 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9240 histBins.push_back(maxHistBins);
9241 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9242 (maxHistBins & 0x1) == 0) {
9243 histBins.push_back(maxHistBins >> 1);
9244 maxHistBins >>= 1;
9245 }
9246 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9247 histBins.data(), histBins.size());
9248
Thierry Strudel3d639192016-09-09 11:52:26 -07009249 int32_t sharpness_map_size[] = {
9250 gCamCapability[cameraId]->sharpness_map_size.width,
9251 gCamCapability[cameraId]->sharpness_map_size.height};
9252
9253 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9254 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9255
9256 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9257 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9258
Emilian Peev0f3c3162017-03-15 12:57:46 +00009259 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9260 if (0 <= indexPD) {
9261 // Advertise PD stats data as part of the Depth capabilities
9262 int32_t depthWidth =
9263 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9264 int32_t depthHeight =
9265 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9266 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
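        //The sample count above reads the PD buffer as 2 bytes per pixel partitioned into
        //16-byte samples; this is an assumption inferred from the arithmetic, not a
        //confirmed PDAF packing specification.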
9267 assert(0 < depthSamplesCount);
9268 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9269 &depthSamplesCount, 1);
9270
9271 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9272 depthHeight,
9273 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9274 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9275 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9276 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9277 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9278
9279 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9280 depthHeight, 33333333,
9281 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9282 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9283 depthMinDuration,
9284 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9285
9286 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9287 depthHeight, 0,
9288 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9289 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9290 depthStallDuration,
9291 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9292
9293 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9294 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9295 }
9296
Thierry Strudel3d639192016-09-09 11:52:26 -07009297 int32_t scalar_formats[] = {
9298 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9299 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9300 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9301 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9302 HAL_PIXEL_FORMAT_RAW10,
9303 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009304 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9305 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9306 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009307
9308 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9309 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9310 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9311 count, MAX_SIZES_CNT, available_processed_sizes);
9312 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9313 available_processed_sizes, count * 2);
9314
9315 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9316 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9317 makeTable(gCamCapability[cameraId]->raw_dim,
9318 count, MAX_SIZES_CNT, available_raw_sizes);
9319 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9320 available_raw_sizes, count * 2);
9321
9322 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9323 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9324 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9325 count, MAX_SIZES_CNT, available_fps_ranges);
9326 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9327 available_fps_ranges, count * 2);
9328
9329 camera_metadata_rational exposureCompensationStep = {
9330 gCamCapability[cameraId]->exp_compensation_step.numerator,
9331 gCamCapability[cameraId]->exp_compensation_step.denominator};
9332 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9333 &exposureCompensationStep, 1);
9334
9335 Vector<uint8_t> availableVstabModes;
9336 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9337 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009338 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009339 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009340 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009341 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009342 count = IS_TYPE_MAX;
9343 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9344 for (size_t i = 0; i < count; i++) {
9345 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9346 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9347 eisSupported = true;
9348 break;
9349 }
9350 }
9351 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009352 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9353 }
9354 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9355 availableVstabModes.array(), availableVstabModes.size());
9356
9357 /*HAL 1 and HAL 3 common*/
9358 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9359 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9360 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009361 // Cap the max zoom to the max preferred value
9362 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009363 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9364 &maxZoom, 1);
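    //Note: maxZoomStep / minZoomStep above is integer division, so a zoom table whose last
    //entry were, say, 790 (7.9x) would contribute 7 before being capped by
    //MAX_PREFERRED_ZOOM_RATIO (790 is a hypothetical example value).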
9365
9366 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9367 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9368
9369 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9370 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9371 max3aRegions[2] = 0; /* AF not supported */
9372 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9373 max3aRegions, 3);
9374
9375 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9376 memset(prop, 0, sizeof(prop));
9377 property_get("persist.camera.facedetect", prop, "1");
9378 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9379 LOGD("Support face detection mode: %d",
9380 supportedFaceDetectMode);
9381
9382 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009383    /* supported mode should be OFF if the max number of faces is 0 */
9384 if (maxFaces <= 0) {
9385 supportedFaceDetectMode = 0;
9386 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009387 Vector<uint8_t> availableFaceDetectModes;
9388 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9389 if (supportedFaceDetectMode == 1) {
9390 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9391 } else if (supportedFaceDetectMode == 2) {
9392 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9393 } else if (supportedFaceDetectMode == 3) {
9394 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9395 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9396 } else {
9397 maxFaces = 0;
9398 }
9399 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9400 availableFaceDetectModes.array(),
9401 availableFaceDetectModes.size());
9402 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9403 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009404 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9405 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9406 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009407
9408 int32_t exposureCompensationRange[] = {
9409 gCamCapability[cameraId]->exposure_compensation_min,
9410 gCamCapability[cameraId]->exposure_compensation_max};
9411 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9412 exposureCompensationRange,
9413 sizeof(exposureCompensationRange)/sizeof(int32_t));
9414
9415 uint8_t lensFacing = (facingBack) ?
9416 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9417 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9418
9419 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9420 available_thumbnail_sizes,
9421 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9422
9423    /* all sizes will be combined into this tag */
9424 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9425 /*android.scaler.availableStreamConfigurations*/
9426 Vector<int32_t> available_stream_configs;
9427 cam_dimension_t active_array_dim;
9428 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9429 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009430
9431    /* Advertise the list of supported input dimensions based on the property below.
9432      By default all sizes up to 5MP will be advertised.
9433      Note that the setprop resolution format should be WxH,
9434      e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9435      To list all supported sizes, the property needs to be set to "0x0" */
9436 cam_dimension_t minInputSize = {2592,1944}; //5MP
9437 memset(prop, 0, sizeof(prop));
9438 property_get("persist.camera.input.minsize", prop, "2592x1944");
9439 if (strlen(prop) > 0) {
9440 char *saveptr = NULL;
9441 char *token = strtok_r(prop, "x", &saveptr);
9442 if (token != NULL) {
9443 minInputSize.width = atoi(token);
9444 }
9445 token = strtok_r(NULL, "x", &saveptr);
9446 if (token != NULL) {
9447 minInputSize.height = atoi(token);
9448 }
9449 }
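    //For example, a property value of "1280x720" is split by strtok_r into
    //minInputSize.width = 1280 and minInputSize.height = 720; a missing token leaves the
    //corresponding field at its 5MP default.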
9450
Thierry Strudel3d639192016-09-09 11:52:26 -07009451    /* Add input/output stream configurations for each scalar format */
9452 for (size_t j = 0; j < scalar_formats_count; j++) {
9453 switch (scalar_formats[j]) {
9454 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9455 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9456 case HAL_PIXEL_FORMAT_RAW10:
9457 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9458 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9459 addStreamConfig(available_stream_configs, scalar_formats[j],
9460 gCamCapability[cameraId]->raw_dim[i],
9461 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9462 }
9463 break;
9464 case HAL_PIXEL_FORMAT_BLOB:
9465 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9466 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9467 addStreamConfig(available_stream_configs, scalar_formats[j],
9468 gCamCapability[cameraId]->picture_sizes_tbl[i],
9469 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9470 }
9471 break;
9472 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9473 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9474 default:
9475 cam_dimension_t largest_picture_size;
9476 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9477 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9478 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9479 addStreamConfig(available_stream_configs, scalar_formats[j],
9480 gCamCapability[cameraId]->picture_sizes_tbl[i],
9481 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009482            /* For the below 2 formats we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009483 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9484 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009485 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9486 >= minInputSize.width) || (gCamCapability[cameraId]->
9487 picture_sizes_tbl[i].height >= minInputSize.height)) {
9488 addStreamConfig(available_stream_configs, scalar_formats[j],
9489 gCamCapability[cameraId]->picture_sizes_tbl[i],
9490 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9491 }
9492 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009493 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009494
Thierry Strudel3d639192016-09-09 11:52:26 -07009495 break;
9496 }
9497 }
9498
9499 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9500 available_stream_configs.array(), available_stream_configs.size());
9501 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9502 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9503
9504 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9505 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9506
9507 /* android.scaler.availableMinFrameDurations */
9508 Vector<int64_t> available_min_durations;
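    //Each entry appended below is a (format, width, height, min frame duration in ns)
    //quadruple.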
9509 for (size_t j = 0; j < scalar_formats_count; j++) {
9510 switch (scalar_formats[j]) {
9511 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9512 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9513 case HAL_PIXEL_FORMAT_RAW10:
9514 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9515 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9516 available_min_durations.add(scalar_formats[j]);
9517 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9518 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9519 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9520 }
9521 break;
9522 default:
9523 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9524 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9525 available_min_durations.add(scalar_formats[j]);
9526 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9527 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9528 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9529 }
9530 break;
9531 }
9532 }
9533 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9534 available_min_durations.array(), available_min_durations.size());
9535
9536 Vector<int32_t> available_hfr_configs;
9537 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9538 int32_t fps = 0;
9539 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9540 case CAM_HFR_MODE_60FPS:
9541 fps = 60;
9542 break;
9543 case CAM_HFR_MODE_90FPS:
9544 fps = 90;
9545 break;
9546 case CAM_HFR_MODE_120FPS:
9547 fps = 120;
9548 break;
9549 case CAM_HFR_MODE_150FPS:
9550 fps = 150;
9551 break;
9552 case CAM_HFR_MODE_180FPS:
9553 fps = 180;
9554 break;
9555 case CAM_HFR_MODE_210FPS:
9556 fps = 210;
9557 break;
9558 case CAM_HFR_MODE_240FPS:
9559 fps = 240;
9560 break;
9561 case CAM_HFR_MODE_480FPS:
9562 fps = 480;
9563 break;
9564 case CAM_HFR_MODE_OFF:
9565 case CAM_HFR_MODE_MAX:
9566 default:
9567 break;
9568 }
9569
9570 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9571 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9572            /* For each HFR frame rate, we need to advertise one variable fps range
9573             * and one fixed fps range per dimension. E.g. for 120 FPS, advertise [30, 120]
9574             * and [120, 120]. While camcorder preview alone is running, [30, 120] is
9575             * set by the app. When video recording is started, [120, 120] is
9576             * set. This way the sensor configuration does not change when recording
9577             * is started. */
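            /* As a concrete sketch (assuming PREVIEW_FPS_FOR_HFR == 30, a hypothetical
             * value), a 1920x1080 entry at 120 FPS expands to (1920, 1080, 30, 120, 4)
             * and (1920, 1080, 120, 120, 4); the last element is fps / PREVIEW_FPS_FOR_HFR,
             * i.e. the maximum batch size. */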
9578
9579 /* (width, height, fps_min, fps_max, batch_size_max) */
9580 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9581 j < MAX_SIZES_CNT; j++) {
9582 available_hfr_configs.add(
9583 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9584 available_hfr_configs.add(
9585 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9586 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9587 available_hfr_configs.add(fps);
9588 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9589
9590 /* (width, height, fps_min, fps_max, batch_size_max) */
9591 available_hfr_configs.add(
9592 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9593 available_hfr_configs.add(
9594 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9595 available_hfr_configs.add(fps);
9596 available_hfr_configs.add(fps);
9597 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9598 }
9599 }
9600 }
9601 //Advertise HFR capability only if the property is set
9602 memset(prop, 0, sizeof(prop));
9603 property_get("persist.camera.hal3hfr.enable", prop, "1");
9604 uint8_t hfrEnable = (uint8_t)atoi(prop);
9605
9606 if(hfrEnable && available_hfr_configs.array()) {
9607 staticInfo.update(
9608 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9609 available_hfr_configs.array(), available_hfr_configs.size());
9610 }
9611
9612 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9613 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9614 &max_jpeg_size, 1);
9615
9616 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9617 size_t size = 0;
9618 count = CAM_EFFECT_MODE_MAX;
9619 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9620 for (size_t i = 0; i < count; i++) {
9621 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9622 gCamCapability[cameraId]->supported_effects[i]);
9623 if (NAME_NOT_FOUND != val) {
9624 avail_effects[size] = (uint8_t)val;
9625 size++;
9626 }
9627 }
9628 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9629 avail_effects,
9630 size);
9631
9632 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9633 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9634 size_t supported_scene_modes_cnt = 0;
9635 count = CAM_SCENE_MODE_MAX;
9636 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9637 for (size_t i = 0; i < count; i++) {
9638 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9639 CAM_SCENE_MODE_OFF) {
9640 int val = lookupFwkName(SCENE_MODES_MAP,
9641 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9642 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009643
Thierry Strudel3d639192016-09-09 11:52:26 -07009644 if (NAME_NOT_FOUND != val) {
9645 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9646 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9647 supported_scene_modes_cnt++;
9648 }
9649 }
9650 }
9651 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9652 avail_scene_modes,
9653 supported_scene_modes_cnt);
9654
9655 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9656 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9657 supported_scene_modes_cnt,
9658 CAM_SCENE_MODE_MAX,
9659 scene_mode_overrides,
9660 supported_indexes,
9661 cameraId);
9662
9663 if (supported_scene_modes_cnt == 0) {
9664 supported_scene_modes_cnt = 1;
9665 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9666 }
9667
9668 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9669 scene_mode_overrides, supported_scene_modes_cnt * 3);
9670
9671 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9672 ANDROID_CONTROL_MODE_AUTO,
9673 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9674 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9675 available_control_modes,
9676 3);
9677
9678 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9679 size = 0;
9680 count = CAM_ANTIBANDING_MODE_MAX;
9681 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9682 for (size_t i = 0; i < count; i++) {
9683 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9684 gCamCapability[cameraId]->supported_antibandings[i]);
9685 if (NAME_NOT_FOUND != val) {
9686 avail_antibanding_modes[size] = (uint8_t)val;
9687 size++;
9688 }
9689
9690 }
9691 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9692 avail_antibanding_modes,
9693 size);
9694
9695 uint8_t avail_abberation_modes[] = {
9696 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9697 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9698 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9699 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9700 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9701 if (0 == count) {
9702        // If no aberration correction modes are available for a device, advertise only the OFF mode
9703 size = 1;
9704 } else {
9705        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9706        // So, advertise all 3 modes if at least one mode is supported, as per the
9707        // new M requirement.
9708 size = 3;
9709 }
9710 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9711 avail_abberation_modes,
9712 size);
9713
9714 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9715 size = 0;
9716 count = CAM_FOCUS_MODE_MAX;
9717 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9718 for (size_t i = 0; i < count; i++) {
9719 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9720 gCamCapability[cameraId]->supported_focus_modes[i]);
9721 if (NAME_NOT_FOUND != val) {
9722 avail_af_modes[size] = (uint8_t)val;
9723 size++;
9724 }
9725 }
9726 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9727 avail_af_modes,
9728 size);
9729
9730 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9731 size = 0;
9732 count = CAM_WB_MODE_MAX;
9733 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9734 for (size_t i = 0; i < count; i++) {
9735 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9736 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9737 gCamCapability[cameraId]->supported_white_balances[i]);
9738 if (NAME_NOT_FOUND != val) {
9739 avail_awb_modes[size] = (uint8_t)val;
9740 size++;
9741 }
9742 }
9743 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9744 avail_awb_modes,
9745 size);
9746
9747 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9748 count = CAM_FLASH_FIRING_LEVEL_MAX;
9749 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9750 count);
9751 for (size_t i = 0; i < count; i++) {
9752 available_flash_levels[i] =
9753 gCamCapability[cameraId]->supported_firing_levels[i];
9754 }
9755 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9756 available_flash_levels, count);
9757
9758 uint8_t flashAvailable;
9759 if (gCamCapability[cameraId]->flash_available)
9760 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9761 else
9762 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9763 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9764 &flashAvailable, 1);
9765
9766 Vector<uint8_t> avail_ae_modes;
9767 count = CAM_AE_MODE_MAX;
9768 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9769 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009770 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9771 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9772 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9773 }
9774 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009775 }
9776 if (flashAvailable) {
9777 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9778 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9779 }
9780 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9781 avail_ae_modes.array(),
9782 avail_ae_modes.size());
9783
9784 int32_t sensitivity_range[2];
9785 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9786 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9787 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9788 sensitivity_range,
9789 sizeof(sensitivity_range) / sizeof(int32_t));
9790
9791 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9792 &gCamCapability[cameraId]->max_analog_sensitivity,
9793 1);
9794
9795 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9796 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9797 &sensor_orientation,
9798 1);
9799
9800 int32_t max_output_streams[] = {
9801 MAX_STALLING_STREAMS,
9802 MAX_PROCESSED_STREAMS,
9803 MAX_RAW_STREAMS};
9804 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9805 max_output_streams,
9806 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9807
9808 uint8_t avail_leds = 0;
9809 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9810 &avail_leds, 0);
9811
9812 uint8_t focus_dist_calibrated;
9813 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9814 gCamCapability[cameraId]->focus_dist_calibrated);
9815 if (NAME_NOT_FOUND != val) {
9816 focus_dist_calibrated = (uint8_t)val;
9817 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9818 &focus_dist_calibrated, 1);
9819 }
9820
9821 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9822 size = 0;
9823 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9824 MAX_TEST_PATTERN_CNT);
9825 for (size_t i = 0; i < count; i++) {
9826 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9827 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9828 if (NAME_NOT_FOUND != testpatternMode) {
9829 avail_testpattern_modes[size] = testpatternMode;
9830 size++;
9831 }
9832 }
9833 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9834 avail_testpattern_modes,
9835 size);
9836
9837 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9838 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9839 &max_pipeline_depth,
9840 1);
9841
9842 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9843 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9844 &partial_result_count,
9845 1);
9846
9847 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9848 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9849
9850 Vector<uint8_t> available_capabilities;
9851 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9852 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9853 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9854 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9855 if (supportBurst) {
9856 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9857 }
9858 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9859 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9860 if (hfrEnable && available_hfr_configs.array()) {
9861 available_capabilities.add(
9862 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9863 }
9864
9865 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9866 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9867 }
9868 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9869 available_capabilities.array(),
9870 available_capabilities.size());
9871
9872    //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9873    //The assumption is that all Bayer cameras support MANUAL_SENSOR.
9874 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9875 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9876
9877 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9878 &aeLockAvailable, 1);
9879
9880    //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9881    //BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9882 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9883 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9884
9885 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9886 &awbLockAvailable, 1);
9887
9888 int32_t max_input_streams = 1;
9889 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9890 &max_input_streams,
9891 1);
9892
9893    /* Format of the map is: input format, num_output_formats, outputFormat1, ..., outputFormatN */
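    //Decoded, the map below reads: IMPLEMENTATION_DEFINED -> {BLOB, YCbCr_420_888} and
    //YCbCr_420_888 -> {BLOB, YCbCr_420_888}.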
9894 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9895 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9896 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9897 HAL_PIXEL_FORMAT_YCbCr_420_888};
9898 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9899 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9900
9901 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9902 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9903 &max_latency,
9904 1);
9905
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009906#ifndef USE_HAL_3_3
9907 int32_t isp_sensitivity_range[2];
9908 isp_sensitivity_range[0] =
9909 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9910 isp_sensitivity_range[1] =
9911 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9912 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9913 isp_sensitivity_range,
9914 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9915#endif
9916
Thierry Strudel3d639192016-09-09 11:52:26 -07009917 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9918 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9919 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9920 available_hot_pixel_modes,
9921 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9922
9923 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9924 ANDROID_SHADING_MODE_FAST,
9925 ANDROID_SHADING_MODE_HIGH_QUALITY};
9926 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9927 available_shading_modes,
9928 3);
9929
9930 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9931 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9932 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9933 available_lens_shading_map_modes,
9934 2);
9935
9936 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9937 ANDROID_EDGE_MODE_FAST,
9938 ANDROID_EDGE_MODE_HIGH_QUALITY,
9939 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9940 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9941 available_edge_modes,
9942 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9943
9944 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9945 ANDROID_NOISE_REDUCTION_MODE_FAST,
9946 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9947 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9948 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9949 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9950 available_noise_red_modes,
9951 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9952
9953 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9954 ANDROID_TONEMAP_MODE_FAST,
9955 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9956 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9957 available_tonemap_modes,
9958 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9959
9960 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9961 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9962 available_hot_pixel_map_modes,
9963 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9964
9965 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9966 gCamCapability[cameraId]->reference_illuminant1);
9967 if (NAME_NOT_FOUND != val) {
9968 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9969 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9970 }
9971
9972 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9973 gCamCapability[cameraId]->reference_illuminant2);
9974 if (NAME_NOT_FOUND != val) {
9975 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9976 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9977 }
9978
9979 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9980 (void *)gCamCapability[cameraId]->forward_matrix1,
9981 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9982
9983 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9984 (void *)gCamCapability[cameraId]->forward_matrix2,
9985 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9986
9987 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9988 (void *)gCamCapability[cameraId]->color_transform1,
9989 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9990
9991 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9992 (void *)gCamCapability[cameraId]->color_transform2,
9993 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9994
9995 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9996 (void *)gCamCapability[cameraId]->calibration_transform1,
9997 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9998
9999 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10000 (void *)gCamCapability[cameraId]->calibration_transform2,
10001 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10002
10003 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10004 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10005 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10006 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10007 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10008 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10009 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10010 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10011 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10012 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10013 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10014 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10015 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10016 ANDROID_JPEG_GPS_COORDINATES,
10017 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10018 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10019 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10020 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10021 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10022 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10023 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10024 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10025 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10026 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010027#ifndef USE_HAL_3_3
10028 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10029#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010030 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010031 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010032 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10033 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010034 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010035 /* DevCamDebug metadata request_keys_basic */
10036 DEVCAMDEBUG_META_ENABLE,
10037 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010038 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010039 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010040 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010041 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Samuel Ha68ba5172016-12-15 18:41:12 -080010042 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010043
10044 size_t request_keys_cnt =
10045 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10046 Vector<int32_t> available_request_keys;
10047 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10048 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10049 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10050 }
10051
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010052 if (gExposeEnableZslKey) {
Chien-Yu Chened0a4c92017-05-01 18:25:03 +000010053 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010054 }
10055
Thierry Strudel3d639192016-09-09 11:52:26 -070010056 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10057 available_request_keys.array(), available_request_keys.size());
10058
10059 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10060 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10061 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10062 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10063 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10064 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10065 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10066 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10067 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10068 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10069 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10070 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10071 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10072 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10073 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10074 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10075 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010076 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010077 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10078 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10079 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010080 ANDROID_STATISTICS_FACE_SCORES,
10081#ifndef USE_HAL_3_3
10082 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10083#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010084 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010085 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010086 // DevCamDebug metadata result_keys_basic
10087 DEVCAMDEBUG_META_ENABLE,
10088 // DevCamDebug metadata result_keys AF
10089 DEVCAMDEBUG_AF_LENS_POSITION,
10090 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10091 DEVCAMDEBUG_AF_TOF_DISTANCE,
10092 DEVCAMDEBUG_AF_LUMA,
10093 DEVCAMDEBUG_AF_HAF_STATE,
10094 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10095 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10096 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10097 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10098 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10099 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10100 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10101 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10102 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10103 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10104 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10105 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10106 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10107 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10108 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10109 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10110 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10111 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10112 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10113 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10114 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10115 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10116 // DevCamDebug metadata result_keys AEC
10117 DEVCAMDEBUG_AEC_TARGET_LUMA,
10118 DEVCAMDEBUG_AEC_COMP_LUMA,
10119 DEVCAMDEBUG_AEC_AVG_LUMA,
10120 DEVCAMDEBUG_AEC_CUR_LUMA,
10121 DEVCAMDEBUG_AEC_LINECOUNT,
10122 DEVCAMDEBUG_AEC_REAL_GAIN,
10123 DEVCAMDEBUG_AEC_EXP_INDEX,
10124 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010125 // DevCamDebug metadata result_keys zzHDR
10126 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10127 DEVCAMDEBUG_AEC_L_LINECOUNT,
10128 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10129 DEVCAMDEBUG_AEC_S_LINECOUNT,
10130 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10131 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10132 // DevCamDebug metadata result_keys ADRC
10133 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10134 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10135 DEVCAMDEBUG_AEC_GTM_RATIO,
10136 DEVCAMDEBUG_AEC_LTM_RATIO,
10137 DEVCAMDEBUG_AEC_LA_RATIO,
10138 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010139 // DevCamDebug metadata result_keys AWB
10140 DEVCAMDEBUG_AWB_R_GAIN,
10141 DEVCAMDEBUG_AWB_G_GAIN,
10142 DEVCAMDEBUG_AWB_B_GAIN,
10143 DEVCAMDEBUG_AWB_CCT,
10144 DEVCAMDEBUG_AWB_DECISION,
10145 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010146 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10147 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10148 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010149 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010150 };
10151
Thierry Strudel3d639192016-09-09 11:52:26 -070010152 size_t result_keys_cnt =
10153 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10154
10155 Vector<int32_t> available_result_keys;
10156 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10157 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10158 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10159 }
10160 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10161 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10162 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10163 }
10164 if (supportedFaceDetectMode == 1) {
10165 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10166 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10167 } else if ((supportedFaceDetectMode == 2) ||
10168 (supportedFaceDetectMode == 3)) {
10169 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10170 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10171 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010172#ifndef USE_HAL_3_3
10173 if (hasBlackRegions) {
10174 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10175 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10176 }
10177#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010178
10179 if (gExposeEnableZslKey) {
10180 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10181 }
10182
Thierry Strudel3d639192016-09-09 11:52:26 -070010183 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10184 available_result_keys.array(), available_result_keys.size());
10185
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010186 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010187 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10188 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10189 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10190 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10191 ANDROID_SCALER_CROPPING_TYPE,
10192 ANDROID_SYNC_MAX_LATENCY,
10193 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10194 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10195 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10196 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10197 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10198 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10199 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10200 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10201 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10202 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10203 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10204 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10205 ANDROID_LENS_FACING,
10206 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10207 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10208 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10209 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10210 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10211 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10212 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10213 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10214 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10215 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10216 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10217 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10218 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10219 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10220 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10221 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10222 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10223 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10224 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10225 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010226 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010227 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10228 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10229 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10230 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10231 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10232 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10233 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10234 ANDROID_CONTROL_AVAILABLE_MODES,
10235 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10236 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10237 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10238 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010239 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10240#ifndef USE_HAL_3_3
10241 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10242 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10243#endif
10244 };
10245
10246 Vector<int32_t> available_characteristics_keys;
10247 available_characteristics_keys.appendArray(characteristics_keys_basic,
10248 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10249#ifndef USE_HAL_3_3
10250 if (hasBlackRegions) {
10251 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10252 }
10253#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010254
10255 if (0 <= indexPD) {
10256 int32_t depthKeys[] = {
10257 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10258 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10259 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10260 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10261 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10262 };
10263 available_characteristics_keys.appendArray(depthKeys,
10264 sizeof(depthKeys) / sizeof(depthKeys[0]));
10265 }
10266
Thierry Strudel3d639192016-09-09 11:52:26 -070010267 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010268 available_characteristics_keys.array(),
10269 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010270
10271    /* Available stall durations depend on the HW + SW and will differ across devices. */
10272    /* Have to add entries for RAW after implementation. */
10273 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10274 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10275
10276 Vector<int64_t> available_stall_durations;
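    //Each entry appended below is a (format, width, height, stall duration in ns) quadruple.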
10277 for (uint32_t j = 0; j < stall_formats_count; j++) {
10278 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10279 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10280 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10281 available_stall_durations.add(stall_formats[j]);
10282 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10283 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10284 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10285 }
10286 } else {
10287 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10288 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10289 available_stall_durations.add(stall_formats[j]);
10290 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10291 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10292 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10293 }
10294 }
10295 }
10296 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10297 available_stall_durations.array(),
10298 available_stall_durations.size());
10299
10300 //QCAMERA3_OPAQUE_RAW
10301 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10302 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10303 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10304 case LEGACY_RAW:
10305 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10306 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10307 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10308 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10309 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10310 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10311 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10312 break;
10313 case MIPI_RAW:
10314 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10315 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10316 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10317 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10318 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10319 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10320 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10321 break;
10322 default:
10323 LOGE("unknown opaque_raw_format %d",
10324 gCamCapability[cameraId]->opaque_raw_fmt);
10325 break;
10326 }
10327 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10328
10329 Vector<int32_t> strides;
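    //Each entry appended below is a (width, height, stride) triple for the opaque RAW dimensions.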
10330 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10331 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10332 cam_stream_buf_plane_info_t buf_planes;
10333 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10334 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10335 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10336 &gCamCapability[cameraId]->padding_info, &buf_planes);
10337 strides.add(buf_planes.plane_info.mp[0].stride);
10338 }
10339 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10340 strides.size());
10341
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010342 //TBD: remove the following line once backend advertises zzHDR in feature mask
10343 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010344 //Video HDR default
10345 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10346 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010347 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010348 int32_t vhdr_mode[] = {
10349 QCAMERA3_VIDEO_HDR_MODE_OFF,
10350 QCAMERA3_VIDEO_HDR_MODE_ON};
10351
10352 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10353 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10354 vhdr_mode, vhdr_mode_count);
10355 }
10356
Thierry Strudel3d639192016-09-09 11:52:26 -070010357 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10358 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10359 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10360
10361 uint8_t isMonoOnly =
10362 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10363 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10364 &isMonoOnly, 1);
10365
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010366#ifndef USE_HAL_3_3
10367 Vector<int32_t> opaque_size;
10368 for (size_t j = 0; j < scalar_formats_count; j++) {
10369 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10370 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10371 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10372 cam_stream_buf_plane_info_t buf_planes;
10373
10374 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10375 &gCamCapability[cameraId]->padding_info, &buf_planes);
10376
10377 if (rc == 0) {
10378 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10379 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10380 opaque_size.add(buf_planes.plane_info.frame_len);
10381                } else {
10382 LOGE("raw frame calculation failed!");
10383 }
10384 }
10385 }
10386 }
10387
10388 if ((opaque_size.size() > 0) &&
10389 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10390 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10391 else
10392        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10393#endif
10394
Thierry Strudel04e026f2016-10-10 11:27:36 -070010395 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10396 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10397 size = 0;
10398 count = CAM_IR_MODE_MAX;
10399 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10400 for (size_t i = 0; i < count; i++) {
10401 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10402 gCamCapability[cameraId]->supported_ir_modes[i]);
10403 if (NAME_NOT_FOUND != val) {
10404 avail_ir_modes[size] = (int32_t)val;
10405 size++;
10406 }
10407 }
10408 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10409 avail_ir_modes, size);
10410 }
10411
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010412 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10413 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10414 size = 0;
10415 count = CAM_AEC_CONVERGENCE_MAX;
10416 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10417 for (size_t i = 0; i < count; i++) {
10418 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10419 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10420 if (NAME_NOT_FOUND != val) {
10421 available_instant_aec_modes[size] = (int32_t)val;
10422 size++;
10423 }
10424 }
10425 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10426 available_instant_aec_modes, size);
10427 }
10428
Thierry Strudel54dc9782017-02-15 12:12:10 -080010429 int32_t sharpness_range[] = {
10430 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10431 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10432 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10433
10434 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10435 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10436 size = 0;
10437 count = CAM_BINNING_CORRECTION_MODE_MAX;
10438 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10439 for (size_t i = 0; i < count; i++) {
10440 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10441 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10442 gCamCapability[cameraId]->supported_binning_modes[i]);
10443 if (NAME_NOT_FOUND != val) {
10444 avail_binning_modes[size] = (int32_t)val;
10445 size++;
10446 }
10447 }
10448 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10449 avail_binning_modes, size);
10450 }
10451
10452 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10453 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10454 size = 0;
10455 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10456 for (size_t i = 0; i < count; i++) {
10457 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10458 gCamCapability[cameraId]->supported_aec_modes[i]);
10459 if (NAME_NOT_FOUND != val)
10460 available_aec_modes[size++] = val;
10461 }
10462 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10463 available_aec_modes, size);
10464 }
10465
10466 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10467 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10468 size = 0;
10469 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10470 for (size_t i = 0; i < count; i++) {
10471 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10472 gCamCapability[cameraId]->supported_iso_modes[i]);
10473 if (NAME_NOT_FOUND != val)
10474 available_iso_modes[size++] = val;
10475 }
10476 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10477 available_iso_modes, size);
10478 }
10479
10480 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010481 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010482 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10483 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10484 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10485
10486 int32_t available_saturation_range[4];
10487 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10488 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10489 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10490 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10491 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10492 available_saturation_range, 4);
10493
10494 uint8_t is_hdr_values[2];
10495 is_hdr_values[0] = 0;
10496 is_hdr_values[1] = 1;
10497 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10498 is_hdr_values, 2);
10499
10500 float is_hdr_confidence_range[2];
10501 is_hdr_confidence_range[0] = 0.0;
10502 is_hdr_confidence_range[1] = 1.0;
10503 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10504 is_hdr_confidence_range, 2);
10505
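// Publish the EEPROM version string (if any), appending an Easel presence marker (",E:Y" or ",E:N") when it fits within MAX_EEPROM_VERSION_INFO_LEN.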
Emilian Peev0a972ef2017-03-16 10:25:53 +000010506 size_t eepromLength = strnlen(
10507 reinterpret_cast<const char *>(
10508 gCamCapability[cameraId]->eeprom_version_info),
10509 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10510 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010511 char easelInfo[] = ",E:N";
10512 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10513 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10514 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010515 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10516 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010517 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010518 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10519 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10520 }
10521
Thierry Strudel3d639192016-09-09 11:52:26 -070010522 gStaticMetadata[cameraId] = staticInfo.release();
10523 return rc;
10524}
10525
10526/*===========================================================================
10527 * FUNCTION : makeTable
10528 *
10529 * DESCRIPTION: make a table of sizes
10530 *
10531 * PARAMETERS :
10532 *
10533 *
10534 *==========================================================================*/
10535void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10536 size_t max_size, int32_t *sizeTable)
10537{
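// Flatten the (width, height) pairs into a single int32 array: [w0, h0, w1, h1, ...].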
10538 size_t j = 0;
10539 if (size > max_size) {
10540 size = max_size;
10541 }
10542 for (size_t i = 0; i < size; i++) {
10543 sizeTable[j] = dimTable[i].width;
10544 sizeTable[j+1] = dimTable[i].height;
10545 j+=2;
10546 }
10547}
10548
10549/*===========================================================================
10550 * FUNCTION : makeFPSTable
10551 *
10552 * DESCRIPTION: make a table of fps ranges
10553 *
10554 * PARAMETERS :
10555 *
10556 *==========================================================================*/
10557void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10558 size_t max_size, int32_t *fpsRangesTable)
10559{
10560 size_t j = 0;
10561 if (size > max_size) {
10562 size = max_size;
10563 }
10564 for (size_t i = 0; i < size; i++) {
10565 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10566 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10567 j+=2;
10568 }
10569}
10570
10571/*===========================================================================
10572 * FUNCTION : makeOverridesList
10573 *
10574 * DESCRIPTION: make a list of scene mode overrides
10575 *
10576 * PARAMETERS :
10577 *
10578 *
10579 *==========================================================================*/
10580void QCamera3HardwareInterface::makeOverridesList(
10581 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10582 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10583{
10584 /*The daemon will give a list of overrides for all scene modes.
10585 However, we should send the framework only the overrides for the scene
10586 modes it supports*/
10587 size_t j = 0;
10588 if (size > max_size) {
10589 size = max_size;
10590 }
10591 size_t focus_count = CAM_FOCUS_MODE_MAX;
10592 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10593 focus_count);
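// Each overridesList entry is an (aeMode, awbMode, afMode) triple, so j advances by 3 per scene mode.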
10594 for (size_t i = 0; i < size; i++) {
10595 bool supt = false;
10596 size_t index = supported_indexes[i];
10597 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10598 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10599 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10600 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10601 overridesTable[index].awb_mode);
10602 if (NAME_NOT_FOUND != val) {
10603 overridesList[j+1] = (uint8_t)val;
10604 }
10605 uint8_t focus_override = overridesTable[index].af_mode;
10606 for (size_t k = 0; k < focus_count; k++) {
10607 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10608 supt = true;
10609 break;
10610 }
10611 }
10612 if (supt) {
10613 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10614 focus_override);
10615 if (NAME_NOT_FOUND != val) {
10616 overridesList[j+2] = (uint8_t)val;
10617 }
10618 } else {
10619 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10620 }
10621 j+=3;
10622 }
10623}
10624
10625/*===========================================================================
10626 * FUNCTION : filterJpegSizes
10627 *
10628 * DESCRIPTION: Returns the supported JPEG sizes, i.e. those processed sizes that are
10629 * no smaller than the active array size divided by the downscale factor
10630 *
10631 * PARAMETERS :
10632 *
10633 * RETURN : length of jpegSizes array
10634 *==========================================================================*/
10635
10636size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10637 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10638 uint8_t downscale_factor)
10639{
10640 if (0 == downscale_factor) {
10641 downscale_factor = 1;
10642 }
10643
10644 int32_t min_width = active_array_size.width / downscale_factor;
10645 int32_t min_height = active_array_size.height / downscale_factor;
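// e.g. (illustrative numbers only) with a 4000x3000 active array and downscale factor 2, only processed sizes of at least 2000x1500 are kept.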
10646 size_t jpegSizesCnt = 0;
10647 if (processedSizesCnt > maxCount) {
10648 processedSizesCnt = maxCount;
10649 }
10650 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10651 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10652 jpegSizes[jpegSizesCnt] = processedSizes[i];
10653 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10654 jpegSizesCnt += 2;
10655 }
10656 }
10657 return jpegSizesCnt;
10658}
10659
10660/*===========================================================================
10661 * FUNCTION : computeNoiseModelEntryS
10662 *
10663 * DESCRIPTION: function to map a given sensitivity to the S noise
10664 * model parameters in the DNG noise model.
10665 *
10666 * PARAMETERS : sens : the sensor sensitivity
10667 *
10668 * RETURN : S (sensor amplification) noise
10669 *
10670 *==========================================================================*/
10671double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10672 double s = gCamCapability[mCameraId]->gradient_S * sens +
10673 gCamCapability[mCameraId]->offset_S;
10674 return ((s < 0.0) ? 0.0 : s);
10675}
10676
10677/*===========================================================================
10678 * FUNCTION : computeNoiseModelEntryO
10679 *
10680 * DESCRIPTION: function to map a given sensitivity to the O noise
10681 * model parameters in the DNG noise model.
10682 *
10683 * PARAMETERS : sens : the sensor sensitivity
10684 *
10685 * RETURN : O (sensor readout) noise
10686 *
10687 *==========================================================================*/
10688double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10689 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
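// Digital gain applies only above the max analog sensitivity; clamp the gain factor to 1.0 below that point.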
10690 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10691 1.0 : (1.0 * sens / max_analog_sens);
10692 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10693 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10694 return ((o < 0.0) ? 0.0 : o);
10695}
10696
10697/*===========================================================================
10698 * FUNCTION : getSensorSensitivity
10699 *
10700 * DESCRIPTION: convert iso_mode to an integer value
10701 *
10702 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10703 *
10704 * RETURN : sensitivity supported by sensor
10705 *
10706 *==========================================================================*/
10707int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10708{
10709 int32_t sensitivity;
10710
10711 switch (iso_mode) {
10712 case CAM_ISO_MODE_100:
10713 sensitivity = 100;
10714 break;
10715 case CAM_ISO_MODE_200:
10716 sensitivity = 200;
10717 break;
10718 case CAM_ISO_MODE_400:
10719 sensitivity = 400;
10720 break;
10721 case CAM_ISO_MODE_800:
10722 sensitivity = 800;
10723 break;
10724 case CAM_ISO_MODE_1600:
10725 sensitivity = 1600;
10726 break;
10727 default:
10728 sensitivity = -1;
10729 break;
10730 }
10731 return sensitivity;
10732}
10733
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010734int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010735 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010736 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10737 // to connect to Easel.
10738 bool doNotPowerOnEasel =
10739 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10740
10741 if (doNotPowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010742 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10743 return OK;
10744 }
10745
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010746 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010747 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010748 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010749 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010750 return res;
10751 }
10752
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010753 EaselManagerClientOpened = true;
10754
10755 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010756 if (res != OK) {
10757 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10758 }
10759
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010760 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010761 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010762
10763 // Expose enableZsl key only when HDR+ mode is enabled.
10764 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010765 }
10766
10767 return OK;
10768}
10769
Thierry Strudel3d639192016-09-09 11:52:26 -070010770/*===========================================================================
10771 * FUNCTION : getCamInfo
10772 *
10773 * DESCRIPTION: query camera capabilities
10774 *
10775 * PARAMETERS :
10776 * @cameraId : camera Id
10777 * @info : camera info struct to be filled in with camera capabilities
10778 *
10779 * RETURN : int type of status
10780 * NO_ERROR -- success
10781 * none-zero failure code
10782 *==========================================================================*/
10783int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10784 struct camera_info *info)
10785{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010786 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010787 int rc = 0;
10788
10789 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010790
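// Open the Easel/HDR+ manager client (when Easel is present) before publishing camera info.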
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010791 {
10792 Mutex::Autolock l(gHdrPlusClientLock);
10793 rc = initHdrPlusClientLocked();
10794 if (rc != OK) {
10795 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10796 pthread_mutex_unlock(&gCamLock);
10797 return rc;
10798 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010799 }
10800
Thierry Strudel3d639192016-09-09 11:52:26 -070010801 if (NULL == gCamCapability[cameraId]) {
10802 rc = initCapabilities(cameraId);
10803 if (rc < 0) {
10804 pthread_mutex_unlock(&gCamLock);
10805 return rc;
10806 }
10807 }
10808
10809 if (NULL == gStaticMetadata[cameraId]) {
10810 rc = initStaticMetadata(cameraId);
10811 if (rc < 0) {
10812 pthread_mutex_unlock(&gCamLock);
10813 return rc;
10814 }
10815 }
10816
10817 switch(gCamCapability[cameraId]->position) {
10818 case CAM_POSITION_BACK:
10819 case CAM_POSITION_BACK_AUX:
10820 info->facing = CAMERA_FACING_BACK;
10821 break;
10822
10823 case CAM_POSITION_FRONT:
10824 case CAM_POSITION_FRONT_AUX:
10825 info->facing = CAMERA_FACING_FRONT;
10826 break;
10827
10828 default:
10829 LOGE("Unknown position type %d for camera id:%d",
10830 gCamCapability[cameraId]->position, cameraId);
10831 rc = -1;
10832 break;
10833 }
10834
10835
10836 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010837#ifndef USE_HAL_3_3
10838 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10839#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010840 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010841#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010842 info->static_camera_characteristics = gStaticMetadata[cameraId];
10843
10844 //For now assume both cameras can operate independently.
10845 info->conflicting_devices = NULL;
10846 info->conflicting_devices_length = 0;
10847
10848 //resource cost is 100 * MIN(1.0, m/M),
10849 //where m is throughput requirement with maximum stream configuration
10850 //and M is CPP maximum throughput.
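//e.g. (illustrative numbers only) a 4000x3000 active array at 30 fps with 3 processed streams needs ~1080 MP/s; with a 1200 MP/s CPP limit the cost is 100 * MIN(1.0, 1080/1200) = 90.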
10851 float max_fps = 0.0;
10852 for (uint32_t i = 0;
10853 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10854 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10855 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10856 }
10857 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10858 gCamCapability[cameraId]->active_array_size.width *
10859 gCamCapability[cameraId]->active_array_size.height * max_fps /
10860 gCamCapability[cameraId]->max_pixel_bandwidth;
10861 info->resource_cost = 100 * MIN(1.0, ratio);
10862 LOGI("camera %d resource cost is %d", cameraId,
10863 info->resource_cost);
10864
10865 pthread_mutex_unlock(&gCamLock);
10866 return rc;
10867}
10868
10869/*===========================================================================
10870 * FUNCTION : translateCapabilityToMetadata
10871 *
10872 * DESCRIPTION: translate the capability into camera_metadata_t
10873 *
10874 * PARAMETERS : type of the request
10875 *
10876 *
10877 * RETURN : success: camera_metadata_t*
10878 * failure: NULL
10879 *
10880 *==========================================================================*/
10881camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10882{
10883 if (mDefaultMetadata[type] != NULL) {
10884 return mDefaultMetadata[type];
10885 }
10886 //first time we are handling this request
10887 //fill up the metadata structure using the wrapper class
10888 CameraMetadata settings;
10889 //translate from cam_capability_t to camera_metadata_tag_t
10890 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10891 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10892 int32_t defaultRequestID = 0;
10893 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10894
10895 /* OIS disable */
10896 char ois_prop[PROPERTY_VALUE_MAX];
10897 memset(ois_prop, 0, sizeof(ois_prop));
10898 property_get("persist.camera.ois.disable", ois_prop, "0");
10899 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10900
10901 /* Force video to use OIS */
10902 char videoOisProp[PROPERTY_VALUE_MAX];
10903 memset(videoOisProp, 0, sizeof(videoOisProp));
10904 property_get("persist.camera.ois.video", videoOisProp, "1");
10905 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010906
10907 // Hybrid AE enable/disable
10908 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10909 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10910 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10911 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10912
Thierry Strudel3d639192016-09-09 11:52:26 -070010913 uint8_t controlIntent = 0;
10914 uint8_t focusMode;
10915 uint8_t vsMode;
10916 uint8_t optStabMode;
10917 uint8_t cacMode;
10918 uint8_t edge_mode;
10919 uint8_t noise_red_mode;
10920 uint8_t tonemap_mode;
10921 bool highQualityModeEntryAvailable = FALSE;
10922 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010923 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010924 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10925 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010926 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010927 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010928 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010929
Thierry Strudel3d639192016-09-09 11:52:26 -070010930 switch (type) {
10931 case CAMERA3_TEMPLATE_PREVIEW:
10932 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10933 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10934 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10935 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10936 edge_mode = ANDROID_EDGE_MODE_FAST;
10937 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10938 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10939 break;
10940 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10941 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10942 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10943 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10944 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10945 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10946 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10947 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10948 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10949 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10950 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10951 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10952 highQualityModeEntryAvailable = TRUE;
10953 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10954 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10955 fastModeEntryAvailable = TRUE;
10956 }
10957 }
10958 if (highQualityModeEntryAvailable) {
10959 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10960 } else if (fastModeEntryAvailable) {
10961 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10962 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010963 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10964 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10965 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010966 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010967 break;
10968 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10969 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10970 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10971 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010972 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10973 edge_mode = ANDROID_EDGE_MODE_FAST;
10974 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10975 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10976 if (forceVideoOis)
10977 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10978 break;
10979 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10980 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10981 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10982 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010983 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10984 edge_mode = ANDROID_EDGE_MODE_FAST;
10985 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10986 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10987 if (forceVideoOis)
10988 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10989 break;
10990 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10991 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10992 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10993 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10994 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10995 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10996 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10997 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10998 break;
10999 case CAMERA3_TEMPLATE_MANUAL:
11000 edge_mode = ANDROID_EDGE_MODE_FAST;
11001 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11002 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11003 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11004 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11005 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11006 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11007 break;
11008 default:
11009 edge_mode = ANDROID_EDGE_MODE_FAST;
11010 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11011 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11012 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11013 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11014 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11015 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11016 break;
11017 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011018 // Set CAC to OFF if the underlying device doesn't support it
11019 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11020 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11021 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011022 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11023 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11024 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11025 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11026 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11027 }
11028 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011029 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011030 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011031
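// Override the template OIS default when the sensor exposes a single OIS mode or OIS is disabled via property.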
11032 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11033 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11034 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11035 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11036 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11037 || ois_disable)
11038 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11039 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011040 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011041
11042 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11043 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11044
11045 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11046 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11047
11048 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11049 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11050
11051 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11052 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11053
11054 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11055 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11056
11057 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11058 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11059
11060 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11061 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11062
11063 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11064 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11065
11066 /*flash*/
11067 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11068 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11069
11070 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11071 settings.update(ANDROID_FLASH_FIRING_POWER,
11072 &flashFiringLevel, 1);
11073
11074 /* lens */
11075 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11076 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11077
11078 if (gCamCapability[mCameraId]->filter_densities_count) {
11079 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11080 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11081 gCamCapability[mCameraId]->filter_densities_count);
11082 }
11083
11084 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11085 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11086
Thierry Strudel3d639192016-09-09 11:52:26 -070011087 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11088 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11089
11090 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11091 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11092
11093 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11094 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11095
11096 /* face detection (default to OFF) */
11097 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11098 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11099
Thierry Strudel54dc9782017-02-15 12:12:10 -080011100 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11101 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011102
11103 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11104 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11105
11106 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11107 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11108
Thierry Strudel3d639192016-09-09 11:52:26 -070011109
11110 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11111 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11112
11113 /* Exposure time (default to the minimum supported exposure time) */
11114 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11115 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11116
11117 /* frame duration */
11118 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11119 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11120
11121 /* sensitivity */
11122 static const int32_t default_sensitivity = 100;
11123 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011124#ifndef USE_HAL_3_3
11125 static const int32_t default_isp_sensitivity =
11126 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11127 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11128#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011129
11130 /*edge mode*/
11131 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11132
11133 /*noise reduction mode*/
11134 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11135
11136 /*color correction mode*/
11137 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11138 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11139
11140 /*tonemap mode*/
11141 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11142
11143 int32_t scaler_crop_region[4];
11144 scaler_crop_region[0] = 0;
11145 scaler_crop_region[1] = 0;
11146 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11147 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11148 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11149
11150 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11151 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11152
11153 /*focus distance*/
11154 float focus_distance = 0.0;
11155 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11156
11157 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011158 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011159 float max_range = 0.0;
11160 float max_fixed_fps = 0.0;
11161 int32_t fps_range[2] = {0, 0};
11162 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11163 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011164 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11165 TEMPLATE_MAX_PREVIEW_FPS) {
11166 continue;
11167 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011168 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11169 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11170 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11171 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11172 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11173 if (range > max_range) {
11174 fps_range[0] =
11175 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11176 fps_range[1] =
11177 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11178 max_range = range;
11179 }
11180 } else {
11181 if (range < 0.01 && max_fixed_fps <
11182 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11183 fps_range[0] =
11184 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11185 fps_range[1] =
11186 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11187 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11188 }
11189 }
11190 }
11191 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11192
11193 /*precapture trigger*/
11194 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11195 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11196
11197 /*af trigger*/
11198 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11199 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11200
11201 /* ae & af regions */
11202 int32_t active_region[] = {
11203 gCamCapability[mCameraId]->active_array_size.left,
11204 gCamCapability[mCameraId]->active_array_size.top,
11205 gCamCapability[mCameraId]->active_array_size.left +
11206 gCamCapability[mCameraId]->active_array_size.width,
11207 gCamCapability[mCameraId]->active_array_size.top +
11208 gCamCapability[mCameraId]->active_array_size.height,
11209 0};
11210 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11211 sizeof(active_region) / sizeof(active_region[0]));
11212 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11213 sizeof(active_region) / sizeof(active_region[0]));
11214
11215 /* black level lock */
11216 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11217 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11218
Thierry Strudel3d639192016-09-09 11:52:26 -070011219 //special defaults for manual template
11220 if (type == CAMERA3_TEMPLATE_MANUAL) {
11221 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11222 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11223
11224 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11225 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11226
11227 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11228 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11229
11230 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11231 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11232
11233 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11234 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11235
11236 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11237 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11238 }
11239
11240
11241 /* TNR
11242 * We'll use this location to determine for which templates TNR will be set.
11243 * We will enable TNR if either the preview or the video stream requires TNR.
11244 * This is not to be confused with linking on a per-stream basis; that decision
11245 * is still per-session and will be handled as part of stream configuration
11246 */
11247 uint8_t tnr_enable = 0;
11248
11249 if (m_bTnrPreview || m_bTnrVideo) {
11250
11251 switch (type) {
11252 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11253 tnr_enable = 1;
11254 break;
11255
11256 default:
11257 tnr_enable = 0;
11258 break;
11259 }
11260
11261 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11262 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11263 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11264
11265 LOGD("TNR:%d with process plate %d for template:%d",
11266 tnr_enable, tnr_process_type, type);
11267 }
11268
11269 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011270 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011271 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11272
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011273 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011274 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11275
Shuzhen Wang920ea402017-05-03 08:49:39 -070011276 uint8_t related_camera_id = mCameraId;
11277 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011278
11279 /* CDS default */
11280 char prop[PROPERTY_VALUE_MAX];
11281 memset(prop, 0, sizeof(prop));
11282 property_get("persist.camera.CDS", prop, "Auto");
11283 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11284 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11285 if (CAM_CDS_MODE_MAX == cds_mode) {
11286 cds_mode = CAM_CDS_MODE_AUTO;
11287 }
11288
11289 /* Disabling CDS in templates which have TNR enabled*/
11290 if (tnr_enable)
11291 cds_mode = CAM_CDS_MODE_OFF;
11292
11293 int32_t mode = cds_mode;
11294 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011295
Thierry Strudel269c81a2016-10-12 12:13:59 -070011296 /* Manual Convergence AEC Speed is disabled by default*/
11297 float default_aec_speed = 0;
11298 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11299
11300 /* Manual Convergence AWB Speed is disabled by default*/
11301 float default_awb_speed = 0;
11302 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11303
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011304 // Set instant AEC to normal convergence by default
11305 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11306 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11307
Shuzhen Wang19463d72016-03-08 11:09:52 -080011308 /* hybrid ae */
11309 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11310
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011311 if (gExposeEnableZslKey) {
11312 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11313 }
11314
Thierry Strudel3d639192016-09-09 11:52:26 -070011315 mDefaultMetadata[type] = settings.release();
11316
11317 return mDefaultMetadata[type];
11318}
11319
11320/*===========================================================================
11321 * FUNCTION : setFrameParameters
11322 *
11323 * DESCRIPTION: set parameters per frame as requested in the metadata from
11324 * framework
11325 *
11326 * PARAMETERS :
11327 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011328 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011329 * @blob_request: Whether this request is a blob request or not
11330 *
11331 * RETURN : success: NO_ERROR
11332 * failure:
11333 *==========================================================================*/
11334int QCamera3HardwareInterface::setFrameParameters(
11335 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011336 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011337 int blob_request,
11338 uint32_t snapshotStreamId)
11339{
11340 /*translate from camera_metadata_t type to parm_type_t*/
11341 int rc = 0;
11342 int32_t hal_version = CAM_HAL_V3;
11343
11344 clear_metadata_buffer(mParameters);
11345 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11346 LOGE("Failed to set hal version in the parameters");
11347 return BAD_VALUE;
11348 }
11349
11350 /*we need to update the frame number in the parameters*/
11351 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11352 request->frame_number)) {
11353 LOGE("Failed to set the frame number in the parameters");
11354 return BAD_VALUE;
11355 }
11356
11357 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011358 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011359 LOGE("Failed to set stream type mask in the parameters");
11360 return BAD_VALUE;
11361 }
11362
11363 if (mUpdateDebugLevel) {
11364 uint32_t dummyDebugLevel = 0;
11365 /* The value of dummyDebugLevel is irrelevant. On
11366 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11367 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11368 dummyDebugLevel)) {
11369 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11370 return BAD_VALUE;
11371 }
11372 mUpdateDebugLevel = false;
11373 }
11374
11375 if(request->settings != NULL){
11376 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11377 if (blob_request)
11378 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11379 }
11380
11381 return rc;
11382}
11383
11384/*===========================================================================
11385 * FUNCTION : setReprocParameters
11386 *
11387 * DESCRIPTION: Translate framework metadata to the HAL metadata structure, and
11388 * return it.
11389 *
11390 * PARAMETERS :
11391 * @request : request that needs to be serviced
11392 *
11393 * RETURN : success: NO_ERROR
11394 * failure:
11395 *==========================================================================*/
11396int32_t QCamera3HardwareInterface::setReprocParameters(
11397 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11398 uint32_t snapshotStreamId)
11399{
11400 /*translate from camera_metadata_t type to parm_type_t*/
11401 int rc = 0;
11402
11403 if (NULL == request->settings){
11404 LOGE("Reprocess settings cannot be NULL");
11405 return BAD_VALUE;
11406 }
11407
11408 if (NULL == reprocParam) {
11409 LOGE("Invalid reprocessing metadata buffer");
11410 return BAD_VALUE;
11411 }
11412 clear_metadata_buffer(reprocParam);
11413
11414 /*we need to update the frame number in the parameters*/
11415 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11416 request->frame_number)) {
11417 LOGE("Failed to set the frame number in the parameters");
11418 return BAD_VALUE;
11419 }
11420
11421 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11422 if (rc < 0) {
11423 LOGE("Failed to translate reproc request");
11424 return rc;
11425 }
11426
11427 CameraMetadata frame_settings;
11428 frame_settings = request->settings;
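// Translate the reprocess crop and ROI map vendor tags into CAM_INTF_META_CROP_DATA.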
11429 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11430 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11431 int32_t *crop_count =
11432 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11433 int32_t *crop_data =
11434 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11435 int32_t *roi_map =
11436 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11437 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11438 cam_crop_data_t crop_meta;
11439 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11440 crop_meta.num_of_streams = 1;
11441 crop_meta.crop_info[0].crop.left = crop_data[0];
11442 crop_meta.crop_info[0].crop.top = crop_data[1];
11443 crop_meta.crop_info[0].crop.width = crop_data[2];
11444 crop_meta.crop_info[0].crop.height = crop_data[3];
11445
11446 crop_meta.crop_info[0].roi_map.left =
11447 roi_map[0];
11448 crop_meta.crop_info[0].roi_map.top =
11449 roi_map[1];
11450 crop_meta.crop_info[0].roi_map.width =
11451 roi_map[2];
11452 crop_meta.crop_info[0].roi_map.height =
11453 roi_map[3];
11454
11455 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11456 rc = BAD_VALUE;
11457 }
11458 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11459 request->input_buffer->stream,
11460 crop_meta.crop_info[0].crop.left,
11461 crop_meta.crop_info[0].crop.top,
11462 crop_meta.crop_info[0].crop.width,
11463 crop_meta.crop_info[0].crop.height);
11464 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11465 request->input_buffer->stream,
11466 crop_meta.crop_info[0].roi_map.left,
11467 crop_meta.crop_info[0].roi_map.top,
11468 crop_meta.crop_info[0].roi_map.width,
11469 crop_meta.crop_info[0].roi_map.height);
11470 } else {
11471 LOGE("Invalid reprocess crop count %d!", *crop_count);
11472 }
11473 } else {
11474 LOGE("No crop data from matching output stream");
11475 }
11476
11477 /* These settings are not needed for regular requests so handle them specially for
11478 reprocess requests; information needed for EXIF tags */
11479 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11480 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11481 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11482 if (NAME_NOT_FOUND != val) {
11483 uint32_t flashMode = (uint32_t)val;
11484 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11485 rc = BAD_VALUE;
11486 }
11487 } else {
11488 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11489 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11490 }
11491 } else {
11492 LOGH("No flash mode in reprocess settings");
11493 }
11494
11495 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11496 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11497 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11498 rc = BAD_VALUE;
11499 }
11500 } else {
11501 LOGH("No flash state in reprocess settings");
11502 }
11503
11504 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11505 uint8_t *reprocessFlags =
11506 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11507 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11508 *reprocessFlags)) {
11509 rc = BAD_VALUE;
11510 }
11511 }
11512
Thierry Strudel54dc9782017-02-15 12:12:10 -080011513 // Add exif debug data to internal metadata
11514 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11515 mm_jpeg_debug_exif_params_t *debug_params =
11516 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11517 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11518 // AE
11519 if (debug_params->ae_debug_params_valid == TRUE) {
11520 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11521 debug_params->ae_debug_params);
11522 }
11523 // AWB
11524 if (debug_params->awb_debug_params_valid == TRUE) {
11525 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11526 debug_params->awb_debug_params);
11527 }
11528 // AF
11529 if (debug_params->af_debug_params_valid == TRUE) {
11530 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11531 debug_params->af_debug_params);
11532 }
11533 // ASD
11534 if (debug_params->asd_debug_params_valid == TRUE) {
11535 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11536 debug_params->asd_debug_params);
11537 }
11538 // Stats
11539 if (debug_params->stats_debug_params_valid == TRUE) {
11540 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11541 debug_params->stats_debug_params);
11542 }
11543 // BE Stats
11544 if (debug_params->bestats_debug_params_valid == TRUE) {
11545 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11546 debug_params->bestats_debug_params);
11547 }
11548 // BHIST
11549 if (debug_params->bhist_debug_params_valid == TRUE) {
11550 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11551 debug_params->bhist_debug_params);
11552 }
11553 // 3A Tuning
11554 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11555 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11556 debug_params->q3a_tuning_debug_params);
11557 }
11558 }
11559
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011560 // Add metadata which reprocess needs
11561 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11562 cam_reprocess_info_t *repro_info =
11563 (cam_reprocess_info_t *)frame_settings.find
11564 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011565 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011566 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011567 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011568 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011569 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011570 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011571 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011572 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011573 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011574 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011575 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011576 repro_info->pipeline_flip);
11577 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11578 repro_info->af_roi);
11579 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11580 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011581 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings, the
11582 CAM_INTF_PARM_ROTATION metadata has already been added in
11583 translateToHalMetadata and HAL needs to keep this new rotation
11584 metadata. Otherwise, the old rotation info saved in the vendor tag
11585 would be used */
11586 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11587 CAM_INTF_PARM_ROTATION, reprocParam) {
11588 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11589 } else {
11590 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011591 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011592 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011593 }
11594
11595 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11596 to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11597 roi.width and roi.height would be the final JPEG size.
11598 For now, HAL only checks this for reprocess requests */
11599 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11600 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11601 uint8_t *enable =
11602 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11603 if (*enable == TRUE) {
11604 int32_t *crop_data =
11605 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11606 cam_stream_crop_info_t crop_meta;
11607 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11608 crop_meta.stream_id = 0;
11609 crop_meta.crop.left = crop_data[0];
11610 crop_meta.crop.top = crop_data[1];
11611 crop_meta.crop.width = crop_data[2];
11612 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011613 // The JPEG crop roi should match cpp output size
11614 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11615 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11616 crop_meta.roi_map.left = 0;
11617 crop_meta.roi_map.top = 0;
11618 crop_meta.roi_map.width = cpp_crop->crop.width;
11619 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011620 }
11621 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11622 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011623 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011624 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011625 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11626 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011627 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011628 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11629
11630 // Add JPEG scale information
11631 cam_dimension_t scale_dim;
11632 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11633 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11634 int32_t *roi =
11635 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11636 scale_dim.width = roi[2];
11637 scale_dim.height = roi[3];
11638 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11639 scale_dim);
11640 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11641 scale_dim.width, scale_dim.height, mCameraId);
11642 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011643 }
11644 }
11645
11646 return rc;
11647}
11648
11649/*===========================================================================
11650 * FUNCTION : saveRequestSettings
11651 *
11652 * DESCRIPTION: Add any settings that might have changed to the request settings
11653 * and save the settings to be applied on the frame
11654 *
11655 * PARAMETERS :
11656 * @jpegMetadata : the extracted and/or modified jpeg metadata
11657 * @request : request with initial settings
11658 *
11659 * RETURN :
11660 * camera_metadata_t* : pointer to the saved request settings
11661 *==========================================================================*/
11662camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11663 const CameraMetadata &jpegMetadata,
11664 camera3_capture_request_t *request)
11665{
11666 camera_metadata_t *resultMetadata;
11667 CameraMetadata camMetadata;
11668 camMetadata = request->settings;
11669
11670 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11671 int32_t thumbnail_size[2];
11672 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11673 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11674 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11675 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11676 }
11677
11678 if (request->input_buffer != NULL) {
11679 uint8_t reprocessFlags = 1;
11680 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11681 (uint8_t*)&reprocessFlags,
11682 sizeof(reprocessFlags));
11683 }
11684
11685 resultMetadata = camMetadata.release();
11686 return resultMetadata;
11687}
11688
11689/*===========================================================================
11690 * FUNCTION : setHalFpsRange
11691 *
11692 * DESCRIPTION: set FPS range parameter
11693 *
11694 *
11695 * PARAMETERS :
11696 * @settings : Metadata from framework
11697 * @hal_metadata: Metadata buffer
11698 *
11699 *
11700 * RETURN : success: NO_ERROR
11701 * failure:
11702 *==========================================================================*/
11703int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11704 metadata_buffer_t *hal_metadata)
11705{
11706 int32_t rc = NO_ERROR;
11707 cam_fps_range_t fps_range;
11708 fps_range.min_fps = (float)
11709 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11710 fps_range.max_fps = (float)
11711 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11712 fps_range.video_min_fps = fps_range.min_fps;
11713 fps_range.video_max_fps = fps_range.max_fps;
11714
11715 LOGD("aeTargetFpsRange fps: [%f %f]",
11716 fps_range.min_fps, fps_range.max_fps);
11717 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11718 * follows:
11719 * ---------------------------------------------------------------|
11720 * Video stream is absent in configure_streams |
11721 * (Camcorder preview before the first video record) |
11722 * ---------------------------------------------------------------|
11723 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11724 * | | | vid_min/max_fps|
11725 * ---------------------------------------------------------------|
11726 * NO | [ 30, 240] | 240 | [240, 240] |
11727 * |-------------|-------------|----------------|
11728 * | [240, 240] | 240 | [240, 240] |
11729 * ---------------------------------------------------------------|
11730 * Video stream is present in configure_streams |
11731 * ---------------------------------------------------------------|
11732 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11733 * | | | vid_min/max_fps|
11734 * ---------------------------------------------------------------|
11735 * NO | [ 30, 240] | 240 | [240, 240] |
11736 * (camcorder prev |-------------|-------------|----------------|
11737 * after video rec | [240, 240] | 240 | [240, 240] |
11738 * is stopped) | | | |
11739 * ---------------------------------------------------------------|
11740 * YES | [ 30, 240] | 240 | [240, 240] |
11741 * |-------------|-------------|----------------|
11742 * | [240, 240] | 240 | [240, 240] |
11743 * ---------------------------------------------------------------|
11744 * When Video stream is absent in configure_streams,
11745 * preview fps = sensor_fps / batchsize
11746 * Eg: for 240fps at batchSize 4, preview = 60fps
11747 * for 120fps at batchSize 4, preview = 30fps
11748 *
11749 * When video stream is present in configure_streams, preview fps is as per
11750 * the ratio of preview buffers to video buffers requested in process
11751 * capture request
11752 */
11753 mBatchSize = 0;
11754 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11755 fps_range.min_fps = fps_range.video_max_fps;
11756 fps_range.video_min_fps = fps_range.video_max_fps;
11757 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11758 fps_range.max_fps);
11759 if (NAME_NOT_FOUND != val) {
11760 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11761 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11762 return BAD_VALUE;
11763 }
11764
11765 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11766 /* If batchmode is currently in progress and the fps changes,
11767 * set the flag to restart the sensor */
11768 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11769 (mHFRVideoFps != fps_range.max_fps)) {
11770 mNeedSensorRestart = true;
11771 }
11772 mHFRVideoFps = fps_range.max_fps;
11773 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11774 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11775 mBatchSize = MAX_HFR_BATCH_SIZE;
11776 }
11777 }
11778 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11779
11780 }
11781 } else {
11782 /* HFR mode is a session parameter in the backend/ISP. It must be reset when
11783 * not in HFR mode */
11784 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11785 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11786 return BAD_VALUE;
11787 }
11788 }
11789 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11790 return BAD_VALUE;
11791 }
11792 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11793 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11794 return rc;
11795}
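/* Caller-side sketch (illustrative): the framework supplies the range as a pair of
 * int32 values in ANDROID_CONTROL_AE_TARGET_FPS_RANGE, e.g. a fixed 120 fps HFR
 * request would carry:
 *
 *   int32_t fpsRange[2] = {120, 120};   // {min_fps, max_fps}
 *   settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fpsRange, 2);
 *
 * In constrained HFR mode, when the range qualifies for batching, setHalFpsRange()
 * then derives mBatchSize = max_fps / PREVIEW_FPS_FOR_HFR, clamped to MAX_HFR_BATCH_SIZE.
 */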
11796
11797/*===========================================================================
11798 * FUNCTION : translateToHalMetadata
11799 *
11800 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11801 *
11802 *
11803 * PARAMETERS :
11804 * @request : request sent from framework
11805 *
11806 *
11807 * RETURN : success: NO_ERROR
11808 * failure: BAD_VALUE
11809 *==========================================================================*/
11810int QCamera3HardwareInterface::translateToHalMetadata
11811 (const camera3_capture_request_t *request,
11812 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011813 uint32_t snapshotStreamId) {
11814 if (request == nullptr || hal_metadata == nullptr) {
11815 return BAD_VALUE;
11816 }
11817
11818 int64_t minFrameDuration = getMinFrameDuration(request);
11819
11820 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11821 minFrameDuration);
11822}
11823
11824int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11825 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11826 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11827
Thierry Strudel3d639192016-09-09 11:52:26 -070011828 int rc = 0;
11829 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011830 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011831
11832 /* Do not change the order of the following list unless you know what you are
11833 * doing.
11834 * The order is laid out in such a way that parameters in the front of the table
11835 * may be used to override the parameters later in the table. Examples are:
11836 * 1. META_MODE should precede AEC/AWB/AF MODE
11837 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11838 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11839 * 4. Any mode should precede its corresponding settings
11840 */
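    /* The body below repeats one pattern per framework tag; a minimal sketch of that
     * pattern (tag, map, and parameter names here are placeholders, not additions to
     * the table):
     *
     *   if (frame_settings.exists(ANDROID_SOME_TAG)) {
     *       uint8_t fwkValue = frame_settings.find(ANDROID_SOME_TAG).data.u8[0];
     *       int val = lookupHalName(SOME_MODES_MAP, METADATA_MAP_SIZE(SOME_MODES_MAP), fwkValue);
     *       if (NAME_NOT_FOUND != val) {
     *           if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_SOME_PARM, (uint8_t)val)) {
     *               rc = BAD_VALUE;   // flag the failure but keep translating the remaining tags
     *           }
     *       }
     *   }
     */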
11841 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11842 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11843 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11844 rc = BAD_VALUE;
11845 }
11846 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11847 if (rc != NO_ERROR) {
11848 LOGE("extractSceneMode failed");
11849 }
11850 }
11851
11852 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11853 uint8_t fwk_aeMode =
11854 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11855 uint8_t aeMode;
11856 int32_t redeye;
11857
11858 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11859 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011860 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11861 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011862 } else {
11863 aeMode = CAM_AE_MODE_ON;
11864 }
11865 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11866 redeye = 1;
11867 } else {
11868 redeye = 0;
11869 }
11870
11871 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11872 fwk_aeMode);
11873 if (NAME_NOT_FOUND != val) {
11874 int32_t flashMode = (int32_t)val;
11875 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11876 }
11877
11878 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11879 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11880 rc = BAD_VALUE;
11881 }
11882 }
11883
11884 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11885 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11886 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11887 fwk_whiteLevel);
11888 if (NAME_NOT_FOUND != val) {
11889 uint8_t whiteLevel = (uint8_t)val;
11890 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11891 rc = BAD_VALUE;
11892 }
11893 }
11894 }
11895
11896 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11897 uint8_t fwk_cacMode =
11898 frame_settings.find(
11899 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11900 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11901 fwk_cacMode);
11902 if (NAME_NOT_FOUND != val) {
11903 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11904 bool entryAvailable = FALSE;
11905 // Check whether Frameworks set CAC mode is supported in device or not
11906 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11907 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11908 entryAvailable = TRUE;
11909 break;
11910 }
11911 }
11912 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11913 // If the entry is not found, fall back to a device-supported mode instead of the framework mode, i.e.,
11914 // Only HW ISP CAC + no SW CAC : advertise all 3, with HIGH doing the same as FAST in the ISP
11915 // No HW ISP CAC + only SW CAC : advertise all 3, with FAST doing the same as OFF
11916 if (entryAvailable == FALSE) {
11917 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11918 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11919 } else {
11920 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11921 // HIGH is not supported, so fall back to FAST: the spec says the underlying
11922 // device implementation may be the same for both modes.
11923 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11924 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11925 // FAST is not supported either, so neither HIGH nor FAST can be set; choose OFF
11926 // to avoid the fps drop a high-quality mode would incur
11927 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11928 } else {
11929 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11930 }
11931 }
11932 }
11933 LOGD("Final cacMode is %d", cacMode);
11934 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11935 rc = BAD_VALUE;
11936 }
11937 } else {
11938 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11939 }
11940 }
11941
Thierry Strudel2896d122017-02-23 19:18:03 -080011942 char af_value[PROPERTY_VALUE_MAX];
11943 property_get("persist.camera.af.infinity", af_value, "0");
11944
Jason Lee84ae9972017-02-24 13:24:24 -080011945 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011946 if (atoi(af_value) == 0) {
11947 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011948 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011949 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11950 fwk_focusMode);
11951 if (NAME_NOT_FOUND != val) {
11952 uint8_t focusMode = (uint8_t)val;
11953 LOGD("set focus mode %d", focusMode);
11954 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11955 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11956 rc = BAD_VALUE;
11957 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011958 }
11959 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011960 } else {
11961 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11962 LOGE("Focus forced to infinity %d", focusMode);
11963 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11964 rc = BAD_VALUE;
11965 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011966 }
11967
Jason Lee84ae9972017-02-24 13:24:24 -080011968 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11969 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011970 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11971 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11972 focalDistance)) {
11973 rc = BAD_VALUE;
11974 }
11975 }
11976
11977 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11978 uint8_t fwk_antibandingMode =
11979 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11980 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11981 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11982 if (NAME_NOT_FOUND != val) {
11983 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011984 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11985 if (m60HzZone) {
11986 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11987 } else {
11988 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11989 }
11990 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011991 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11992 hal_antibandingMode)) {
11993 rc = BAD_VALUE;
11994 }
11995 }
11996 }
11997
11998 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11999 int32_t expCompensation = frame_settings.find(
12000 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12001 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12002 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12003 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12004 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012005 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012006 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12007 expCompensation)) {
12008 rc = BAD_VALUE;
12009 }
12010 }
12011
12012 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12013 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12014 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12015 rc = BAD_VALUE;
12016 }
12017 }
12018 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12019 rc = setHalFpsRange(frame_settings, hal_metadata);
12020 if (rc != NO_ERROR) {
12021 LOGE("setHalFpsRange failed");
12022 }
12023 }
12024
12025 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12026 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12027 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12028 rc = BAD_VALUE;
12029 }
12030 }
12031
12032 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12033 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12034 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12035 fwk_effectMode);
12036 if (NAME_NOT_FOUND != val) {
12037 uint8_t effectMode = (uint8_t)val;
12038 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12039 rc = BAD_VALUE;
12040 }
12041 }
12042 }
12043
12044 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12045 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12046 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12047 colorCorrectMode)) {
12048 rc = BAD_VALUE;
12049 }
12050 }
12051
12052 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12053 cam_color_correct_gains_t colorCorrectGains;
12054 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12055 colorCorrectGains.gains[i] =
12056 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12057 }
12058 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12059 colorCorrectGains)) {
12060 rc = BAD_VALUE;
12061 }
12062 }
12063
12064 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12065 cam_color_correct_matrix_t colorCorrectTransform;
12066 cam_rational_type_t transform_elem;
12067 size_t num = 0;
12068 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12069 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12070 transform_elem.numerator =
12071 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12072 transform_elem.denominator =
12073 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12074 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12075 num++;
12076 }
12077 }
12078 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12079 colorCorrectTransform)) {
12080 rc = BAD_VALUE;
12081 }
12082 }
12083
12084 cam_trigger_t aecTrigger;
12085 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12086 aecTrigger.trigger_id = -1;
12087 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12088 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12089 aecTrigger.trigger =
12090 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12091 aecTrigger.trigger_id =
12092 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12093 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12094 aecTrigger)) {
12095 rc = BAD_VALUE;
12096 }
12097 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12098 aecTrigger.trigger, aecTrigger.trigger_id);
12099 }
12100
12101 /*af_trigger must come with a trigger id*/
12102 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12103 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12104 cam_trigger_t af_trigger;
12105 af_trigger.trigger =
12106 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12107 af_trigger.trigger_id =
12108 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12109 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12110 rc = BAD_VALUE;
12111 }
12112 LOGD("AfTrigger: %d AfTriggerID: %d",
12113 af_trigger.trigger, af_trigger.trigger_id);
12114 }
12115
12116 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12117 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12118 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12119 rc = BAD_VALUE;
12120 }
12121 }
12122 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12123 cam_edge_application_t edge_application;
12124 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012125
Thierry Strudel3d639192016-09-09 11:52:26 -070012126 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12127 edge_application.sharpness = 0;
12128 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012129 edge_application.sharpness =
12130 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12131 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12132 int32_t sharpness =
12133 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12134 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12135 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12136 LOGD("Setting edge mode sharpness %d", sharpness);
12137 edge_application.sharpness = sharpness;
12138 }
12139 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012140 }
12141 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12142 rc = BAD_VALUE;
12143 }
12144 }
12145
12146 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12147 int32_t respectFlashMode = 1;
12148 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12149 uint8_t fwk_aeMode =
12150 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012151 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12152 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12153 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012154 respectFlashMode = 0;
12155 LOGH("AE Mode controls flash, ignore android.flash.mode");
12156 }
12157 }
12158 if (respectFlashMode) {
12159 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12160 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12161 LOGH("flash mode after mapping %d", val);
12162 // To check: CAM_INTF_META_FLASH_MODE usage
12163 if (NAME_NOT_FOUND != val) {
12164 uint8_t flashMode = (uint8_t)val;
12165 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12166 rc = BAD_VALUE;
12167 }
12168 }
12169 }
12170 }
12171
12172 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12173 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12174 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12175 rc = BAD_VALUE;
12176 }
12177 }
12178
12179 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12180 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12181 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12182 flashFiringTime)) {
12183 rc = BAD_VALUE;
12184 }
12185 }
12186
12187 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12188 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12189 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12190 hotPixelMode)) {
12191 rc = BAD_VALUE;
12192 }
12193 }
12194
12195 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12196 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12197 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12198 lensAperture)) {
12199 rc = BAD_VALUE;
12200 }
12201 }
12202
12203 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12204 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12205 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12206 filterDensity)) {
12207 rc = BAD_VALUE;
12208 }
12209 }
12210
12211 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12212 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12213 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12214 focalLength)) {
12215 rc = BAD_VALUE;
12216 }
12217 }
12218
12219 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12220 uint8_t optStabMode =
12221 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12222 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12223 optStabMode)) {
12224 rc = BAD_VALUE;
12225 }
12226 }
12227
12228 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12229 uint8_t videoStabMode =
12230 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12231 LOGD("videoStabMode from APP = %d", videoStabMode);
12232 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12233 videoStabMode)) {
12234 rc = BAD_VALUE;
12235 }
12236 }
12237
12238
12239 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12240 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12241 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12242 noiseRedMode)) {
12243 rc = BAD_VALUE;
12244 }
12245 }
12246
12247 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12248 float reprocessEffectiveExposureFactor =
12249 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12250 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12251 reprocessEffectiveExposureFactor)) {
12252 rc = BAD_VALUE;
12253 }
12254 }
12255
12256 cam_crop_region_t scalerCropRegion;
12257 bool scalerCropSet = false;
12258 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12259 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12260 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12261 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12262 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12263
12264 // Map coordinate system from active array to sensor output.
12265 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12266 scalerCropRegion.width, scalerCropRegion.height);
12267
12268 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12269 scalerCropRegion)) {
12270 rc = BAD_VALUE;
12271 }
12272 scalerCropSet = true;
12273 }
12274
12275 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12276 int64_t sensorExpTime =
12277 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12278 LOGD("setting sensorExpTime %lld", sensorExpTime);
12279 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12280 sensorExpTime)) {
12281 rc = BAD_VALUE;
12282 }
12283 }
12284
12285 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12286 int64_t sensorFrameDuration =
12287 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012288 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12289 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12290 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12291 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12292 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12293 sensorFrameDuration)) {
12294 rc = BAD_VALUE;
12295 }
12296 }
12297
12298 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12299 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12300 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12301 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12302 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12303 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12304 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12305 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12306 sensorSensitivity)) {
12307 rc = BAD_VALUE;
12308 }
12309 }
12310
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012311#ifndef USE_HAL_3_3
12312 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12313 int32_t ispSensitivity =
12314 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12315 if (ispSensitivity <
12316 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12317 ispSensitivity =
12318 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12319 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12320 }
12321 if (ispSensitivity >
12322 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12323 ispSensitivity =
12324 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12325 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12326 }
12327 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12328 ispSensitivity)) {
12329 rc = BAD_VALUE;
12330 }
12331 }
12332#endif
12333
Thierry Strudel3d639192016-09-09 11:52:26 -070012334 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12335 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12336 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12337 rc = BAD_VALUE;
12338 }
12339 }
12340
12341 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12342 uint8_t fwk_facedetectMode =
12343 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12344
12345 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12346 fwk_facedetectMode);
12347
12348 if (NAME_NOT_FOUND != val) {
12349 uint8_t facedetectMode = (uint8_t)val;
12350 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12351 facedetectMode)) {
12352 rc = BAD_VALUE;
12353 }
12354 }
12355 }
12356
Thierry Strudel54dc9782017-02-15 12:12:10 -080012357 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012358 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012359 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012360 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12361 histogramMode)) {
12362 rc = BAD_VALUE;
12363 }
12364 }
12365
12366 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12367 uint8_t sharpnessMapMode =
12368 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12369 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12370 sharpnessMapMode)) {
12371 rc = BAD_VALUE;
12372 }
12373 }
12374
12375 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12376 uint8_t tonemapMode =
12377 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12378 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12379 rc = BAD_VALUE;
12380 }
12381 }
12382 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12383 /*All tonemap channels will have the same number of points*/
12384 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12385 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12386 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12387 cam_rgb_tonemap_curves tonemapCurves;
12388 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12389 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12390 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12391 tonemapCurves.tonemap_points_cnt,
12392 CAM_MAX_TONEMAP_CURVE_SIZE);
12393 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12394 }
12395
12396 /* ch0 = G*/
12397 size_t point = 0;
12398 cam_tonemap_curve_t tonemapCurveGreen;
12399 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12400 for (size_t j = 0; j < 2; j++) {
12401 tonemapCurveGreen.tonemap_points[i][j] =
12402 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12403 point++;
12404 }
12405 }
12406 tonemapCurves.curves[0] = tonemapCurveGreen;
12407
12408 /* ch 1 = B */
12409 point = 0;
12410 cam_tonemap_curve_t tonemapCurveBlue;
12411 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12412 for (size_t j = 0; j < 2; j++) {
12413 tonemapCurveBlue.tonemap_points[i][j] =
12414 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12415 point++;
12416 }
12417 }
12418 tonemapCurves.curves[1] = tonemapCurveBlue;
12419
12420 /* ch 2 = R */
12421 point = 0;
12422 cam_tonemap_curve_t tonemapCurveRed;
12423 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12424 for (size_t j = 0; j < 2; j++) {
12425 tonemapCurveRed.tonemap_points[i][j] =
12426 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12427 point++;
12428 }
12429 }
12430 tonemapCurves.curves[2] = tonemapCurveRed;
12431
12432 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12433 tonemapCurves)) {
12434 rc = BAD_VALUE;
12435 }
12436 }
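    /* Data-layout sketch (illustrative): each ANDROID_TONEMAP_CURVE_* array holds
     * interleaved (Pin, Pout) control points, hence count/2 points per channel. For
     * example, a three-point identity curve for one channel could be set as:
     *
     *   float curve[6] = {0.0f, 0.0f, 0.5f, 0.5f, 1.0f, 1.0f};   // (in, out) pairs
     *   settings.update(ANDROID_TONEMAP_CURVE_GREEN, curve, 6);
     */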
12437
12438 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12439 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12440 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12441 captureIntent)) {
12442 rc = BAD_VALUE;
12443 }
12444 }
12445
12446 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12447 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12448 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12449 blackLevelLock)) {
12450 rc = BAD_VALUE;
12451 }
12452 }
12453
12454 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12455 uint8_t lensShadingMapMode =
12456 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12457 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12458 lensShadingMapMode)) {
12459 rc = BAD_VALUE;
12460 }
12461 }
12462
12463 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12464 cam_area_t roi;
12465 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012466 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012467
12468 // Map coordinate system from active array to sensor output.
12469 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12470 roi.rect.height);
12471
12472 if (scalerCropSet) {
12473 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12474 }
12475 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12476 rc = BAD_VALUE;
12477 }
12478 }
12479
12480 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12481 cam_area_t roi;
12482 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012483 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012484
12485 // Map coordinate system from active array to sensor output.
12486 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12487 roi.rect.height);
12488
12489 if (scalerCropSet) {
12490 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12491 }
12492 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12493 rc = BAD_VALUE;
12494 }
12495 }
12496
12497 // CDS for non-HFR non-video mode
12498 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12499 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12500 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12501 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12502 LOGE("Invalid CDS mode %d!", *fwk_cds);
12503 } else {
12504 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12505 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12506 rc = BAD_VALUE;
12507 }
12508 }
12509 }
12510
Thierry Strudel04e026f2016-10-10 11:27:36 -070012511 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012512 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012513 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012514 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12515 }
12516 if (m_bVideoHdrEnabled)
12517 vhdr = CAM_VIDEO_HDR_MODE_ON;
12518
Thierry Strudel54dc9782017-02-15 12:12:10 -080012519 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12520
12521 if(vhdr != curr_hdr_state)
12522 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12523
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012524 rc = setVideoHdrMode(mParameters, vhdr);
12525 if (rc != NO_ERROR) {
12526 LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012527 }
12528
12529 //IR
12530 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12531 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12532 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012533 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12534 uint8_t isIRon = 0;
12535
12536 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012537 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12538 LOGE("Invalid IR mode %d!", fwk_ir);
12539 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012540 if(isIRon != curr_ir_state )
12541 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12542
Thierry Strudel04e026f2016-10-10 11:27:36 -070012543 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12544 CAM_INTF_META_IR_MODE, fwk_ir)) {
12545 rc = BAD_VALUE;
12546 }
12547 }
12548 }
12549
Thierry Strudel54dc9782017-02-15 12:12:10 -080012550 //Binning Correction Mode
12551 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12552 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12553 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12554 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12555 || (0 > fwk_binning_correction)) {
12556 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12557 } else {
12558 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12559 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12560 rc = BAD_VALUE;
12561 }
12562 }
12563 }
12564
Thierry Strudel269c81a2016-10-12 12:13:59 -070012565 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12566 float aec_speed;
12567 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12568 LOGD("AEC Speed :%f", aec_speed);
12569 if ( aec_speed < 0 ) {
12570 LOGE("Invalid AEC convergence speed %f!", aec_speed);
12571 } else {
12572 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12573 aec_speed)) {
12574 rc = BAD_VALUE;
12575 }
12576 }
12577 }
12578
12579 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12580 float awb_speed;
12581 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12582 LOGD("AWB Speed :%f", awb_speed);
12583 if ( awb_speed < 0 ) {
12584 LOGE("Invalid AWB convergence speed %f!", awb_speed);
12585 } else {
12586 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12587 awb_speed)) {
12588 rc = BAD_VALUE;
12589 }
12590 }
12591 }
12592
Thierry Strudel3d639192016-09-09 11:52:26 -070012593 // TNR
12594 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12595 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12596 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012597 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012598 cam_denoise_param_t tnr;
12599 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12600 tnr.process_plates =
12601 (cam_denoise_process_type_t)frame_settings.find(
12602 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12603 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012604
12605 if(b_TnrRequested != curr_tnr_state)
12606 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12607
Thierry Strudel3d639192016-09-09 11:52:26 -070012608 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12609 rc = BAD_VALUE;
12610 }
12611 }
12612
Thierry Strudel54dc9782017-02-15 12:12:10 -080012613 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012614 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012615 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012616 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12617 *exposure_metering_mode)) {
12618 rc = BAD_VALUE;
12619 }
12620 }
12621
Thierry Strudel3d639192016-09-09 11:52:26 -070012622 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12623 int32_t fwk_testPatternMode =
12624 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12625 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12626 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12627
12628 if (NAME_NOT_FOUND != testPatternMode) {
12629 cam_test_pattern_data_t testPatternData;
12630 memset(&testPatternData, 0, sizeof(testPatternData));
12631 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12632 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12633 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12634 int32_t *fwk_testPatternData =
12635 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12636 testPatternData.r = fwk_testPatternData[0];
12637 testPatternData.b = fwk_testPatternData[3];
12638 switch (gCamCapability[mCameraId]->color_arrangement) {
12639 case CAM_FILTER_ARRANGEMENT_RGGB:
12640 case CAM_FILTER_ARRANGEMENT_GRBG:
12641 testPatternData.gr = fwk_testPatternData[1];
12642 testPatternData.gb = fwk_testPatternData[2];
12643 break;
12644 case CAM_FILTER_ARRANGEMENT_GBRG:
12645 case CAM_FILTER_ARRANGEMENT_BGGR:
12646 testPatternData.gr = fwk_testPatternData[2];
12647 testPatternData.gb = fwk_testPatternData[1];
12648 break;
12649 default:
12650 LOGE("color arrangement %d is not supported",
12651 gCamCapability[mCameraId]->color_arrangement);
12652 break;
12653 }
12654 }
12655 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12656 testPatternData)) {
12657 rc = BAD_VALUE;
12658 }
12659 } else {
12660 LOGE("Invalid framework sensor test pattern mode %d",
12661 fwk_testPatternMode);
12662 }
12663 }
12664
12665 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12666 size_t count = 0;
12667 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12668 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12669 gps_coords.data.d, gps_coords.count, count);
12670 if (gps_coords.count != count) {
12671 rc = BAD_VALUE;
12672 }
12673 }
12674
12675 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12676 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12677 size_t count = 0;
12678 const char *gps_methods_src = (const char *)
12679 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12680 memset(gps_methods, '\0', sizeof(gps_methods));
12681 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12682 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12683 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12684 if (GPS_PROCESSING_METHOD_SIZE != count) {
12685 rc = BAD_VALUE;
12686 }
12687 }
12688
12689 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12690 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12691 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12692 gps_timestamp)) {
12693 rc = BAD_VALUE;
12694 }
12695 }
12696
12697 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12698 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12699 cam_rotation_info_t rotation_info;
12700 if (orientation == 0) {
12701 rotation_info.rotation = ROTATE_0;
12702 } else if (orientation == 90) {
12703 rotation_info.rotation = ROTATE_90;
12704 } else if (orientation == 180) {
12705 rotation_info.rotation = ROTATE_180;
12706 } else if (orientation == 270) {
12707 rotation_info.rotation = ROTATE_270;
12708 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012709 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012710 rotation_info.streamId = snapshotStreamId;
12711 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12712 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12713 rc = BAD_VALUE;
12714 }
12715 }
12716
12717 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12718 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12719 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12720 rc = BAD_VALUE;
12721 }
12722 }
12723
12724 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12725 uint32_t thumb_quality = (uint32_t)
12726 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12727 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12728 thumb_quality)) {
12729 rc = BAD_VALUE;
12730 }
12731 }
12732
12733 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12734 cam_dimension_t dim;
12735 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12736 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12737 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12738 rc = BAD_VALUE;
12739 }
12740 }
12741
12742 // Internal metadata
12743 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12744 size_t count = 0;
12745 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12746 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12747 privatedata.data.i32, privatedata.count, count);
12748 if (privatedata.count != count) {
12749 rc = BAD_VALUE;
12750 }
12751 }
12752
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012753 // ISO/Exposure Priority
12754 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12755 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12756 cam_priority_mode_t mode =
12757 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12758 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12759 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12760 use_iso_exp_pty.previewOnly = FALSE;
12761 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12762 use_iso_exp_pty.value = *ptr;
12763
12764 if(CAM_ISO_PRIORITY == mode) {
12765 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12766 use_iso_exp_pty)) {
12767 rc = BAD_VALUE;
12768 }
12769 }
12770 else {
12771 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12772 use_iso_exp_pty)) {
12773 rc = BAD_VALUE;
12774 }
12775 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012776
12777 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12778 rc = BAD_VALUE;
12779 }
12780 }
12781 } else {
12782 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12783 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012784 }
12785 }
12786
12787 // Saturation
12788 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12789 int32_t* use_saturation =
12790 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12791 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12792 rc = BAD_VALUE;
12793 }
12794 }
12795
Thierry Strudel3d639192016-09-09 11:52:26 -070012796 // EV step
12797 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12798 gCamCapability[mCameraId]->exp_compensation_step)) {
12799 rc = BAD_VALUE;
12800 }
12801
12802 // CDS info
12803 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12804 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12805 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12806
12807 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12808 CAM_INTF_META_CDS_DATA, *cdsData)) {
12809 rc = BAD_VALUE;
12810 }
12811 }
12812
Shuzhen Wang19463d72016-03-08 11:09:52 -080012813 // Hybrid AE
12814 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12815 uint8_t *hybrid_ae = (uint8_t *)
12816 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12817
12818 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12819 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12820 rc = BAD_VALUE;
12821 }
12822 }
12823
Shuzhen Wang14415f52016-11-16 18:26:18 -080012824 // Histogram
12825 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12826 uint8_t histogramMode =
12827 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12828 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12829 histogramMode)) {
12830 rc = BAD_VALUE;
12831 }
12832 }
12833
12834 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12835 int32_t histogramBins =
12836 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12837 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12838 histogramBins)) {
12839 rc = BAD_VALUE;
12840 }
12841 }
12842
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012843 // Tracking AF
12844 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12845 uint8_t trackingAfTrigger =
12846 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12847 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12848 trackingAfTrigger)) {
12849 rc = BAD_VALUE;
12850 }
12851 }
12852
Thierry Strudel3d639192016-09-09 11:52:26 -070012853 return rc;
12854}
12855
12856/*===========================================================================
12857 * FUNCTION : captureResultCb
12858 *
12859 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12860 *
12861 * PARAMETERS :
12862 * @frame : frame information from mm-camera-interface
12863 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12864 * @userdata: userdata
12865 *
12866 * RETURN : NONE
12867 *==========================================================================*/
12868void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12869 camera3_stream_buffer_t *buffer,
12870 uint32_t frame_number, bool isInputBuffer, void *userdata)
12871{
12872 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12873 if (hw == NULL) {
12874 LOGE("Invalid hw %p", hw);
12875 return;
12876 }
12877
12878 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12879 return;
12880}
12881
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012882/*===========================================================================
12883 * FUNCTION : setBufferErrorStatus
12884 *
12885 * DESCRIPTION: Callback handler for channels to report any buffer errors
12886 *
12887 * PARAMETERS :
12888 * @ch : Channel on which buffer error is reported from
12889 * @frame_number : frame number on which buffer error is reported on
12890 * @buffer_status : buffer error status
12891 * @userdata: userdata
12892 *
12893 * RETURN : NONE
12894 *==========================================================================*/
12895void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12896 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12897{
12898 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12899 if (hw == NULL) {
12900 LOGE("Invalid hw %p", hw);
12901 return;
12902 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012903
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012904 hw->setBufferErrorStatus(ch, frame_number, err);
12905 return;
12906}
12907
12908void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12909 uint32_t frameNumber, camera3_buffer_status_t err)
12910{
12911 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12912 pthread_mutex_lock(&mMutex);
12913
12914 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12915 if (req.frame_number != frameNumber)
12916 continue;
12917 for (auto& k : req.mPendingBufferList) {
12918 if(k.stream->priv == ch) {
12919 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12920 }
12921 }
12922 }
12923
12924 pthread_mutex_unlock(&mMutex);
12925 return;
12926}
Thierry Strudel3d639192016-09-09 11:52:26 -070012927/*===========================================================================
12928 * FUNCTION : initialize
12929 *
12930 * DESCRIPTION: Pass framework callback pointers to HAL
12931 *
12932 * PARAMETERS :
12933 *
12934 *
12935 * RETURN : Success : 0
12936 * Failure: -ENODEV
12937 *==========================================================================*/
12938
12939int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12940 const camera3_callback_ops_t *callback_ops)
12941{
12942 LOGD("E");
12943 QCamera3HardwareInterface *hw =
12944 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12945 if (!hw) {
12946 LOGE("NULL camera device");
12947 return -ENODEV;
12948 }
12949
12950 int rc = hw->initialize(callback_ops);
12951 LOGD("X");
12952 return rc;
12953}
12954
12955/*===========================================================================
12956 * FUNCTION : configure_streams
12957 *
12958 * DESCRIPTION: Entry point for the framework to set up a new output stream configuration
12959 *
12960 * PARAMETERS :
12961 *
12962 *
12963 * RETURN : Success: 0
12964 * Failure: -EINVAL (if stream configuration is invalid)
12965 * -ENODEV (fatal error)
12966 *==========================================================================*/
12967
12968int QCamera3HardwareInterface::configure_streams(
12969 const struct camera3_device *device,
12970 camera3_stream_configuration_t *stream_list)
12971{
12972 LOGD("E");
12973 QCamera3HardwareInterface *hw =
12974 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12975 if (!hw) {
12976 LOGE("NULL camera device");
12977 return -ENODEV;
12978 }
12979 int rc = hw->configureStreams(stream_list);
12980 LOGD("X");
12981 return rc;
12982}
12983
12984/*===========================================================================
12985 * FUNCTION : construct_default_request_settings
12986 *
12987 * DESCRIPTION: Configure a settings buffer to meet the required use case
12988 *
12989 * PARAMETERS :
12990 *
12991 *
12992 * RETURN : Success: Return valid metadata
12993 * Failure: Return NULL
12994 *==========================================================================*/
12995const camera_metadata_t* QCamera3HardwareInterface::
12996 construct_default_request_settings(const struct camera3_device *device,
12997 int type)
12998{
12999
13000 LOGD("E");
13001 camera_metadata_t* fwk_metadata = NULL;
13002 QCamera3HardwareInterface *hw =
13003 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13004 if (!hw) {
13005 LOGE("NULL camera device");
13006 return NULL;
13007 }
13008
13009 fwk_metadata = hw->translateCapabilityToMetadata(type);
13010
13011 LOGD("X");
13012 return fwk_metadata;
13013}
13014
13015/*===========================================================================
13016 * FUNCTION : process_capture_request
13017 *
13018 * DESCRIPTION: Entry point for a single capture request issued by the framework
13019 *
13020 * PARAMETERS :
13021 *
13022 *
13023 * RETURN :
13024 *==========================================================================*/
13025int QCamera3HardwareInterface::process_capture_request(
13026 const struct camera3_device *device,
13027 camera3_capture_request_t *request)
13028{
13029 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013030 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013031 QCamera3HardwareInterface *hw =
13032 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13033 if (!hw) {
13034 LOGE("NULL camera device");
13035 return -EINVAL;
13036 }
13037
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013038 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013039 LOGD("X");
13040 return rc;
13041}
13042
13043/*===========================================================================
13044 * FUNCTION : dump
13045 *
13046 * DESCRIPTION: Dump HAL state to the given fd (triggered via "adb shell dumpsys media.camera")
13047 *
13048 * PARAMETERS :
13049 *
13050 *
13051 * RETURN :
13052 *==========================================================================*/
13053
13054void QCamera3HardwareInterface::dump(
13055 const struct camera3_device *device, int fd)
13056{
13057 /* Log level property is read when "adb shell dumpsys media.camera" is
13058 called so that the log level can be controlled without restarting
13059 the media server */
13060 getLogLevel();
13061
13062 LOGD("E");
13063 QCamera3HardwareInterface *hw =
13064 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13065 if (!hw) {
13066 LOGE("NULL camera device");
13067 return;
13068 }
13069
13070 hw->dump(fd);
13071 LOGD("X");
13072 return;
13073}
13074
13075/*===========================================================================
13076 * FUNCTION : flush
13077 *
13078 * DESCRIPTION: Flush all in-flight requests and return the device to an idle state
13079 *
13080 * PARAMETERS :
13081 *
13082 *
13083 * RETURN :
13084 *==========================================================================*/
13085
13086int QCamera3HardwareInterface::flush(
13087 const struct camera3_device *device)
13088{
13089 int rc;
13090 LOGD("E");
13091 QCamera3HardwareInterface *hw =
13092 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13093 if (!hw) {
13094 LOGE("NULL camera device");
13095 return -EINVAL;
13096 }
13097
13098 pthread_mutex_lock(&hw->mMutex);
13099 // Validate current state
13100 switch (hw->mState) {
13101 case STARTED:
13102 /* valid state */
13103 break;
13104
13105 case ERROR:
13106 pthread_mutex_unlock(&hw->mMutex);
13107 hw->handleCameraDeviceError();
13108 return -ENODEV;
13109
13110 default:
13111 LOGI("Flush returned during state %d", hw->mState);
13112 pthread_mutex_unlock(&hw->mMutex);
13113 return 0;
13114 }
13115 pthread_mutex_unlock(&hw->mMutex);
13116
13117 rc = hw->flush(true /* restart channels */ );
13118 LOGD("X");
13119 return rc;
13120}
13121
13122/*===========================================================================
13123 * FUNCTION : close_camera_device
13124 *
13125 * DESCRIPTION: close the camera device and release the HAL instance
13126 *
13127 * PARAMETERS :
13128 *   @device : ptr to hw_device_t struct
13129 *
13130 * RETURN : NO_ERROR on success, BAD_VALUE if device is NULL
13131 *==========================================================================*/
13132int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13133{
13134 int ret = NO_ERROR;
13135 QCamera3HardwareInterface *hw =
13136 reinterpret_cast<QCamera3HardwareInterface *>(
13137 reinterpret_cast<camera3_device_t *>(device)->priv);
13138 if (!hw) {
13139 LOGE("NULL camera device");
13140 return BAD_VALUE;
13141 }
13142
13143 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13144 delete hw;
13145 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013146 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013147 return ret;
13148}
13149
13150/*===========================================================================
13151 * FUNCTION : getWaveletDenoiseProcessPlate
13152 *
13153 * DESCRIPTION: query wavelet denoise process plate
13154 *
13155 * PARAMETERS : None
13156 *
13157 * RETURN : WNR process plate value
13158 *==========================================================================*/
13159cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13160{
13161 char prop[PROPERTY_VALUE_MAX];
13162 memset(prop, 0, sizeof(prop));
13163 property_get("persist.denoise.process.plates", prop, "0");
13164 int processPlate = atoi(prop);
13165 switch(processPlate) {
13166 case 0:
13167 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13168 case 1:
13169 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13170 case 2:
13171 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13172 case 3:
13173 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13174 default:
13175 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13176 }
13177}
13178
13179
13180/*===========================================================================
13181 * FUNCTION : getTemporalDenoiseProcessPlate
13182 *
13183 * DESCRIPTION: query temporal denoise process plate
13184 *
13185 * PARAMETERS : None
13186 *
13187 * RETURN : TNR process plate value
13188 *==========================================================================*/
13189cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13190{
13191 char prop[PROPERTY_VALUE_MAX];
13192 memset(prop, 0, sizeof(prop));
13193 property_get("persist.tnr.process.plates", prop, "0");
13194 int processPlate = atoi(prop);
13195 switch(processPlate) {
13196 case 0:
13197 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13198 case 1:
13199 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13200 case 2:
13201 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13202 case 3:
13203 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13204 default:
13205 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13206 }
13207}
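/*
 * Usage sketch (illustrative only, not part of this HAL): both plate queries
 * above read an Android system property and fall back to the streamlined
 * Y/CbCr plate for out-of-range values. Assuming a test environment with
 * libcutils available, the CbCr-only plate could be forced as sketched below;
 * the helper name is hypothetical.
 *
 *   #include <cutils/properties.h>
 *
 *   static void forceCbcrOnlyDenoisePlates() {
 *       // "1" maps to CAM_WAVELET_DENOISE_CBCR_ONLY in the switches above.
 *       property_set("persist.denoise.process.plates", "1");  // WNR plate
 *       property_set("persist.tnr.process.plates", "1");      // TNR plate
 *   }
 */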
13208
13209
13210/*===========================================================================
13211 * FUNCTION : extractSceneMode
13212 *
13213 * DESCRIPTION: Extract scene mode from framework-set metadata
13214 *
13215 * PARAMETERS :
13216 * @frame_settings: CameraMetadata reference
13217 * @metaMode: ANDROID_CONTROL_MODE value
13218 * @hal_metadata: hal metadata structure
13219 *
13220 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13221 *==========================================================================*/
13222int32_t QCamera3HardwareInterface::extractSceneMode(
13223 const CameraMetadata &frame_settings, uint8_t metaMode,
13224 metadata_buffer_t *hal_metadata)
13225{
13226 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013227 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13228
13229 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13230 LOGD("Ignoring control mode OFF_KEEP_STATE");
13231 return NO_ERROR;
13232 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013233
13234 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13235 camera_metadata_ro_entry entry =
13236 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13237 if (0 == entry.count)
13238 return rc;
13239
13240 uint8_t fwk_sceneMode = entry.data.u8[0];
13241
13242 int val = lookupHalName(SCENE_MODES_MAP,
13243 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13244 fwk_sceneMode);
13245 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013246 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013247 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013248 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013249 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013250
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013251 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13252 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13253 }
13254
13255 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13256        if (sceneMode == CAM_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013257 cam_hdr_param_t hdr_params;
13258 hdr_params.hdr_enable = 1;
13259 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13260 hdr_params.hdr_need_1x = false;
13261 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13262 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13263 rc = BAD_VALUE;
13264 }
13265 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013266
Thierry Strudel3d639192016-09-09 11:52:26 -070013267 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13268 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13269 rc = BAD_VALUE;
13270 }
13271 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013272
13273 if (mForceHdrSnapshot) {
13274 cam_hdr_param_t hdr_params;
13275 hdr_params.hdr_enable = 1;
13276 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13277 hdr_params.hdr_need_1x = false;
13278 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13279 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13280 rc = BAD_VALUE;
13281 }
13282 }
13283
Thierry Strudel3d639192016-09-09 11:52:26 -070013284 return rc;
13285}
13286
13287/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013288 * FUNCTION : setVideoHdrMode
13289 *
13290 * DESCRIPTION: Set video HDR mode from framework-set metadata
13291 *
13292 * PARAMETERS :
13293 * @hal_metadata: hal metadata structure
13294 * @vhdr: requested QCAMERA3_VIDEO_HDR_MODE value
13295 *
13296 * RETURN : NO_ERROR on success, BAD_VALUE on invalid mode
13297 *==========================================================================*/
13298int32_t QCamera3HardwareInterface::setVideoHdrMode(
13299 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13300{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013301 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13302 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13303 }
13304
13305 LOGE("Invalid Video HDR mode %d!", vhdr);
13306 return BAD_VALUE;
13307}
13308
13309/*===========================================================================
13310 * FUNCTION : setSensorHDR
13311 *
13312 * DESCRIPTION: Enable/disable sensor HDR.
13313 *
13314 * PARAMETERS :
13315 * @hal_metadata: hal metadata structure
13316 * @enable: boolean whether to enable/disable sensor HDR
13317 * @isVideoHdrEnable: true when invoked from the video HDR path
13318 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13319 *==========================================================================*/
13320int32_t QCamera3HardwareInterface::setSensorHDR(
13321 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13322{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013323 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013324 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13325
13326 if (enable) {
13327 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13328 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13329 #ifdef _LE_CAMERA_
13330 //Default to staggered HDR for IOT
13331 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13332 #else
13333 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13334 #endif
13335 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13336 }
13337
13338 bool isSupported = false;
13339 switch (sensor_hdr) {
13340 case CAM_SENSOR_HDR_IN_SENSOR:
13341 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13342 CAM_QCOM_FEATURE_SENSOR_HDR) {
13343 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013344 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013345 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013346 break;
13347 case CAM_SENSOR_HDR_ZIGZAG:
13348 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13349 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13350 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013351 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013352 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013353 break;
13354 case CAM_SENSOR_HDR_STAGGERED:
13355 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13356 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13357 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013358 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013359 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013360 break;
13361 case CAM_SENSOR_HDR_OFF:
13362 isSupported = true;
13363 LOGD("Turning off sensor HDR");
13364 break;
13365 default:
13366 LOGE("HDR mode %d not supported", sensor_hdr);
13367 rc = BAD_VALUE;
13368 break;
13369 }
13370
13371 if(isSupported) {
13372 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13373 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13374 rc = BAD_VALUE;
13375 } else {
13376 if(!isVideoHdrEnable)
13377 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013378 }
13379 }
13380 return rc;
13381}
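/*
 * Usage sketch (illustrative only): setSensorHDR() consults
 * persist.camera.sensor.hdr only when HDR is being enabled, then validates the
 * requested type against the sensor feature mask. Assuming the enum ordering
 * implied by the IOT default of "3" (staggered HDR) above, staggered sensor
 * HDR could be requested as follows; the helper name is hypothetical.
 *
 *   #include <cutils/properties.h>
 *
 *   static void requestStaggeredSensorHdr() {
 *       property_set("persist.camera.sensor.hdr", "3");
 *       // A subsequent setVideoHdrMode(hal_metadata, CAM_VIDEO_HDR_MODE_ON)
 *       // will then try CAM_SENSOR_HDR_STAGGERED, provided the capability
 *       // bit CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR is set for this sensor.
 *   }
 */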
13382
13383/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013384 * FUNCTION : needRotationReprocess
13385 *
13386 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13387 *
13388 * PARAMETERS : none
13389 *
13390 * RETURN : true: needed
13391 * false: no need
13392 *==========================================================================*/
13393bool QCamera3HardwareInterface::needRotationReprocess()
13394{
13395 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13396 // current rotation is not zero, and pp has the capability to process rotation
13397 LOGH("need do reprocess for rotation");
13398 return true;
13399 }
13400
13401 return false;
13402}
13403
13404/*===========================================================================
13405 * FUNCTION : needReprocess
13406 *
13407 * DESCRIPTION: if reprocess is needed
13408 *
13409 * PARAMETERS : none
13410 *
13411 * RETURN : true: needed
13412 * false: no need
13413 *==========================================================================*/
13414bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13415{
13416 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13417 // TODO: add for ZSL HDR later
13418 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13419 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13420 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13421 return true;
13422 } else {
13423 LOGH("already post processed frame");
13424 return false;
13425 }
13426 }
13427 return needRotationReprocess();
13428}
13429
13430/*===========================================================================
13431 * FUNCTION : needJpegExifRotation
13432 *
13433 * DESCRIPTION: if EXIF rotation by the JPEG encoder is needed
13434 *
13435 * PARAMETERS : none
13436 *
13437 * RETURN : true: needed
13438 * false: no need
13439 *==========================================================================*/
13440bool QCamera3HardwareInterface::needJpegExifRotation()
13441{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013442 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013443 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13444 LOGD("Need use Jpeg EXIF Rotation");
13445 return true;
13446 }
13447 return false;
13448}
13449
13450/*===========================================================================
13451 * FUNCTION : addOfflineReprocChannel
13452 *
13453 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13454 * coming from input channel
13455 *
13456 * PARAMETERS :
13457 * @config : reprocess configuration
13458 * @inputChHandle : pointer to the input (source) channel
13459 *
13460 *
13461 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13462 *==========================================================================*/
13463QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13464 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13465{
13466 int32_t rc = NO_ERROR;
13467 QCamera3ReprocessChannel *pChannel = NULL;
13468
13469 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013470 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13471 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013472 if (NULL == pChannel) {
13473 LOGE("no mem for reprocess channel");
13474 return NULL;
13475 }
13476
13477 rc = pChannel->initialize(IS_TYPE_NONE);
13478 if (rc != NO_ERROR) {
13479 LOGE("init reprocess channel failed, ret = %d", rc);
13480 delete pChannel;
13481 return NULL;
13482 }
13483
13484 // pp feature config
13485 cam_pp_feature_config_t pp_config;
13486 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13487
13488 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13489 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13490 & CAM_QCOM_FEATURE_DSDN) {
13491            //Use CPP CDS in case h/w supports it.
13492 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13493 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13494 }
13495 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13496 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13497 }
13498
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013499 if (config.hdr_param.hdr_enable) {
13500 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13501 pp_config.hdr_param = config.hdr_param;
13502 }
13503
13504 if (mForceHdrSnapshot) {
13505 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13506 pp_config.hdr_param.hdr_enable = 1;
13507 pp_config.hdr_param.hdr_need_1x = 0;
13508 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13509 }
13510
Thierry Strudel3d639192016-09-09 11:52:26 -070013511 rc = pChannel->addReprocStreamsFromSource(pp_config,
13512 config,
13513 IS_TYPE_NONE,
13514 mMetadataChannel);
13515
13516 if (rc != NO_ERROR) {
13517 delete pChannel;
13518 return NULL;
13519 }
13520 return pChannel;
13521}
13522
13523/*===========================================================================
13524 * FUNCTION : getMobicatMask
13525 *
13526 * DESCRIPTION: returns mobicat mask
13527 *
13528 * PARAMETERS : none
13529 *
13530 * RETURN : mobicat mask
13531 *
13532 *==========================================================================*/
13533uint8_t QCamera3HardwareInterface::getMobicatMask()
13534{
13535 return m_MobicatMask;
13536}
13537
13538/*===========================================================================
13539 * FUNCTION : setMobicat
13540 *
13541 * DESCRIPTION: set Mobicat on/off.
13542 *
13543 * PARAMETERS :
13544 * @params : none
13545 *
13546 * RETURN : int32_t type of status
13547 * NO_ERROR -- success
13548 * non-zero failure code
13549 *==========================================================================*/
13550int32_t QCamera3HardwareInterface::setMobicat()
13551{
13552 char value [PROPERTY_VALUE_MAX];
13553 property_get("persist.camera.mobicat", value, "0");
13554 int32_t ret = NO_ERROR;
13555 uint8_t enableMobi = (uint8_t)atoi(value);
13556
13557 if (enableMobi) {
13558 tune_cmd_t tune_cmd;
13559 tune_cmd.type = SET_RELOAD_CHROMATIX;
13560 tune_cmd.module = MODULE_ALL;
13561 tune_cmd.value = TRUE;
13562 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13563 CAM_INTF_PARM_SET_VFE_COMMAND,
13564 tune_cmd);
13565
13566 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13567 CAM_INTF_PARM_SET_PP_COMMAND,
13568 tune_cmd);
13569 }
13570 m_MobicatMask = enableMobi;
13571
13572 return ret;
13573}
13574
13575/*===========================================================================
13576* FUNCTION : getLogLevel
13577*
13578* DESCRIPTION: Reads the log level property into a variable
13579*
13580* PARAMETERS :
13581* None
13582*
13583* RETURN :
13584* None
13585*==========================================================================*/
13586void QCamera3HardwareInterface::getLogLevel()
13587{
13588 char prop[PROPERTY_VALUE_MAX];
13589 uint32_t globalLogLevel = 0;
13590
13591 property_get("persist.camera.hal.debug", prop, "0");
13592 int val = atoi(prop);
13593 if (0 <= val) {
13594 gCamHal3LogLevel = (uint32_t)val;
13595 }
13596
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013597 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013598 gKpiDebugLevel = atoi(prop);
13599
13600 property_get("persist.camera.global.debug", prop, "0");
13601 val = atoi(prop);
13602 if (0 <= val) {
13603 globalLogLevel = (uint32_t)val;
13604 }
13605
13606 /* Highest log level among hal.logs and global.logs is selected */
13607 if (gCamHal3LogLevel < globalLogLevel)
13608 gCamHal3LogLevel = globalLogLevel;
13609
13610 return;
13611}
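/*
 * Usage sketch (illustrative only): dump() calls getLogLevel() on every
 * "adb shell dumpsys media.camera" invocation, so the HAL log level can be
 * raised at runtime without restarting the media server. Assuming libcutils
 * is available, a test could bump both properties read above; the helper
 * name is hypothetical.
 *
 *   #include <cutils/properties.h>
 *
 *   static void raiseCameraHalLogLevel() {
 *       property_set("persist.camera.hal.debug", "4");
 *       property_set("persist.camera.global.debug", "3");
 *       // The higher of the two values takes effect on the next dump().
 *   }
 */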
13612
13613/*===========================================================================
13614 * FUNCTION : validateStreamRotations
13615 *
13616 * DESCRIPTION: Check if the rotations requested are supported
13617 *
13618 * PARAMETERS :
13619 * @stream_list : streams to be configured
13620 *
13621 * RETURN : NO_ERROR on success
13622 * -EINVAL on failure
13623 *
13624 *==========================================================================*/
13625int QCamera3HardwareInterface::validateStreamRotations(
13626 camera3_stream_configuration_t *streamList)
13627{
13628 int rc = NO_ERROR;
13629
13630 /*
13631 * Loop through all streams requested in configuration
13632 * Check if unsupported rotations have been requested on any of them
13633 */
13634 for (size_t j = 0; j < streamList->num_streams; j++){
13635 camera3_stream_t *newStream = streamList->streams[j];
13636
13637 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13638 bool isImplDef = (newStream->format ==
13639 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13640 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13641 isImplDef);
13642
13643 if (isRotated && (!isImplDef || isZsl)) {
13644 LOGE("Error: Unsupported rotation of %d requested for stream"
13645 "type:%d and stream format:%d",
13646 newStream->rotation, newStream->stream_type,
13647 newStream->format);
13648 rc = -EINVAL;
13649 break;
13650 }
13651 }
13652
13653 return rc;
13654}
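/*
 * Example (hypothetical stream, for illustration): validateStreamRotations()
 * accepts rotation only on implementation-defined, non-ZSL output streams.
 * The configuration below would therefore be rejected with -EINVAL:
 *
 *   camera3_stream_t rotatedYuv = {};
 *   rotatedYuv.stream_type = CAMERA3_STREAM_OUTPUT;
 *   rotatedYuv.format      = HAL_PIXEL_FORMAT_YCbCr_420_888;
 *   rotatedYuv.rotation    = CAMERA3_STREAM_ROTATION_90;
 *   // isRotated && !isImplDef  ->  rc = -EINVAL
 */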
13655
13656/*===========================================================================
13657* FUNCTION : getFlashInfo
13658*
13659* DESCRIPTION: Retrieve information about whether the device has a flash.
13660*
13661* PARAMETERS :
13662* @cameraId : Camera id to query
13663* @hasFlash : Boolean indicating whether there is a flash device
13664* associated with given camera
13665* @flashNode : If a flash device exists, this will be its device node.
13666*
13667* RETURN :
13668* None
13669*==========================================================================*/
13670void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13671 bool& hasFlash,
13672 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13673{
13674 cam_capability_t* camCapability = gCamCapability[cameraId];
13675 if (NULL == camCapability) {
13676 hasFlash = false;
13677 flashNode[0] = '\0';
13678 } else {
13679 hasFlash = camCapability->flash_available;
13680 strlcpy(flashNode,
13681 (char*)camCapability->flash_dev_name,
13682 QCAMERA_MAX_FILEPATH_LENGTH);
13683 }
13684}
13685
13686/*===========================================================================
13687* FUNCTION : getEepromVersionInfo
13688*
13689* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13690*
13691* PARAMETERS : None
13692*
13693* RETURN : string describing EEPROM version
13694* "\0" if no such info available
13695*==========================================================================*/
13696const char *QCamera3HardwareInterface::getEepromVersionInfo()
13697{
13698 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13699}
13700
13701/*===========================================================================
13702* FUNCTION : getLdafCalib
13703*
13704* DESCRIPTION: Retrieve Laser AF calibration data
13705*
13706* PARAMETERS : None
13707*
13708* RETURN : Two uint32_t describing laser AF calibration data
13709* NULL if none is available.
13710*==========================================================================*/
13711const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13712{
13713 if (mLdafCalibExist) {
13714 return &mLdafCalib[0];
13715 } else {
13716 return NULL;
13717 }
13718}
13719
13720/*===========================================================================
13721 * FUNCTION : dynamicUpdateMetaStreamInfo
13722 *
13723 * DESCRIPTION: This function:
13724 * (1) stops all the channels
13725 * (2) returns error on pending requests and buffers
13726 * (3) sends metastream_info in setparams
13727 * (4) starts all channels
13728 * This is useful when sensor has to be restarted to apply any
13729 * settings such as frame rate from a different sensor mode
13730 *
13731 * PARAMETERS : None
13732 *
13733 * RETURN : NO_ERROR on success
13734 * Error codes on failure
13735 *
13736 *==========================================================================*/
13737int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13738{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013739 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013740 int rc = NO_ERROR;
13741
13742 LOGD("E");
13743
13744 rc = stopAllChannels();
13745 if (rc < 0) {
13746 LOGE("stopAllChannels failed");
13747 return rc;
13748 }
13749
13750 rc = notifyErrorForPendingRequests();
13751 if (rc < 0) {
13752 LOGE("notifyErrorForPendingRequests failed");
13753 return rc;
13754 }
13755
13756 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13757 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13758 "Format:%d",
13759 mStreamConfigInfo.type[i],
13760 mStreamConfigInfo.stream_sizes[i].width,
13761 mStreamConfigInfo.stream_sizes[i].height,
13762 mStreamConfigInfo.postprocess_mask[i],
13763 mStreamConfigInfo.format[i]);
13764 }
13765
13766 /* Send meta stream info once again so that ISP can start */
13767 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13768 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13769 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13770 mParameters);
13771 if (rc < 0) {
13772 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13773 }
13774
13775 rc = startAllChannels();
13776 if (rc < 0) {
13777 LOGE("startAllChannels failed");
13778 return rc;
13779 }
13780
13781 LOGD("X");
13782 return rc;
13783}
13784
13785/*===========================================================================
13786 * FUNCTION : stopAllChannels
13787 *
13788 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13789 *
13790 * PARAMETERS : None
13791 *
13792 * RETURN : NO_ERROR on success
13793 * Error codes on failure
13794 *
13795 *==========================================================================*/
13796int32_t QCamera3HardwareInterface::stopAllChannels()
13797{
13798 int32_t rc = NO_ERROR;
13799
13800 LOGD("Stopping all channels");
13801 // Stop the Streams/Channels
13802 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13803 it != mStreamInfo.end(); it++) {
13804 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13805 if (channel) {
13806 channel->stop();
13807 }
13808 (*it)->status = INVALID;
13809 }
13810
13811 if (mSupportChannel) {
13812 mSupportChannel->stop();
13813 }
13814 if (mAnalysisChannel) {
13815 mAnalysisChannel->stop();
13816 }
13817 if (mRawDumpChannel) {
13818 mRawDumpChannel->stop();
13819 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013820 if (mHdrPlusRawSrcChannel) {
13821 mHdrPlusRawSrcChannel->stop();
13822 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013823 if (mMetadataChannel) {
13824 /* If content of mStreamInfo is not 0, there is metadata stream */
13825 mMetadataChannel->stop();
13826 }
13827
13828 LOGD("All channels stopped");
13829 return rc;
13830}
13831
13832/*===========================================================================
13833 * FUNCTION : startAllChannels
13834 *
13835 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13836 *
13837 * PARAMETERS : None
13838 *
13839 * RETURN : NO_ERROR on success
13840 * Error codes on failure
13841 *
13842 *==========================================================================*/
13843int32_t QCamera3HardwareInterface::startAllChannels()
13844{
13845 int32_t rc = NO_ERROR;
13846
13847 LOGD("Start all channels ");
13848 // Start the Streams/Channels
13849 if (mMetadataChannel) {
13850 /* If content of mStreamInfo is not 0, there is metadata stream */
13851 rc = mMetadataChannel->start();
13852 if (rc < 0) {
13853 LOGE("META channel start failed");
13854 return rc;
13855 }
13856 }
13857 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13858 it != mStreamInfo.end(); it++) {
13859 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13860 if (channel) {
13861 rc = channel->start();
13862 if (rc < 0) {
13863 LOGE("channel start failed");
13864 return rc;
13865 }
13866 }
13867 }
13868 if (mAnalysisChannel) {
13869 mAnalysisChannel->start();
13870 }
13871 if (mSupportChannel) {
13872 rc = mSupportChannel->start();
13873 if (rc < 0) {
13874 LOGE("Support channel start failed");
13875 return rc;
13876 }
13877 }
13878 if (mRawDumpChannel) {
13879 rc = mRawDumpChannel->start();
13880 if (rc < 0) {
13881 LOGE("RAW dump channel start failed");
13882 return rc;
13883 }
13884 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013885 if (mHdrPlusRawSrcChannel) {
13886 rc = mHdrPlusRawSrcChannel->start();
13887 if (rc < 0) {
13888 LOGE("HDR+ RAW channel start failed");
13889 return rc;
13890 }
13891 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013892
13893 LOGD("All channels started");
13894 return rc;
13895}
13896
13897/*===========================================================================
13898 * FUNCTION : notifyErrorForPendingRequests
13899 *
13900 * DESCRIPTION: This function sends error for all the pending requests/buffers
13901 *
13902 * PARAMETERS : None
13903 *
13904 * RETURN : Error codes
13905 * NO_ERROR on success
13906 *
13907 *==========================================================================*/
13908int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13909{
13910 int32_t rc = NO_ERROR;
13911 unsigned int frameNum = 0;
13912 camera3_capture_result_t result;
13913 camera3_stream_buffer_t *pStream_Buf = NULL;
13914
13915 memset(&result, 0, sizeof(camera3_capture_result_t));
13916
13917 if (mPendingRequestsList.size() > 0) {
13918 pendingRequestIterator i = mPendingRequestsList.begin();
13919 frameNum = i->frame_number;
13920 } else {
13921 /* There might still be pending buffers even though there are
13922 no pending requests. Setting the frameNum to MAX so that
13923 all the buffers with smaller frame numbers are returned */
13924 frameNum = UINT_MAX;
13925 }
13926
13927 LOGH("Oldest frame num on mPendingRequestsList = %u",
13928 frameNum);
13929
Emilian Peev7650c122017-01-19 08:24:33 -080013930 notifyErrorFoPendingDepthData(mDepthChannel);
13931
Thierry Strudel3d639192016-09-09 11:52:26 -070013932 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13933 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13934
13935 if (req->frame_number < frameNum) {
13936 // Send Error notify to frameworks for each buffer for which
13937 // metadata buffer is already sent
13938 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13939 req->frame_number, req->mPendingBufferList.size());
13940
13941 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13942 if (NULL == pStream_Buf) {
13943 LOGE("No memory for pending buffers array");
13944 return NO_MEMORY;
13945 }
13946 memset(pStream_Buf, 0,
13947 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13948 result.result = NULL;
13949 result.frame_number = req->frame_number;
13950 result.num_output_buffers = req->mPendingBufferList.size();
13951 result.output_buffers = pStream_Buf;
13952
13953 size_t index = 0;
13954 for (auto info = req->mPendingBufferList.begin();
13955 info != req->mPendingBufferList.end(); ) {
13956
13957 camera3_notify_msg_t notify_msg;
13958 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13959 notify_msg.type = CAMERA3_MSG_ERROR;
13960 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13961 notify_msg.message.error.error_stream = info->stream;
13962 notify_msg.message.error.frame_number = req->frame_number;
13963 pStream_Buf[index].acquire_fence = -1;
13964 pStream_Buf[index].release_fence = -1;
13965 pStream_Buf[index].buffer = info->buffer;
13966 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13967 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013968 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013969 index++;
13970 // Remove buffer from list
13971 info = req->mPendingBufferList.erase(info);
13972 }
13973
13974 // Remove this request from Map
13975 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13976 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13977 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13978
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013979 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013980
13981 delete [] pStream_Buf;
13982 } else {
13983
13984 // Go through the pending requests info and send error request to framework
13985 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13986
13987 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13988
13989 // Send error notify to frameworks
13990 camera3_notify_msg_t notify_msg;
13991 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13992 notify_msg.type = CAMERA3_MSG_ERROR;
13993 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13994 notify_msg.message.error.error_stream = NULL;
13995 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013996 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013997
13998 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13999 if (NULL == pStream_Buf) {
14000 LOGE("No memory for pending buffers array");
14001 return NO_MEMORY;
14002 }
14003 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
14004
14005 result.result = NULL;
14006 result.frame_number = req->frame_number;
14007 result.input_buffer = i->input_buffer;
14008 result.num_output_buffers = req->mPendingBufferList.size();
14009 result.output_buffers = pStream_Buf;
14010
14011 size_t index = 0;
14012 for (auto info = req->mPendingBufferList.begin();
14013 info != req->mPendingBufferList.end(); ) {
14014 pStream_Buf[index].acquire_fence = -1;
14015 pStream_Buf[index].release_fence = -1;
14016 pStream_Buf[index].buffer = info->buffer;
14017 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
14018 pStream_Buf[index].stream = info->stream;
14019 index++;
14020 // Remove buffer from list
14021 info = req->mPendingBufferList.erase(info);
14022 }
14023
14024 // Remove this request from Map
14025 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
14026 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
14027 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
14028
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014029 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014030 delete [] pStream_Buf;
14031 i = erasePendingRequest(i);
14032 }
14033 }
14034
14035 /* Reset pending frame Drop list and requests list */
14036 mPendingFrameDropList.clear();
14037
14038 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
14039 req.mPendingBufferList.clear();
14040 }
14041 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014042 LOGH("Cleared all the pending buffers ");
14043
14044 return rc;
14045}
14046
14047bool QCamera3HardwareInterface::isOnEncoder(
14048 const cam_dimension_t max_viewfinder_size,
14049 uint32_t width, uint32_t height)
14050{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014051 return ((width > (uint32_t)max_viewfinder_size.width) ||
14052 (height > (uint32_t)max_viewfinder_size.height) ||
14053 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14054 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014055}
14056
14057/*===========================================================================
14058 * FUNCTION : setBundleInfo
14059 *
14060 * DESCRIPTION: Set bundle info for all streams that are bundle.
14061 *
14062 * PARAMETERS : None
14063 *
14064 * RETURN : NO_ERROR on success
14065 * Error codes on failure
14066 *==========================================================================*/
14067int32_t QCamera3HardwareInterface::setBundleInfo()
14068{
14069 int32_t rc = NO_ERROR;
14070
14071 if (mChannelHandle) {
14072 cam_bundle_config_t bundleInfo;
14073 memset(&bundleInfo, 0, sizeof(bundleInfo));
14074 rc = mCameraHandle->ops->get_bundle_info(
14075 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14076 if (rc != NO_ERROR) {
14077 LOGE("get_bundle_info failed");
14078 return rc;
14079 }
14080 if (mAnalysisChannel) {
14081 mAnalysisChannel->setBundleInfo(bundleInfo);
14082 }
14083 if (mSupportChannel) {
14084 mSupportChannel->setBundleInfo(bundleInfo);
14085 }
14086 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14087 it != mStreamInfo.end(); it++) {
14088 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14089 channel->setBundleInfo(bundleInfo);
14090 }
14091 if (mRawDumpChannel) {
14092 mRawDumpChannel->setBundleInfo(bundleInfo);
14093 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014094 if (mHdrPlusRawSrcChannel) {
14095 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14096 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014097 }
14098
14099 return rc;
14100}
14101
14102/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014103 * FUNCTION : setInstantAEC
14104 *
14105 * DESCRIPTION: Set Instant AEC related params.
14106 *
14107 * PARAMETERS :
14108 * @meta: CameraMetadata reference
14109 *
14110 * RETURN : NO_ERROR on success
14111 * Error codes on failure
14112 *==========================================================================*/
14113int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14114{
14115 int32_t rc = NO_ERROR;
14116 uint8_t val = 0;
14117 char prop[PROPERTY_VALUE_MAX];
14118
14119 // First try to configure instant AEC from framework metadata
14120 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14121 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14122 }
14123
14124 // If framework did not set this value, try to read from set prop.
14125 if (val == 0) {
14126 memset(prop, 0, sizeof(prop));
14127 property_get("persist.camera.instant.aec", prop, "0");
14128 val = (uint8_t)atoi(prop);
14129 }
14130
14131 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14132 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14133 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14134 mInstantAEC = val;
14135 mInstantAECSettledFrameNumber = 0;
14136 mInstantAecFrameIdxCount = 0;
14137 LOGH("instantAEC value set %d",val);
14138 if (mInstantAEC) {
14139 memset(prop, 0, sizeof(prop));
14140 property_get("persist.camera.ae.instant.bound", prop, "10");
14141 int32_t aec_frame_skip_cnt = atoi(prop);
14142 if (aec_frame_skip_cnt >= 0) {
14143 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14144 } else {
14145 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14146 rc = BAD_VALUE;
14147 }
14148 }
14149 } else {
14150 LOGE("Bad instant aec value set %d", val);
14151 rc = BAD_VALUE;
14152 }
14153 return rc;
14154}
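/*
 * Usage sketch (illustrative only): instant AEC can be requested either via
 * the QCAMERA3_INSTANT_AEC_MODE vendor tag or via the
 * persist.camera.instant.aec property, with the skip-frame bound taken from
 * persist.camera.ae.instant.bound (default 10). Assuming the value "1" maps
 * to an aggressive convergence mode in cam_aec_convergence_type (an
 * assumption, not confirmed by this file), it could be enabled as follows;
 * the helper name is hypothetical.
 *
 *   #include <cutils/properties.h>
 *
 *   static void enableInstantAec() {
 *       property_set("persist.camera.instant.aec", "1");
 *       property_set("persist.camera.ae.instant.bound", "6");
 *   }
 */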
14155
14156/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014157 * FUNCTION : get_num_overall_buffers
14158 *
14159 * DESCRIPTION: Estimate number of pending buffers across all requests.
14160 *
14161 * PARAMETERS : None
14162 *
14163 * RETURN : Number of overall pending buffers
14164 *
14165 *==========================================================================*/
14166uint32_t PendingBuffersMap::get_num_overall_buffers()
14167{
14168 uint32_t sum_buffers = 0;
14169 for (auto &req : mPendingBuffersInRequest) {
14170 sum_buffers += req.mPendingBufferList.size();
14171 }
14172 return sum_buffers;
14173}
14174
14175/*===========================================================================
14176 * FUNCTION : removeBuf
14177 *
14178 * DESCRIPTION: Remove a matching buffer from tracker.
14179 *
14180 * PARAMETERS : @buffer: image buffer for the callback
14181 *
14182 * RETURN : None
14183 *
14184 *==========================================================================*/
14185void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14186{
14187 bool buffer_found = false;
14188 for (auto req = mPendingBuffersInRequest.begin();
14189 req != mPendingBuffersInRequest.end(); req++) {
14190 for (auto k = req->mPendingBufferList.begin();
14191 k != req->mPendingBufferList.end(); k++ ) {
14192 if (k->buffer == buffer) {
14193 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14194 req->frame_number, buffer);
14195 k = req->mPendingBufferList.erase(k);
14196 if (req->mPendingBufferList.empty()) {
14197 // Remove this request from Map
14198 req = mPendingBuffersInRequest.erase(req);
14199 }
14200 buffer_found = true;
14201 break;
14202 }
14203 }
14204 if (buffer_found) {
14205 break;
14206 }
14207 }
14208 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14209 get_num_overall_buffers());
14210}
14211
14212/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014213 * FUNCTION : getBufErrStatus
14214 *
14215 * DESCRIPTION: get buffer error status
14216 *
14217 * PARAMETERS : @buffer: buffer handle
14218 *
14219 * RETURN : Error status
14220 *
14221 *==========================================================================*/
14222int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14223{
14224 for (auto& req : mPendingBuffersInRequest) {
14225 for (auto& k : req.mPendingBufferList) {
14226 if (k.buffer == buffer)
14227 return k.bufStatus;
14228 }
14229 }
14230 return CAMERA3_BUFFER_STATUS_OK;
14231}
14232
14233/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014234 * FUNCTION : setPAAFSupport
14235 *
14236 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14237 * feature mask according to stream type and filter
14238 * arrangement
14239 *
14240 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14241 * @stream_type: stream type
14242 * @filter_arrangement: filter arrangement
14243 *
14244 * RETURN : None
14245 *==========================================================================*/
14246void QCamera3HardwareInterface::setPAAFSupport(
14247 cam_feature_mask_t& feature_mask,
14248 cam_stream_type_t stream_type,
14249 cam_color_filter_arrangement_t filter_arrangement)
14250{
Thierry Strudel3d639192016-09-09 11:52:26 -070014251 switch (filter_arrangement) {
14252 case CAM_FILTER_ARRANGEMENT_RGGB:
14253 case CAM_FILTER_ARRANGEMENT_GRBG:
14254 case CAM_FILTER_ARRANGEMENT_GBRG:
14255 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014256 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14257 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014258 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014259 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14260 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014261 }
14262 break;
14263 case CAM_FILTER_ARRANGEMENT_Y:
14264 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14265 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14266 }
14267 break;
14268 default:
14269 break;
14270 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014271 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14272 feature_mask, stream_type, filter_arrangement);
14273
14274
Thierry Strudel3d639192016-09-09 11:52:26 -070014275}
14276
14277/*===========================================================================
14278* FUNCTION : getSensorMountAngle
14279*
14280* DESCRIPTION: Retrieve sensor mount angle
14281*
14282* PARAMETERS : None
14283*
14284* RETURN : sensor mount angle in uint32_t
14285*==========================================================================*/
14286uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14287{
14288 return gCamCapability[mCameraId]->sensor_mount_angle;
14289}
14290
14291/*===========================================================================
14292* FUNCTION : getRelatedCalibrationData
14293*
14294* DESCRIPTION: Retrieve related system calibration data
14295*
14296* PARAMETERS : None
14297*
14298* RETURN : Pointer of related system calibration data
14299*==========================================================================*/
14300const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14301{
14302 return (const cam_related_system_calibration_data_t *)
14303 &(gCamCapability[mCameraId]->related_cam_calibration);
14304}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014305
14306/*===========================================================================
14307 * FUNCTION : is60HzZone
14308 *
14309 * DESCRIPTION: Whether the device is in a region with 60Hz mains electricity frequency
14310 *
14311 * PARAMETERS : None
14312 *
14313 * RETURN : True if in 60Hz zone, False otherwise
14314 *==========================================================================*/
14315bool QCamera3HardwareInterface::is60HzZone()
14316{
14317 time_t t = time(NULL);
14318 struct tm lt;
14319
14320 struct tm* r = localtime_r(&t, &lt);
14321
14322 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14323 return true;
14324 else
14325 return false;
14326}
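/*
 * Informal check of the bounds above: a UTC-5 zone (tm_gmtoff == -18000)
 * satisfies tm_gmtoff <= -2*60*60, so the Americas are treated as 60Hz;
 * UTC+1 (3600) falls inside the open (-2h, +8h) window and is treated as
 * 50Hz; UTC+9 (32400) is >= 8*60*60 and is treated as 60Hz again.
 */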
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014327
14328/*===========================================================================
14329 * FUNCTION : adjustBlackLevelForCFA
14330 *
14331 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14332 * of bayer CFA (Color Filter Array).
14333 *
14334 * PARAMETERS : @input: black level pattern in the order of RGGB
14335 * @output: black level pattern in the order of CFA
14336 * @color_arrangement: CFA color arrangement
14337 *
14338 * RETURN : None
14339 *==========================================================================*/
14340template<typename T>
14341void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14342 T input[BLACK_LEVEL_PATTERN_CNT],
14343 T output[BLACK_LEVEL_PATTERN_CNT],
14344 cam_color_filter_arrangement_t color_arrangement)
14345{
14346 switch (color_arrangement) {
14347 case CAM_FILTER_ARRANGEMENT_GRBG:
14348 output[0] = input[1];
14349 output[1] = input[0];
14350 output[2] = input[3];
14351 output[3] = input[2];
14352 break;
14353 case CAM_FILTER_ARRANGEMENT_GBRG:
14354 output[0] = input[2];
14355 output[1] = input[3];
14356 output[2] = input[0];
14357 output[3] = input[1];
14358 break;
14359 case CAM_FILTER_ARRANGEMENT_BGGR:
14360 output[0] = input[3];
14361 output[1] = input[2];
14362 output[2] = input[1];
14363 output[3] = input[0];
14364 break;
14365 case CAM_FILTER_ARRANGEMENT_RGGB:
14366 output[0] = input[0];
14367 output[1] = input[1];
14368 output[2] = input[2];
14369 output[3] = input[3];
14370 break;
14371 default:
14372 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14373 break;
14374 }
14375}
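/*
 * Worked example (illustrative only): with a GRBG color filter arrangement,
 * an RGGB-ordered black level pattern {R, Gr, Gb, B} is remapped to CFA
 * order as follows, called on a QCamera3HardwareInterface instance:
 *
 *   float rggb[BLACK_LEVEL_PATTERN_CNT] = {64.0f, 65.0f, 65.0f, 66.0f};
 *   float cfa[BLACK_LEVEL_PATTERN_CNT];
 *   adjustBlackLevelForCFA(rggb, cfa, CAM_FILTER_ARRANGEMENT_GRBG);
 *   // cfa now holds {65.0f, 64.0f, 66.0f, 65.0f}, i.e. {Gr, R, B, Gb}.
 */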
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014376
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014377void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14378 CameraMetadata &resultMetadata,
14379 std::shared_ptr<metadata_buffer_t> settings)
14380{
14381 if (settings == nullptr) {
14382 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14383 return;
14384 }
14385
14386 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14387 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14388 }
14389
14390 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14391 String8 str((const char *)gps_methods);
14392 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14393 }
14394
14395 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14396 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14397 }
14398
14399 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14400 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14401 }
14402
14403 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14404 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14405 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14406 }
14407
14408 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14409 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14410 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14411 }
14412
14413 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14414 int32_t fwk_thumb_size[2];
14415 fwk_thumb_size[0] = thumb_size->width;
14416 fwk_thumb_size[1] = thumb_size->height;
14417 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14418 }
14419
14420 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14421 uint8_t fwk_intent = intent[0];
14422 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14423 }
14424}
14425
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014426bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14427 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14428 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014429{
14430 if (hdrPlusRequest == nullptr) return false;
14431
14432 // Check noise reduction mode is high quality.
14433 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14434 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14435 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014436 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14437 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014438 return false;
14439 }
14440
14441 // Check edge mode is high quality.
14442 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14443 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14444 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14445 return false;
14446 }
14447
14448 if (request.num_output_buffers != 1 ||
14449 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14450 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014451 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14452 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14453                    request.output_buffers[i].stream->width,
14454                    request.output_buffers[i].stream->height,
14455                    request.output_buffers[i].stream->format);
14456 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014457 return false;
14458 }
14459
14460 // Get a YUV buffer from pic channel.
14461 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14462 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14463 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14464 if (res != OK) {
14465 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14466 __FUNCTION__, strerror(-res), res);
14467 return false;
14468 }
14469
14470 pbcamera::StreamBuffer buffer;
14471 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014472 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014473 buffer.data = yuvBuffer->buffer;
14474 buffer.dataSize = yuvBuffer->frame_len;
14475
14476 pbcamera::CaptureRequest pbRequest;
14477 pbRequest.id = request.frame_number;
14478 pbRequest.outputBuffers.push_back(buffer);
14479
14480 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014481 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014482 if (res != OK) {
14483 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14484 strerror(-res), res);
14485 return false;
14486 }
14487
14488 hdrPlusRequest->yuvBuffer = yuvBuffer;
14489 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14490
14491 return true;
14492}
14493
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014494status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14495{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014496 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14497 return OK;
14498 }
14499
14500 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14501 if (res != OK) {
14502 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14503 strerror(-res), res);
14504 return res;
14505 }
14506 gHdrPlusClientOpening = true;
14507
14508 return OK;
14509}
14510
Chien-Yu Chenee335912017-02-09 17:53:20 -080014511status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14512{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014513 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014514
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014515 // Check if gHdrPlusClient is opened or being opened.
14516 if (gHdrPlusClient == nullptr) {
14517 if (gHdrPlusClientOpening) {
14518 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14519 return OK;
14520 }
14521
14522 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014523 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014524 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14525 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014526 return res;
14527 }
14528
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014529 // When opening HDR+ client completes, HDR+ mode will be enabled.
14530 return OK;
14531
Chien-Yu Chenee335912017-02-09 17:53:20 -080014532 }
14533
14534 // Configure stream for HDR+.
14535 res = configureHdrPlusStreamsLocked();
14536 if (res != OK) {
14537 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014538 return res;
14539 }
14540
14541 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14542 res = gHdrPlusClient->setZslHdrPlusMode(true);
14543 if (res != OK) {
14544 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014545 return res;
14546 }
14547
14548 mHdrPlusModeEnabled = true;
14549 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14550
14551 return OK;
14552}
14553
14554void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14555{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014556 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014557 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014558 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14559 if (res != OK) {
14560 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14561 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014562
14563 // Close HDR+ client so Easel can enter low power mode.
14564 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14565 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014566 }
14567
14568 mHdrPlusModeEnabled = false;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014569 gHdrPlusClientOpening = false;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014570 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14571}
14572
14573status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014574{
14575 pbcamera::InputConfiguration inputConfig;
14576 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14577 status_t res = OK;
14578
14579 // Configure HDR+ client streams.
14580 // Get input config.
14581 if (mHdrPlusRawSrcChannel) {
14582 // HDR+ input buffers will be provided by HAL.
14583 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14584 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14585 if (res != OK) {
14586 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14587 __FUNCTION__, strerror(-res), res);
14588 return res;
14589 }
14590
14591 inputConfig.isSensorInput = false;
14592 } else {
14593 // Sensor MIPI will send data to Easel.
14594 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014595 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014596 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14597 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14598 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14599 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14600 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14601 if (mSensorModeInfo.num_raw_bits != 10) {
14602 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14603 mSensorModeInfo.num_raw_bits);
14604 return BAD_VALUE;
14605 }
14606
14607 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014608 }
14609
14610 // Get output configurations.
14611 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014612 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014613
14614 // Easel may need to output YUV output buffers if mPictureChannel was created.
14615 pbcamera::StreamConfiguration yuvOutputConfig;
14616 if (mPictureChannel != nullptr) {
14617 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14618 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14619 if (res != OK) {
14620 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14621 __FUNCTION__, strerror(-res), res);
14622
14623 return res;
14624 }
14625
14626 outputStreamConfigs.push_back(yuvOutputConfig);
14627 }
14628
14629 // TODO: consider other channels for YUV output buffers.
14630
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014631 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014632 if (res != OK) {
14633 LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14634 strerror(-res), res);
14635 return res;
14636 }
14637
14638 return OK;
14639}
14640
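// Callback invoked once an asynchronously requested HDR+ client has finished opening:
// takes ownership of the client, pushes the static metadata to it and enables HDR+ mode,
// unless HDR+ was disabled while the client was still opening.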
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014641void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
14642{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014643 if (client == nullptr) {
14644 ALOGE("%s: Opened client is null.", __FUNCTION__);
14645 return;
14646 }
14647
Chien-Yu Chene96475e2017-04-11 11:53:26 -070014648 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014649 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14650
14651 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014652 if (!gHdrPlusClientOpening) {
14653 ALOGW("%s: HDR+ mode was disabled while the HDR+ client was being opened.", __FUNCTION__);
14654 return;
14655 }
14656
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014657 gHdrPlusClient = std::move(client);
14658 gHdrPlusClientOpening = false;
14659
14660 // Set static metadata.
14661 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14662 if (res != OK) {
14663 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14664 __FUNCTION__, strerror(-res), res);
14665 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14666 gHdrPlusClient = nullptr;
14667 return;
14668 }
14669
14670 // Enable HDR+ mode.
14671 res = enableHdrPlusModeLocked();
14672 if (res != OK) {
14673 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14674 }
14675}
14676
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014677void QCamera3HardwareInterface::onOpenFailed(status_t err)
14678{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014679 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14680 Mutex::Autolock l(gHdrPlusClientLock);
14681 gHdrPlusClientOpening = false;
14682}
14683
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014684void QCamera3HardwareInterface::onFatalError()
14685{
14686 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
14687
14688 // Set HAL state to error.
14689 pthread_mutex_lock(&mMutex);
14690 mState = ERROR;
14691 pthread_mutex_unlock(&mMutex);
14692
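 // Notify the framework of the device error so it can close this camera device.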
14693 handleCameraDeviceError();
14694}
14695
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014696void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014697 const camera_metadata_t &resultMetadata)
14698{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014699 if (result != nullptr) {
14700 if (result->outputBuffers.size() != 1) {
14701 ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
14702 result->outputBuffers.size());
14703 return;
14704 }
14705
14706 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14707 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14708 result->outputBuffers[0].streamId);
14709 return;
14710 }
14711
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014712 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014713 HdrPlusPendingRequest pendingRequest;
14714 {
14715 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
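 // The entry is expected to exist: it is added when the HDR+ request is
 // submitted and only erased after its result (or failed result) is handled.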
14716 auto req = mHdrPlusPendingRequests.find(result->requestId);
14717 pendingRequest = req->second;
14718 }
14719
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014720 // Update the result metadata with the settings of the HDR+ still capture request because
14721 // the result metadata belongs to a ZSL buffer.
14722 CameraMetadata metadata;
14723 metadata = &resultMetadata;
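 // Assigning the raw camera_metadata_t pointer makes CameraMetadata clone the
 // buffer, so the updates below do not modify the caller's resultMetadata.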
14724 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14725 camera_metadata_t* updatedResultMetadata = metadata.release();
14726
14727 QCamera3PicChannel *picChannel =
14728 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14729
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014730 // Check if dumping HDR+ YUV output is enabled.
14731 char prop[PROPERTY_VALUE_MAX];
14732 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14733 bool dumpYuvOutput = atoi(prop);
14734
14735 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014736 // Dump yuv buffer to a ppm file.
14737 pbcamera::StreamConfiguration outputConfig;
14738 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14739 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14740 if (rc == OK) {
14741 char buf[FILENAME_MAX] = {};
14742 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14743 result->requestId, result->outputBuffers[0].streamId,
14744 outputConfig.image.width, outputConfig.image.height);
14745
14746 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14747 } else {
14748 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14749 __FUNCTION__, strerror(-rc), rc);
14750 }
14751 }
14752
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014753 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14754 auto halMetadata = std::make_shared<metadata_buffer_t>();
14755 clear_metadata_buffer(halMetadata.get());
14756
14757 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14758 // encoding.
14759 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14760 halStreamId, /*minFrameDuration*/0);
14761 if (res == OK) {
14762 // Return the buffer to pic channel for encoding.
14763 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14764 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14765 halMetadata);
14766 } else {
14767 // Return the buffer without encoding.
14768 // TODO: This should not happen but we may want to report an error buffer to camera
14769 // service.
14770 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14771 ALOGE("%s: Translating framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14772 strerror(-res), res);
14773 }
14774
14775 // Send HDR+ metadata to framework.
14776 {
14777 pthread_mutex_lock(&mMutex);
14778
14779 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
14780 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
14781 pthread_mutex_unlock(&mMutex);
14782 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014783
14784 // Remove the HDR+ pending request.
14785 {
14786 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14787 auto req = mHdrPlusPendingRequests.find(result->requestId);
14788 mHdrPlusPendingRequests.erase(req);
14789 }
14790 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014791}
14792
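// Handles a failed HDR+ capture: returns the YUV buffer to the pic channel, reports a
// buffer error for every framework buffer pending on that frame number, and drops the
// pending HDR+ request and HAL request for that frame.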
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014793void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
14794{
14795 if (failedResult == nullptr) {
14796 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
14797 return;
14798 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014799
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014800 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014801
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014802 // Remove the pending HDR+ request.
14803 {
14804 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14805 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14806
14807 // Return the buffer to pic channel.
14808 QCamera3PicChannel *picChannel =
14809 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14810 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14811
14812 mHdrPlusPendingRequests.erase(pendingRequest);
14813 }
14814
14815 pthread_mutex_lock(&mMutex);
14816
14817 // Find the pending buffers.
14818 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
14819 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14820 if (pendingBuffers->frame_number == failedResult->requestId) {
14821 break;
14822 }
14823 pendingBuffers++;
14824 }
14825
14826 // Send out buffer errors for the pending buffers.
14827 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14828 std::vector<camera3_stream_buffer_t> streamBuffers;
14829 for (auto &buffer : pendingBuffers->mPendingBufferList) {
14830 // Prepare a stream buffer.
14831 camera3_stream_buffer_t streamBuffer = {};
14832 streamBuffer.stream = buffer.stream;
14833 streamBuffer.buffer = buffer.buffer;
14834 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14835 streamBuffer.acquire_fence = -1;
14836 streamBuffer.release_fence = -1;
14837
14838 streamBuffers.push_back(streamBuffer);
14839
14840 // Send out error buffer event.
14841 camera3_notify_msg_t notify_msg = {};
14842 notify_msg.type = CAMERA3_MSG_ERROR;
14843 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
14844 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
14845 notify_msg.message.error.error_stream = buffer.stream;
14846
14847 orchestrateNotify(&notify_msg);
14848 }
14849
14850 camera3_capture_result_t result = {};
14851 result.frame_number = pendingBuffers->frame_number;
14852 result.num_output_buffers = streamBuffers.size();
14853 result.output_buffers = &streamBuffers[0];
14854
14855 // Send out result with buffer errors.
14856 orchestrateResult(&result);
14857
14858 // Remove pending buffers.
14859 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
14860 }
14861
14862 // Remove pending request.
14863 auto halRequest = mPendingRequestsList.begin();
14864 while (halRequest != mPendingRequestsList.end()) {
14865 if (halRequest->frame_number == failedResult->requestId) {
14866 mPendingRequestsList.erase(halRequest);
14867 break;
14868 }
14869 halRequest++;
14870 }
14871
14872 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014873}
14874
Thierry Strudel3d639192016-09-09 11:52:26 -070014875}; //end namespace qcamera