blob: c44c82bbfd2925984fb2f72f79a51e607a38b894 [file] [log] [blame]
Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
Shuzhen Wangf6890e02016-08-12 14:28:54 -070045#include <time.h>
Thierry Strudel3d639192016-09-09 11:52:26 -070046#include <sync/sync.h>
47#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080048#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070049
50// Display dependencies
51#include "qdMetaData.h"
52
53// Camera dependencies
54#include "android/QCamera3External.h"
55#include "util/QCameraFlash.h"
56#include "QCamera3HWI.h"
57#include "QCamera3VendorTags.h"
58#include "QCameraTrace.h"
59
Chien-Yu Chene687bd02016-12-07 18:30:26 -080060#include "HdrPlusClientUtils.h"
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070061#include "EaselManagerClient.h"
Chien-Yu Chene687bd02016-12-07 18:30:26 -080062
Thierry Strudel3d639192016-09-09 11:52:26 -070063extern "C" {
64#include "mm_camera_dbg.h"
65}
Shuzhen Wangfb961e52016-11-28 11:48:02 -080066#include "cam_cond.h"
Thierry Strudel3d639192016-09-09 11:52:26 -070067
Jiyong Parkd4caeb72017-06-12 17:16:36 +090068using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070069using namespace android;
70
71namespace qcamera {
72
73#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
74
75#define EMPTY_PIPELINE_DELAY 2
76#define PARTIAL_RESULT_COUNT 2
77#define FRAME_SKIP_DELAY 0
78
79#define MAX_VALUE_8BIT ((1<<8)-1)
80#define MAX_VALUE_10BIT ((1<<10)-1)
81#define MAX_VALUE_12BIT ((1<<12)-1)
82
83#define VIDEO_4K_WIDTH 3840
84#define VIDEO_4K_HEIGHT 2160
85
Jason Leeb9e76432017-03-10 17:14:19 -080086#define MAX_EIS_WIDTH 3840
87#define MAX_EIS_HEIGHT 2160
Thierry Strudel3d639192016-09-09 11:52:26 -070088
89#define MAX_RAW_STREAMS 1
90#define MAX_STALLING_STREAMS 1
91#define MAX_PROCESSED_STREAMS 3
92/* Batch mode is enabled only if FPS set is equal to or greater than this */
93#define MIN_FPS_FOR_BATCH_MODE (120)
94#define PREVIEW_FPS_FOR_HFR (30)
95#define DEFAULT_VIDEO_FPS (30.0)
Thierry Strudele80ad7c2016-12-06 10:16:27 -080096#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
Thierry Strudel3d639192016-09-09 11:52:26 -070097#define MAX_HFR_BATCH_SIZE (8)
98#define REGIONS_TUPLE_COUNT 5
99#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
Thierry Strudel3d639192016-09-09 11:52:26 -0700100// Set a threshold for detection of missing buffers //seconds
101#define MISSING_REQUEST_BUF_TIMEOUT 3
Chien-Yu Chene687bd02016-12-07 18:30:26 -0800102#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
Thierry Strudel3d639192016-09-09 11:52:26 -0700103#define FLUSH_TIMEOUT 3
104#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
105
106#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
107 CAM_QCOM_FEATURE_CROP |\
108 CAM_QCOM_FEATURE_ROTATION |\
109 CAM_QCOM_FEATURE_SHARPNESS |\
110 CAM_QCOM_FEATURE_SCALE |\
111 CAM_QCOM_FEATURE_CAC |\
112 CAM_QCOM_FEATURE_CDS )
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700113/* Per configuration size for static metadata length*/
114#define PER_CONFIGURATION_SIZE_3 (3)
Thierry Strudel3d639192016-09-09 11:52:26 -0700115
116#define TIMEOUT_NEVER -1
117
Jason Lee8ce36fa2017-04-19 19:40:37 -0700118/* Face rect indices */
119#define FACE_LEFT 0
120#define FACE_TOP 1
121#define FACE_RIGHT 2
122#define FACE_BOTTOM 3
123#define FACE_WEIGHT 4
124
Thierry Strudel04e026f2016-10-10 11:27:36 -0700125/* Face landmarks indices */
126#define LEFT_EYE_X 0
127#define LEFT_EYE_Y 1
128#define RIGHT_EYE_X 2
129#define RIGHT_EYE_Y 3
130#define MOUTH_X 4
131#define MOUTH_Y 5
132#define TOTAL_LANDMARK_INDICES 6
133
Zhijun He2a5df222017-04-04 18:20:38 -0700134// Max preferred zoom
Zhijun He76870072017-05-08 17:13:17 -0700135#define MAX_PREFERRED_ZOOM_RATIO 7.0
Zhijun He2a5df222017-04-04 18:20:38 -0700136
Chien-Yu Chen3b630e52017-06-02 15:39:47 -0700137// TODO: Enable HDR+ for front camera after it's supported. b/37100623.
138#define ENABLE_HDRPLUS_FOR_FRONT_CAMERA 0
139
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700140// Whether to check for the GPU stride padding, or use the default
141//#define CHECK_GPU_PIXEL_ALIGNMENT
142
Thierry Strudel3d639192016-09-09 11:52:26 -0700143cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
144const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
145extern pthread_mutex_t gCamLock;
146volatile uint32_t gCamHal3LogLevel = 1;
147extern uint8_t gNumCameraSessions;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700148
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800149// Note that this doesn't support concurrent front and back camera b/35960155.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700150// The following Easel related variables must be protected by gHdrPlusClientLock.
Chien-Yu Chen44abb642017-06-02 18:00:38 -0700151std::unique_ptr<EaselManagerClient> gEaselManagerClient;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700152bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
153std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
154bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
Chien-Yu Chen509314b2017-04-07 15:27:55 -0700155bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -0700156bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700157
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800158// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
159bool gEaselBypassOnly;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700160
161Mutex gHdrPlusClientLock; // Protect above Easel related variables.
162
Thierry Strudel3d639192016-09-09 11:52:26 -0700163
164const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
165 {"On", CAM_CDS_MODE_ON},
166 {"Off", CAM_CDS_MODE_OFF},
167 {"Auto",CAM_CDS_MODE_AUTO}
168};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700169const QCamera3HardwareInterface::QCameraMap<
170 camera_metadata_enum_android_video_hdr_mode_t,
171 cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
172 { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
173 { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
174};
175
Thierry Strudel54dc9782017-02-15 12:12:10 -0800176const QCamera3HardwareInterface::QCameraMap<
177 camera_metadata_enum_android_binning_correction_mode_t,
178 cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
179 { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
180 { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
181};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700182
183const QCamera3HardwareInterface::QCameraMap<
184 camera_metadata_enum_android_ir_mode_t,
185 cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
186 {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
187 {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
188 {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
189};
Thierry Strudel3d639192016-09-09 11:52:26 -0700190
191const QCamera3HardwareInterface::QCameraMap<
192 camera_metadata_enum_android_control_effect_mode_t,
193 cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
194 { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
195 { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
196 { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
197 { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
198 { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
199 { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
200 { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
201 { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
202 { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
203};
204
205const QCamera3HardwareInterface::QCameraMap<
206 camera_metadata_enum_android_control_awb_mode_t,
207 cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
208 { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
209 { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
210 { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
211 { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
212 { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
213 { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
214 { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
215 { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
216 { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
217};
218
219const QCamera3HardwareInterface::QCameraMap<
220 camera_metadata_enum_android_control_scene_mode_t,
221 cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
222 { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
223 { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
224 { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
225 { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
226 { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
227 { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
228 { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
229 { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
230 { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
231 { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
232 { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
233 { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
234 { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
235 { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
236 { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800237 { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
238 { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
Thierry Strudel3d639192016-09-09 11:52:26 -0700239};
240
241const QCamera3HardwareInterface::QCameraMap<
242 camera_metadata_enum_android_control_af_mode_t,
243 cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
244 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
245 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
246 { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
247 { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
248 { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
249 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
250 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
251};
252
253const QCamera3HardwareInterface::QCameraMap<
254 camera_metadata_enum_android_color_correction_aberration_mode_t,
255 cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
256 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
257 CAM_COLOR_CORRECTION_ABERRATION_OFF },
258 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
259 CAM_COLOR_CORRECTION_ABERRATION_FAST },
260 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
261 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
262};
263
264const QCamera3HardwareInterface::QCameraMap<
265 camera_metadata_enum_android_control_ae_antibanding_mode_t,
266 cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
267 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
268 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
269 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
270 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
271};
272
273const QCamera3HardwareInterface::QCameraMap<
274 camera_metadata_enum_android_control_ae_mode_t,
275 cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
276 { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
277 { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
278 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
279 { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
280 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
281};
282
283const QCamera3HardwareInterface::QCameraMap<
284 camera_metadata_enum_android_flash_mode_t,
285 cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
286 { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
287 { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
288 { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
289};
290
291const QCamera3HardwareInterface::QCameraMap<
292 camera_metadata_enum_android_statistics_face_detect_mode_t,
293 cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
294 { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
295 { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
296 { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
297};
298
299const QCamera3HardwareInterface::QCameraMap<
300 camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
301 cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
302 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
303 CAM_FOCUS_UNCALIBRATED },
304 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
305 CAM_FOCUS_APPROXIMATE },
306 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
307 CAM_FOCUS_CALIBRATED }
308};
309
310const QCamera3HardwareInterface::QCameraMap<
311 camera_metadata_enum_android_lens_state_t,
312 cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
313 { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
314 { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
315};
316
317const int32_t available_thumbnail_sizes[] = {0, 0,
318 176, 144,
319 240, 144,
320 256, 144,
321 240, 160,
322 256, 154,
323 240, 240,
324 320, 240};
325
326const QCamera3HardwareInterface::QCameraMap<
327 camera_metadata_enum_android_sensor_test_pattern_mode_t,
328 cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
329 { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
330 { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
331 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
332 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
333 { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
334 { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
335};
336
337/* Since there is no mapping for all the options some Android enum are not listed.
338 * Also, the order in this list is important because while mapping from HAL to Android it will
339 * traverse from lower to higher index which means that for HAL values that are map to different
340 * Android values, the traverse logic will select the first one found.
341 */
342const QCamera3HardwareInterface::QCameraMap<
343 camera_metadata_enum_android_sensor_reference_illuminant1_t,
344 cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
345 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
346 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
347 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
348 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
349 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
350 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
351 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
352 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
353 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
354 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
355 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
356 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
357 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
358 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
359 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
360 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
361};
362
363const QCamera3HardwareInterface::QCameraMap<
364 int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
365 { 60, CAM_HFR_MODE_60FPS},
366 { 90, CAM_HFR_MODE_90FPS},
367 { 120, CAM_HFR_MODE_120FPS},
368 { 150, CAM_HFR_MODE_150FPS},
369 { 180, CAM_HFR_MODE_180FPS},
370 { 210, CAM_HFR_MODE_210FPS},
371 { 240, CAM_HFR_MODE_240FPS},
372 { 480, CAM_HFR_MODE_480FPS},
373};
374
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700375const QCamera3HardwareInterface::QCameraMap<
376 qcamera3_ext_instant_aec_mode_t,
377 cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
378 { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
379 { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
380 { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
381};
Thierry Strudel54dc9782017-02-15 12:12:10 -0800382
383const QCamera3HardwareInterface::QCameraMap<
384 qcamera3_ext_exposure_meter_mode_t,
385 cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
386 { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
387 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
388 { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
389 { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
390 { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
391 { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
392 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
393};
394
395const QCamera3HardwareInterface::QCameraMap<
396 qcamera3_ext_iso_mode_t,
397 cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
398 { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
399 { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
400 { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
401 { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
402 { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
403 { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
404 { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
405 { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
406};
407
Thierry Strudel3d639192016-09-09 11:52:26 -0700408camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
409 .initialize = QCamera3HardwareInterface::initialize,
410 .configure_streams = QCamera3HardwareInterface::configure_streams,
411 .register_stream_buffers = NULL,
412 .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
413 .process_capture_request = QCamera3HardwareInterface::process_capture_request,
414 .get_metadata_vendor_tag_ops = NULL,
415 .dump = QCamera3HardwareInterface::dump,
416 .flush = QCamera3HardwareInterface::flush,
417 .reserved = {0},
418};
419
420// initialise to some default value
421uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
422
Chien-Yu Chen509314b2017-04-07 15:27:55 -0700423static inline void logEaselEvent(const char *tag, const char *event) {
424 if (CC_UNLIKELY(gEaselProfilingEnabled)) {
425 struct timespec ts = {};
426 static int64_t kMsPerSec = 1000;
427 static int64_t kNsPerMs = 1000000;
428 status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
429 if (res != OK) {
430 ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
431 } else {
432 int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
433 ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
434 }
435 }
436}
437
Thierry Strudel3d639192016-09-09 11:52:26 -0700438/*===========================================================================
439 * FUNCTION : QCamera3HardwareInterface
440 *
441 * DESCRIPTION: constructor of QCamera3HardwareInterface
442 *
443 * PARAMETERS :
444 * @cameraId : camera ID
445 *
446 * RETURN : none
447 *==========================================================================*/
448QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
449 const camera_module_callbacks_t *callbacks)
450 : mCameraId(cameraId),
451 mCameraHandle(NULL),
452 mCameraInitialized(false),
453 mCallbackOps(NULL),
454 mMetadataChannel(NULL),
455 mPictureChannel(NULL),
456 mRawChannel(NULL),
457 mSupportChannel(NULL),
458 mAnalysisChannel(NULL),
459 mRawDumpChannel(NULL),
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700460 mHdrPlusRawSrcChannel(NULL),
Thierry Strudel3d639192016-09-09 11:52:26 -0700461 mDummyBatchChannel(NULL),
Emilian Peev7650c122017-01-19 08:24:33 -0800462 mDepthChannel(NULL),
Emilian Peev656e4fa2017-06-02 16:47:04 +0100463 mDepthCloudMode(CAM_PD_DATA_SKIP),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800464 mPerfLockMgr(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700465 mChannelHandle(0),
466 mFirstConfiguration(true),
467 mFlush(false),
468 mFlushPerf(false),
469 mParamHeap(NULL),
470 mParameters(NULL),
471 mPrevParameters(NULL),
472 m_bIsVideo(false),
473 m_bIs4KVideo(false),
474 m_bEisSupportedSize(false),
475 m_bEisEnable(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800476 m_bEis3PropertyEnabled(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700477 m_MobicatMask(0),
Chien-Yu Chen3f303522017-05-19 15:21:45 -0700478 mShutterDispatcher(this),
479 mOutputBufferDispatcher(this),
Thierry Strudel3d639192016-09-09 11:52:26 -0700480 mMinProcessedFrameDuration(0),
481 mMinJpegFrameDuration(0),
482 mMinRawFrameDuration(0),
483 mMetaFrameCount(0U),
484 mUpdateDebugLevel(false),
485 mCallbacks(callbacks),
486 mCaptureIntent(0),
487 mCacMode(0),
Shuzhen Wang2abea3d2016-03-31 11:09:27 -0700488 mHybridAeEnable(0),
Samuel Ha68ba5172016-12-15 18:41:12 -0800489 /* DevCamDebug metadata internal m control*/
490 mDevCamDebugMetaEnable(0),
491 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -0700492 mBatchSize(0),
493 mToBeQueuedVidBufs(0),
494 mHFRVideoFps(DEFAULT_VIDEO_FPS),
495 mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800496 mStreamConfig(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800497 mCommon(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700498 mFirstFrameNumberInBatch(0),
499 mNeedSensorRestart(false),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800500 mPreviewStarted(false),
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700501 mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
502 mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
Emilian Peev0f3c3162017-03-15 12:57:46 +0000503 mPDSupported(false),
504 mPDIndex(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700505 mInstantAEC(false),
506 mResetInstantAEC(false),
507 mInstantAECSettledFrameNumber(0),
508 mAecSkipDisplayFrameBound(0),
509 mInstantAecFrameIdxCount(0),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800510 mCurrFeatureState(0),
Thierry Strudel3d639192016-09-09 11:52:26 -0700511 mLdafCalibExist(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700512 mLastCustIntentFrmNum(-1),
Shuzhen Wang3c077d72017-04-20 22:48:59 -0700513 mFirstMetadataCallback(true),
Thierry Strudel3d639192016-09-09 11:52:26 -0700514 mState(CLOSED),
515 mIsDeviceLinked(false),
516 mIsMainCamera(true),
517 mLinkedCameraId(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700518 m_pDualCamCmdHeap(NULL),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800519 m_pDualCamCmdPtr(NULL),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800520 mHdrPlusModeEnabled(false),
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -0700521 mZslEnabled(false),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800522 mIsApInputUsedForHdrPlus(false),
523 mFirstPreviewIntentSeen(false),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800524 m_bSensorHDREnabled(false)
Thierry Strudel3d639192016-09-09 11:52:26 -0700525{
526 getLogLevel();
Thierry Strudel3d639192016-09-09 11:52:26 -0700527 mCommon.init(gCamCapability[cameraId]);
528 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700529#ifndef USE_HAL_3_3
530 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
531#else
Thierry Strudel3d639192016-09-09 11:52:26 -0700532 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700533#endif
Thierry Strudel3d639192016-09-09 11:52:26 -0700534 mCameraDevice.common.close = close_camera_device;
535 mCameraDevice.ops = &mCameraOps;
536 mCameraDevice.priv = this;
537 gCamCapability[cameraId]->version = CAM_HAL_V3;
538 // TODO: hardcode for now until mctl add support for min_num_pp_bufs
539 //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
540 gCamCapability[cameraId]->min_num_pp_bufs = 3;
541
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800542 PTHREAD_COND_INIT(&mBuffersCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700543
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800544 PTHREAD_COND_INIT(&mRequestCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700545 mPendingLiveRequest = 0;
546 mCurrentRequestId = -1;
547 pthread_mutex_init(&mMutex, NULL);
548
549 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
550 mDefaultMetadata[i] = NULL;
551
552 // Getting system props of different kinds
553 char prop[PROPERTY_VALUE_MAX];
554 memset(prop, 0, sizeof(prop));
555 property_get("persist.camera.raw.dump", prop, "0");
556 mEnableRawDump = atoi(prop);
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800557 property_get("persist.camera.hal3.force.hdr", prop, "0");
558 mForceHdrSnapshot = atoi(prop);
559
Thierry Strudel3d639192016-09-09 11:52:26 -0700560 if (mEnableRawDump)
561 LOGD("Raw dump from Camera HAL enabled");
562
563 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
564 memset(mLdafCalib, 0, sizeof(mLdafCalib));
565
566 memset(prop, 0, sizeof(prop));
567 property_get("persist.camera.tnr.preview", prop, "0");
568 m_bTnrPreview = (uint8_t)atoi(prop);
569
570 memset(prop, 0, sizeof(prop));
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800571 property_get("persist.camera.swtnr.preview", prop, "1");
572 m_bSwTnrPreview = (uint8_t)atoi(prop);
573
574 memset(prop, 0, sizeof(prop));
Binhao Lincdb362a2017-04-20 13:31:54 -0700575 property_get("persist.camera.tnr.video", prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -0700576 m_bTnrVideo = (uint8_t)atoi(prop);
577
578 memset(prop, 0, sizeof(prop));
579 property_get("persist.camera.avtimer.debug", prop, "0");
580 m_debug_avtimer = (uint8_t)atoi(prop);
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800581 LOGI("AV timer enabled: %d", m_debug_avtimer);
Thierry Strudel3d639192016-09-09 11:52:26 -0700582
Thierry Strudel54dc9782017-02-15 12:12:10 -0800583 memset(prop, 0, sizeof(prop));
584 property_get("persist.camera.cacmode.disable", prop, "0");
585 m_cacModeDisabled = (uint8_t)atoi(prop);
586
Thierry Strudel3d639192016-09-09 11:52:26 -0700587 //Load and read GPU library.
588 lib_surface_utils = NULL;
589 LINK_get_surface_pixel_alignment = NULL;
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700590 mSurfaceStridePadding = CAM_PAD_TO_64;
591#ifdef CHECK_GPU_PIXEL_ALIGNMENT
Thierry Strudel3d639192016-09-09 11:52:26 -0700592 lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
593 if (lib_surface_utils) {
594 *(void **)&LINK_get_surface_pixel_alignment =
595 dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
596 if (LINK_get_surface_pixel_alignment) {
597 mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
598 }
599 dlclose(lib_surface_utils);
600 }
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700601#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +0000602 mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
603 mPDSupported = (0 <= mPDIndex) ? true : false;
604
Shuzhen Wangf6890e02016-08-12 14:28:54 -0700605 m60HzZone = is60HzZone();
Thierry Strudel3d639192016-09-09 11:52:26 -0700606}
607
608/*===========================================================================
609 * FUNCTION : ~QCamera3HardwareInterface
610 *
611 * DESCRIPTION: destructor of QCamera3HardwareInterface
612 *
613 * PARAMETERS : none
614 *
615 * RETURN : none
616 *==========================================================================*/
617QCamera3HardwareInterface::~QCamera3HardwareInterface()
618{
619 LOGD("E");
620
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800621 int32_t rc = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -0700622
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800623 // Disable power hint and enable the perf lock for close camera
624 mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
625 mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
626
627 // unlink of dualcam during close camera
628 if (mIsDeviceLinked) {
629 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
630 &m_pDualCamCmdPtr->bundle_info;
631 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
632 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
633 pthread_mutex_lock(&gCamLock);
634
635 if (mIsMainCamera == 1) {
636 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
637 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
638 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
639 // related session id should be session id of linked session
640 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
641 } else {
642 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
643 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
644 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
645 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
646 }
Thierry Strudel2896d122017-02-23 19:18:03 -0800647 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800648 pthread_mutex_unlock(&gCamLock);
649
650 rc = mCameraHandle->ops->set_dual_cam_cmd(
651 mCameraHandle->camera_handle);
652 if (rc < 0) {
653 LOGE("Dualcam: Unlink failed, but still proceed to close");
654 }
655 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700656
657 /* We need to stop all streams before deleting any stream */
658 if (mRawDumpChannel) {
659 mRawDumpChannel->stop();
660 }
661
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700662 if (mHdrPlusRawSrcChannel) {
663 mHdrPlusRawSrcChannel->stop();
664 }
665
Thierry Strudel3d639192016-09-09 11:52:26 -0700666 // NOTE: 'camera3_stream_t *' objects are already freed at
667 // this stage by the framework
668 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
669 it != mStreamInfo.end(); it++) {
670 QCamera3ProcessingChannel *channel = (*it)->channel;
671 if (channel) {
672 channel->stop();
673 }
674 }
675 if (mSupportChannel)
676 mSupportChannel->stop();
677
678 if (mAnalysisChannel) {
679 mAnalysisChannel->stop();
680 }
681 if (mMetadataChannel) {
682 mMetadataChannel->stop();
683 }
684 if (mChannelHandle) {
685 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
686 mChannelHandle);
687 LOGD("stopping channel %d", mChannelHandle);
688 }
689
690 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
691 it != mStreamInfo.end(); it++) {
692 QCamera3ProcessingChannel *channel = (*it)->channel;
693 if (channel)
694 delete channel;
695 free (*it);
696 }
697 if (mSupportChannel) {
698 delete mSupportChannel;
699 mSupportChannel = NULL;
700 }
701
702 if (mAnalysisChannel) {
703 delete mAnalysisChannel;
704 mAnalysisChannel = NULL;
705 }
706 if (mRawDumpChannel) {
707 delete mRawDumpChannel;
708 mRawDumpChannel = NULL;
709 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700710 if (mHdrPlusRawSrcChannel) {
711 delete mHdrPlusRawSrcChannel;
712 mHdrPlusRawSrcChannel = NULL;
713 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700714 if (mDummyBatchChannel) {
715 delete mDummyBatchChannel;
716 mDummyBatchChannel = NULL;
717 }
718
719 mPictureChannel = NULL;
Emilian Peev7650c122017-01-19 08:24:33 -0800720 mDepthChannel = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -0700721
722 if (mMetadataChannel) {
723 delete mMetadataChannel;
724 mMetadataChannel = NULL;
725 }
726
727 /* Clean up all channels */
728 if (mCameraInitialized) {
729 if(!mFirstConfiguration){
730 //send the last unconfigure
731 cam_stream_size_info_t stream_config_info;
732 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
733 stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
734 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -0800735 m_bIs4KVideo ? 0 :
736 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700737 clear_metadata_buffer(mParameters);
Thierry Strudel3d639192016-09-09 11:52:26 -0700738 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
739 stream_config_info);
740 int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
741 if (rc < 0) {
742 LOGE("set_parms failed for unconfigure");
743 }
744 }
745 deinitParameters();
746 }
747
748 if (mChannelHandle) {
749 mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
750 mChannelHandle);
751 LOGH("deleting channel %d", mChannelHandle);
752 mChannelHandle = 0;
753 }
754
755 if (mState != CLOSED)
756 closeCamera();
757
758 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
759 req.mPendingBufferList.clear();
760 }
761 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -0700762 for (pendingRequestIterator i = mPendingRequestsList.begin();
763 i != mPendingRequestsList.end();) {
764 i = erasePendingRequest(i);
765 }
766 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
767 if (mDefaultMetadata[i])
768 free_camera_metadata(mDefaultMetadata[i]);
769
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800770 mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700771
772 pthread_cond_destroy(&mRequestCond);
773
774 pthread_cond_destroy(&mBuffersCond);
775
776 pthread_mutex_destroy(&mMutex);
777 LOGD("X");
778}
779
780/*===========================================================================
781 * FUNCTION : erasePendingRequest
782 *
783 * DESCRIPTION: function to erase a desired pending request after freeing any
784 * allocated memory
785 *
786 * PARAMETERS :
787 * @i : iterator pointing to pending request to be erased
788 *
789 * RETURN : iterator pointing to the next request
790 *==========================================================================*/
791QCamera3HardwareInterface::pendingRequestIterator
792 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
793{
794 if (i->input_buffer != NULL) {
795 free(i->input_buffer);
796 i->input_buffer = NULL;
797 }
798 if (i->settings != NULL)
799 free_camera_metadata((camera_metadata_t*)i->settings);
800 return mPendingRequestsList.erase(i);
801}
802
803/*===========================================================================
804 * FUNCTION : camEvtHandle
805 *
806 * DESCRIPTION: Function registered to mm-camera-interface to handle events
807 *
808 * PARAMETERS :
809 * @camera_handle : interface layer camera handle
810 * @evt : ptr to event
811 * @user_data : user data ptr
812 *
813 * RETURN : none
814 *==========================================================================*/
815void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
816 mm_camera_event_t *evt,
817 void *user_data)
818{
819 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
820 if (obj && evt) {
821 switch(evt->server_event_type) {
822 case CAM_EVENT_TYPE_DAEMON_DIED:
823 pthread_mutex_lock(&obj->mMutex);
824 obj->mState = ERROR;
825 pthread_mutex_unlock(&obj->mMutex);
826 LOGE("Fatal, camera daemon died");
827 break;
828
829 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
830 LOGD("HAL got request pull from Daemon");
831 pthread_mutex_lock(&obj->mMutex);
832 obj->mWokenUpByDaemon = true;
833 obj->unblockRequestIfNecessary();
834 pthread_mutex_unlock(&obj->mMutex);
835 break;
836
837 default:
838 LOGW("Warning: Unhandled event %d",
839 evt->server_event_type);
840 break;
841 }
842 } else {
843 LOGE("NULL user_data/evt");
844 }
845}
846
847/*===========================================================================
848 * FUNCTION : openCamera
849 *
850 * DESCRIPTION: open camera
851 *
852 * PARAMETERS :
853 * @hw_device : double ptr for camera device struct
854 *
855 * RETURN : int32_t type of status
856 * NO_ERROR -- success
857 * none-zero failure code
858 *==========================================================================*/
859int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
860{
861 int rc = 0;
862 if (mState != CLOSED) {
863 *hw_device = NULL;
864 return PERMISSION_DENIED;
865 }
866
Chien-Yu Chene96475e2017-04-11 11:53:26 -0700867 logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800868 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700869 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
870 mCameraId);
871
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700872 if (mCameraHandle) {
873 LOGE("Failure: Camera already opened");
874 return ALREADY_EXISTS;
875 }
876
877 {
878 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen44abb642017-06-02 18:00:38 -0700879 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700880 logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
Chien-Yu Chen44abb642017-06-02 18:00:38 -0700881 rc = gEaselManagerClient->resume();
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700882 if (rc != 0) {
883 ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
884 return rc;
885 }
886 }
887 }
888
Thierry Strudel3d639192016-09-09 11:52:26 -0700889 rc = openCamera();
890 if (rc == 0) {
891 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800892 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700893 *hw_device = NULL;
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700894
895 // Suspend Easel because opening camera failed.
896 {
897 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen44abb642017-06-02 18:00:38 -0700898 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
899 status_t suspendErr = gEaselManagerClient->suspend();
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700900 if (suspendErr != 0) {
901 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
902 strerror(-suspendErr), suspendErr);
903 }
904 }
905 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800906 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700907
Thierry Strudel3d639192016-09-09 11:52:26 -0700908 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
909 mCameraId, rc);
910
911 if (rc == NO_ERROR) {
912 mState = OPENED;
913 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800914
Thierry Strudel3d639192016-09-09 11:52:26 -0700915 return rc;
916}
917
918/*===========================================================================
919 * FUNCTION : openCamera
920 *
921 * DESCRIPTION: open camera
922 *
923 * PARAMETERS : none
924 *
925 * RETURN : int32_t type of status
926 * NO_ERROR -- success
927 * none-zero failure code
928 *==========================================================================*/
929int QCamera3HardwareInterface::openCamera()
930{
931 int rc = 0;
932 char value[PROPERTY_VALUE_MAX];
933
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800934 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800935
Thierry Strudel3d639192016-09-09 11:52:26 -0700936 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
937 if (rc < 0) {
938 LOGE("Failed to reserve flash for camera id: %d",
939 mCameraId);
940 return UNKNOWN_ERROR;
941 }
942
943 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
944 if (rc) {
945 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
946 return rc;
947 }
948
949 if (!mCameraHandle) {
950 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
951 return -ENODEV;
952 }
953
954 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
955 camEvtHandle, (void *)this);
956
957 if (rc < 0) {
958 LOGE("Error, failed to register event callback");
959 /* Not closing camera here since it is already handled in destructor */
960 return FAILED_TRANSACTION;
961 }
962
963 mExifParams.debug_params =
964 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
965 if (mExifParams.debug_params) {
966 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
967 } else {
968 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
969 return NO_MEMORY;
970 }
971 mFirstConfiguration = true;
972
973 //Notify display HAL that a camera session is active.
974 //But avoid calling the same during bootup because camera service might open/close
975 //cameras at boot time during its initialization and display service will also internally
976 //wait for camera service to initialize first while calling this display API, resulting in a
977 //deadlock situation. Since boot time camera open/close calls are made only to fetch
978 //capabilities, no need of this display bw optimization.
979 //Use "service.bootanim.exit" property to know boot status.
980 property_get("service.bootanim.exit", value, "0");
981 if (atoi(value) == 1) {
982 pthread_mutex_lock(&gCamLock);
983 if (gNumCameraSessions++ == 0) {
984 setCameraLaunchStatus(true);
985 }
986 pthread_mutex_unlock(&gCamLock);
987 }
988
989 //fill the session id needed while linking dual cam
990 pthread_mutex_lock(&gCamLock);
991 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
992 &sessionId[mCameraId]);
993 pthread_mutex_unlock(&gCamLock);
994
995 if (rc < 0) {
996 LOGE("Error, failed to get sessiion id");
997 return UNKNOWN_ERROR;
998 } else {
999 //Allocate related cam sync buffer
1000 //this is needed for the payload that goes along with bundling cmd for related
1001 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001002 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
1003 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07001004 if(rc != OK) {
1005 rc = NO_MEMORY;
1006 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
1007 return NO_MEMORY;
1008 }
1009
1010 //Map memory for related cam sync buffer
1011 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001012 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
1013 m_pDualCamCmdHeap->getFd(0),
1014 sizeof(cam_dual_camera_cmd_info_t),
1015 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -07001016 if(rc < 0) {
1017 LOGE("Dualcam: failed to map Related cam sync buffer");
1018 rc = FAILED_TRANSACTION;
1019 return NO_MEMORY;
1020 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001021 m_pDualCamCmdPtr =
1022 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -07001023 }
1024
1025 LOGH("mCameraId=%d",mCameraId);
1026
1027 return NO_ERROR;
1028}
1029
1030/*===========================================================================
1031 * FUNCTION : closeCamera
1032 *
1033 * DESCRIPTION: close camera
1034 *
1035 * PARAMETERS : none
1036 *
1037 * RETURN : int32_t type of status
1038 * NO_ERROR -- success
1039 * none-zero failure code
1040 *==========================================================================*/
1041int QCamera3HardwareInterface::closeCamera()
1042{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001043 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -07001044 int rc = NO_ERROR;
1045 char value[PROPERTY_VALUE_MAX];
1046
1047 LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
1048 mCameraId);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001049
1050 // unmap memory for related cam sync buffer
1051 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001052 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001053 if (NULL != m_pDualCamCmdHeap) {
1054 m_pDualCamCmdHeap->deallocate();
1055 delete m_pDualCamCmdHeap;
1056 m_pDualCamCmdHeap = NULL;
1057 m_pDualCamCmdPtr = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001058 }
1059
Thierry Strudel3d639192016-09-09 11:52:26 -07001060 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
1061 mCameraHandle = NULL;
1062
1063 //reset session id to some invalid id
1064 pthread_mutex_lock(&gCamLock);
1065 sessionId[mCameraId] = 0xDEADBEEF;
1066 pthread_mutex_unlock(&gCamLock);
1067
1068 //Notify display HAL that there is no active camera session
1069 //but avoid calling the same during bootup. Refer to openCamera
1070 //for more details.
1071 property_get("service.bootanim.exit", value, "0");
1072 if (atoi(value) == 1) {
1073 pthread_mutex_lock(&gCamLock);
1074 if (--gNumCameraSessions == 0) {
1075 setCameraLaunchStatus(false);
1076 }
1077 pthread_mutex_unlock(&gCamLock);
1078 }
1079
Thierry Strudel3d639192016-09-09 11:52:26 -07001080 if (mExifParams.debug_params) {
1081 free(mExifParams.debug_params);
1082 mExifParams.debug_params = NULL;
1083 }
1084 if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
1085 LOGW("Failed to release flash for camera id: %d",
1086 mCameraId);
1087 }
1088 mState = CLOSED;
1089 LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
1090 mCameraId, rc);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001091
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001092 {
1093 Mutex::Autolock l(gHdrPlusClientLock);
1094 if (gHdrPlusClient != nullptr) {
1095 // Disable HDR+ mode.
1096 disableHdrPlusModeLocked();
1097 // Disconnect Easel if it's connected.
Chien-Yu Chen44abb642017-06-02 18:00:38 -07001098 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001099 gHdrPlusClient = nullptr;
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001100 }
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -07001101
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001102 if (EaselManagerClientOpened) {
Chien-Yu Chen44abb642017-06-02 18:00:38 -07001103 rc = gEaselManagerClient->stopMipi(mCameraId);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001104 if (rc != 0) {
1105 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1106 }
1107
Chien-Yu Chen44abb642017-06-02 18:00:38 -07001108 rc = gEaselManagerClient->suspend();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001109 if (rc != 0) {
1110 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1111 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001112 }
1113 }
1114
Thierry Strudel3d639192016-09-09 11:52:26 -07001115 return rc;
1116}
1117
1118/*===========================================================================
1119 * FUNCTION : initialize
1120 *
1121 * DESCRIPTION: Initialize frameworks callback functions
1122 *
1123 * PARAMETERS :
1124 * @callback_ops : callback function to frameworks
1125 *
1126 * RETURN :
1127 *
1128 *==========================================================================*/
1129int QCamera3HardwareInterface::initialize(
1130 const struct camera3_callback_ops *callback_ops)
1131{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001132 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -07001133 int rc;
1134
1135 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
1136 pthread_mutex_lock(&mMutex);
1137
1138 // Validate current state
1139 switch (mState) {
1140 case OPENED:
1141 /* valid state */
1142 break;
1143 default:
1144 LOGE("Invalid state %d", mState);
1145 rc = -ENODEV;
1146 goto err1;
1147 }
1148
1149 rc = initParameters();
1150 if (rc < 0) {
1151 LOGE("initParamters failed %d", rc);
1152 goto err1;
1153 }
1154 mCallbackOps = callback_ops;
1155
1156 mChannelHandle = mCameraHandle->ops->add_channel(
1157 mCameraHandle->camera_handle, NULL, NULL, this);
1158 if (mChannelHandle == 0) {
1159 LOGE("add_channel failed");
1160 rc = -ENOMEM;
1161 pthread_mutex_unlock(&mMutex);
1162 return rc;
1163 }
1164
1165 pthread_mutex_unlock(&mMutex);
1166 mCameraInitialized = true;
1167 mState = INITIALIZED;
1168 LOGI("X");
1169 return 0;
1170
1171err1:
1172 pthread_mutex_unlock(&mMutex);
1173 return rc;
1174}
1175
1176/*===========================================================================
1177 * FUNCTION : validateStreamDimensions
1178 *
1179 * DESCRIPTION: Check if the configuration requested are those advertised
1180 *
1181 * PARAMETERS :
1182 * @stream_list : streams to be configured
1183 *
1184 * RETURN :
1185 *
1186 *==========================================================================*/
1187int QCamera3HardwareInterface::validateStreamDimensions(
1188 camera3_stream_configuration_t *streamList)
1189{
1190 int rc = NO_ERROR;
1191 size_t count = 0;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001192 uint32_t depthWidth = 0;
1193 uint32_t depthHeight = 0;
1194 if (mPDSupported) {
1195 depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
1196 depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
1197 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001198
1199 camera3_stream_t *inputStream = NULL;
1200 /*
1201 * Loop through all streams to find input stream if it exists*
1202 */
1203 for (size_t i = 0; i< streamList->num_streams; i++) {
1204 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1205 if (inputStream != NULL) {
1206 LOGE("Error, Multiple input streams requested");
1207 return -EINVAL;
1208 }
1209 inputStream = streamList->streams[i];
1210 }
1211 }
1212 /*
1213 * Loop through all streams requested in configuration
1214 * Check if unsupported sizes have been requested on any of them
1215 */
1216 for (size_t j = 0; j < streamList->num_streams; j++) {
1217 bool sizeFound = false;
1218 camera3_stream_t *newStream = streamList->streams[j];
1219
1220 uint32_t rotatedHeight = newStream->height;
1221 uint32_t rotatedWidth = newStream->width;
1222 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1223 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1224 rotatedHeight = newStream->width;
1225 rotatedWidth = newStream->height;
1226 }
1227
1228 /*
1229 * Sizes are different for each type of stream format check against
1230 * appropriate table.
1231 */
1232 switch (newStream->format) {
1233 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1234 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1235 case HAL_PIXEL_FORMAT_RAW10:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001236 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1237 (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
1238 mPDSupported) {
1239 if ((depthWidth == newStream->width) &&
1240 (depthHeight == newStream->height)) {
1241 sizeFound = true;
1242 }
1243 break;
1244 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001245 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1246 for (size_t i = 0; i < count; i++) {
1247 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1248 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1249 sizeFound = true;
1250 break;
1251 }
1252 }
1253 break;
1254 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001255 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1256 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001257 //As per spec. depth cloud should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001258 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
Emilian Peev7650c122017-01-19 08:24:33 -08001259 if ((depthSamplesCount == newStream->width) &&
1260 (1 == newStream->height)) {
1261 sizeFound = true;
1262 }
1263 break;
1264 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001265 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1266 /* Verify set size against generated sizes table */
1267 for (size_t i = 0; i < count; i++) {
1268 if (((int32_t)rotatedWidth ==
1269 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1270 ((int32_t)rotatedHeight ==
1271 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1272 sizeFound = true;
1273 break;
1274 }
1275 }
1276 break;
1277 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1278 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1279 default:
1280 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1281 || newStream->stream_type == CAMERA3_STREAM_INPUT
1282 || IS_USAGE_ZSL(newStream->usage)) {
1283 if (((int32_t)rotatedWidth ==
1284 gCamCapability[mCameraId]->active_array_size.width) &&
1285 ((int32_t)rotatedHeight ==
1286 gCamCapability[mCameraId]->active_array_size.height)) {
1287 sizeFound = true;
1288 break;
1289 }
1290 /* We could potentially break here to enforce ZSL stream
1291 * set from frameworks always is full active array size
1292 * but it is not clear from the spc if framework will always
1293 * follow that, also we have logic to override to full array
1294 * size, so keeping the logic lenient at the moment
1295 */
1296 }
1297 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1298 MAX_SIZES_CNT);
1299 for (size_t i = 0; i < count; i++) {
1300 if (((int32_t)rotatedWidth ==
1301 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1302 ((int32_t)rotatedHeight ==
1303 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1304 sizeFound = true;
1305 break;
1306 }
1307 }
1308 break;
1309 } /* End of switch(newStream->format) */
1310
1311 /* We error out even if a single stream has unsupported size set */
1312 if (!sizeFound) {
1313 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1314 rotatedWidth, rotatedHeight, newStream->format,
1315 gCamCapability[mCameraId]->active_array_size.width,
1316 gCamCapability[mCameraId]->active_array_size.height);
1317 rc = -EINVAL;
1318 break;
1319 }
1320 } /* End of for each stream */
1321 return rc;
1322}
1323
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001324/*===========================================================================
1325 * FUNCTION : validateUsageFlags
1326 *
1327 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1328 *
1329 * PARAMETERS :
1330 * @stream_list : streams to be configured
1331 *
1332 * RETURN :
1333 * NO_ERROR if the usage flags are supported
1334 * error code if usage flags are not supported
1335 *
1336 *==========================================================================*/
1337int QCamera3HardwareInterface::validateUsageFlags(
1338 const camera3_stream_configuration_t* streamList)
1339{
1340 for (size_t j = 0; j < streamList->num_streams; j++) {
1341 const camera3_stream_t *newStream = streamList->streams[j];
1342
1343 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1344 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1345 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1346 continue;
1347 }
1348
Jason Leec4cf5032017-05-24 18:31:41 -07001349 // Here we only care whether it's EIS3 or not
1350 char is_type_value[PROPERTY_VALUE_MAX];
1351 property_get("persist.camera.is_type", is_type_value, "4");
1352 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1353 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1354 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1355 isType = IS_TYPE_NONE;
1356
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001357 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1358 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1359 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1360 bool forcePreviewUBWC = true;
1361 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1362 forcePreviewUBWC = false;
1363 }
1364 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001365 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001366 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001367 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001368 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001369 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001370
1371 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1372 // So color spaces will always match.
1373
1374 // Check whether underlying formats of shared streams match.
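        // Illustrative example (the specific formats here are an assumption, not
        // taken from this code): a stream flagged for both PREVIEW and
        // VIDEO_ENCODER could resolve to a UBWC default format for video but a
        // linear format for preview; one buffer format cannot satisfy both, so
        // such a combination is rejected below.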
1375 if (isVideo && isPreview && videoFormat != previewFormat) {
1376 LOGE("Combined video and preview usage flag is not supported");
1377 return -EINVAL;
1378 }
1379 if (isPreview && isZSL && previewFormat != zslFormat) {
1380 LOGE("Combined preview and zsl usage flag is not supported");
1381 return -EINVAL;
1382 }
1383 if (isVideo && isZSL && videoFormat != zslFormat) {
1384 LOGE("Combined video and zsl usage flag is not supported");
1385 return -EINVAL;
1386 }
1387 }
1388 return NO_ERROR;
1389}
1390
1391/*===========================================================================
1392 * FUNCTION : validateUsageFlagsForEis
1393 *
1394 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1395 *
1396 * PARAMETERS :
1397 * @stream_list : streams to be configured
1398 *
1399 * RETURN :
1400 * NO_ERROR if the usage flags are supported
1401 * error code if usage flags are not supported
1402 *
1403 *==========================================================================*/
1404int QCamera3HardwareInterface::validateUsageFlagsForEis(
1405 const camera3_stream_configuration_t* streamList)
1406{
1407 for (size_t j = 0; j < streamList->num_streams; j++) {
1408 const camera3_stream_t *newStream = streamList->streams[j];
1409
1410 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1411 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1412
1413        // Because EIS is "hard-coded" for certain use cases, and the current
1414        // implementation doesn't support shared preview and video on the same
1415        // stream, return failure if EIS is forced on.
1416 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1417 LOGE("Combined video and preview usage flag is not supported due to EIS");
1418 return -EINVAL;
1419 }
1420 }
1421 return NO_ERROR;
1422}
1423
Thierry Strudel3d639192016-09-09 11:52:26 -07001424/*==============================================================================
1425 * FUNCTION : isSupportChannelNeeded
1426 *
1427 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1428 *
1429 * PARAMETERS :
1430 * @stream_list : streams to be configured
1431 * @stream_config_info : the config info for streams to be configured
1432 *
1433 * RETURN     : Boolean true/false decision
1434 *
1435 *==========================================================================*/
1436bool QCamera3HardwareInterface::isSupportChannelNeeded(
1437 camera3_stream_configuration_t *streamList,
1438 cam_stream_size_info_t stream_config_info)
1439{
1440 uint32_t i;
1441 bool pprocRequested = false;
1442 /* Check for conditions where PProc pipeline does not have any streams*/
1443 for (i = 0; i < stream_config_info.num_streams; i++) {
1444 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1445 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1446 pprocRequested = true;
1447 break;
1448 }
1449 }
1450
1451 if (pprocRequested == false )
1452 return true;
1453
1454 /* Dummy stream needed if only raw or jpeg streams present */
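    // For example, a configuration containing only a JPEG (BLOB) stream and a
    // RAW16 stream still needs the internal support stream, whereas any YUV or
    // implementation-defined stream in the list makes it unnecessary.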
1455 for (i = 0; i < streamList->num_streams; i++) {
1456 switch(streamList->streams[i]->format) {
1457 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1458 case HAL_PIXEL_FORMAT_RAW10:
1459 case HAL_PIXEL_FORMAT_RAW16:
1460 case HAL_PIXEL_FORMAT_BLOB:
1461 break;
1462 default:
1463 return false;
1464 }
1465 }
1466 return true;
1467}
1468
1469/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001470 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001471 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001472 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001473 *
1474 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001475 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001476 *
1477 * RETURN : int32_t type of status
1478 * NO_ERROR -- success
1479 *              non-zero failure code
1480 *
1481 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001482int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001483{
1484 int32_t rc = NO_ERROR;
1485
1486 cam_dimension_t max_dim = {0, 0};
1487 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1488 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1489 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1490 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1491 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1492 }
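    // Worked example (stream sizes are hypothetical): with a 1920x1080 video
    // stream and a 1440x1440 callback stream configured, max_dim becomes
    // 1920x1440, since the width and height maxima are taken independently.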
1493
1494 clear_metadata_buffer(mParameters);
1495
1496 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1497 max_dim);
1498 if (rc != NO_ERROR) {
1499 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1500 return rc;
1501 }
1502
1503 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1504 if (rc != NO_ERROR) {
1505 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1506 return rc;
1507 }
1508
1509 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001510 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001511
1512 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1513 mParameters);
1514 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001515 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001516 return rc;
1517 }
1518
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001519 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001520 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1521 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1522 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1523 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1524 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001525
1526 return rc;
1527}
1528
1529/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001530 * FUNCTION : getCurrentSensorModeInfo
1531 *
1532 * DESCRIPTION: Get sensor mode information that is currently selected.
1533 *
1534 * PARAMETERS :
1535 * @sensorModeInfo : sensor mode information (output)
1536 *
1537 * RETURN : int32_t type of status
1538 * NO_ERROR -- success
1539 *              non-zero failure code
1540 *
1541 *==========================================================================*/
1542int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1543{
1544 int32_t rc = NO_ERROR;
1545
1546 clear_metadata_buffer(mParameters);
1547 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1548
1549 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1550 mParameters);
1551 if (rc != NO_ERROR) {
1552        LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1553 return rc;
1554 }
1555
1556 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1557 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1558 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1559 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1560 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1561 sensorModeInfo.num_raw_bits);
1562
1563 return rc;
1564}
1565
1566/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001567 * FUNCTION : addToPPFeatureMask
1568 *
1569 * DESCRIPTION: add additional features to pp feature mask based on
1570 * stream type and usecase
1571 *
1572 * PARAMETERS :
1573 * @stream_format : stream type for feature mask
1574 * @stream_idx : stream idx within postprocess_mask list to change
1575 *
1576 * RETURN     : None
1577 *
1578 *==========================================================================*/
1579void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1580 uint32_t stream_idx)
1581{
1582 char feature_mask_value[PROPERTY_VALUE_MAX];
1583 cam_feature_mask_t feature_mask;
1584 int args_converted;
1585 int property_len;
1586
1587 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001588#ifdef _LE_CAMERA_
1589 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1590 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1591 property_len = property_get("persist.camera.hal3.feature",
1592 feature_mask_value, swtnr_feature_mask_value);
1593#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001594 property_len = property_get("persist.camera.hal3.feature",
1595 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001596#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001597 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1598 (feature_mask_value[1] == 'x')) {
1599 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1600 } else {
1601 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1602 }
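    // The property accepts either hex ("0x..." parsed with %llx) or decimal
    // (parsed with %lld); e.g. "adb shell setprop persist.camera.hal3.feature 0x1000"
    // would be read as a hex mask (the value is purely illustrative, not a
    // defined feature bit).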
1603 if (1 != args_converted) {
1604 feature_mask = 0;
1605 LOGE("Wrong feature mask %s", feature_mask_value);
1606 return;
1607 }
1608
1609 switch (stream_format) {
1610 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1611 /* Add LLVD to pp feature mask only if video hint is enabled */
1612 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1613 mStreamConfigInfo.postprocess_mask[stream_idx]
1614 |= CAM_QTI_FEATURE_SW_TNR;
1615 LOGH("Added SW TNR to pp feature mask");
1616 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1617 mStreamConfigInfo.postprocess_mask[stream_idx]
1618 |= CAM_QCOM_FEATURE_LLVD;
1619 LOGH("Added LLVD SeeMore to pp feature mask");
1620 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001621 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1622 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1623 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1624 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001625 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1626 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1627 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1628 CAM_QTI_FEATURE_BINNING_CORRECTION;
1629 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001630 break;
1631 }
1632 default:
1633 break;
1634 }
1635 LOGD("PP feature mask %llx",
1636 mStreamConfigInfo.postprocess_mask[stream_idx]);
1637}
1638
1639/*==============================================================================
1640 * FUNCTION : updateFpsInPreviewBuffer
1641 *
1642 * DESCRIPTION: update FPS information in preview buffer.
1643 *
1644 * PARAMETERS :
1645 * @metadata : pointer to metadata buffer
1646 * @frame_number: frame_number to look for in pending buffer list
1647 *
1648 * RETURN : None
1649 *
1650 *==========================================================================*/
1651void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1652 uint32_t frame_number)
1653{
1654 // Mark all pending buffers for this particular request
1655 // with corresponding framerate information
1656 for (List<PendingBuffersInRequest>::iterator req =
1657 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1658 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1659 for(List<PendingBufferInfo>::iterator j =
1660 req->mPendingBufferList.begin();
1661 j != req->mPendingBufferList.end(); j++) {
1662 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1663 if ((req->frame_number == frame_number) &&
1664 (channel->getStreamTypeMask() &
1665 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1666 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1667 CAM_INTF_PARM_FPS_RANGE, metadata) {
1668 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1669 struct private_handle_t *priv_handle =
1670 (struct private_handle_t *)(*(j->buffer));
1671 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1672 }
1673 }
1674 }
1675 }
1676}
1677
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001678/*==============================================================================
1679 * FUNCTION : updateTimeStampInPendingBuffers
1680 *
1681 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1682 * of a frame number
1683 *
1684 * PARAMETERS :
1685 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1686 * @timestamp : timestamp to be set
1687 *
1688 * RETURN : None
1689 *
1690 *==========================================================================*/
1691void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1692 uint32_t frameNumber, nsecs_t timestamp)
1693{
1694 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1695 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1696 if (req->frame_number != frameNumber)
1697 continue;
1698
1699 for (auto k = req->mPendingBufferList.begin();
1700 k != req->mPendingBufferList.end(); k++ ) {
1701 struct private_handle_t *priv_handle =
1702 (struct private_handle_t *) (*(k->buffer));
1703 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1704 }
1705 }
1706 return;
1707}
1708
Thierry Strudel3d639192016-09-09 11:52:26 -07001709/*===========================================================================
1710 * FUNCTION : configureStreams
1711 *
1712 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1713 * and output streams.
1714 *
1715 * PARAMETERS :
1716 * @stream_list : streams to be configured
1717 *
1718 * RETURN     : int type of status
1719 *
1720 *==========================================================================*/
1721int QCamera3HardwareInterface::configureStreams(
1722 camera3_stream_configuration_t *streamList)
1723{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001724 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001725 int rc = 0;
1726
1727 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001728 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001729 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001730 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001731
1732 return rc;
1733}
1734
1735/*===========================================================================
1736 * FUNCTION : configureStreamsPerfLocked
1737 *
1738 * DESCRIPTION: configureStreams while perfLock is held.
1739 *
1740 * PARAMETERS :
1741 * @stream_list : streams to be configured
1742 *
1743 * RETURN : int32_t type of status
1744 * NO_ERROR -- success
1745 *              non-zero failure code
1746 *==========================================================================*/
1747int QCamera3HardwareInterface::configureStreamsPerfLocked(
1748 camera3_stream_configuration_t *streamList)
1749{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001750 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001751 int rc = 0;
1752
1753 // Sanity check stream_list
1754 if (streamList == NULL) {
1755 LOGE("NULL stream configuration");
1756 return BAD_VALUE;
1757 }
1758 if (streamList->streams == NULL) {
1759 LOGE("NULL stream list");
1760 return BAD_VALUE;
1761 }
1762
1763 if (streamList->num_streams < 1) {
1764 LOGE("Bad number of streams requested: %d",
1765 streamList->num_streams);
1766 return BAD_VALUE;
1767 }
1768
1769 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1770 LOGE("Maximum number of streams %d exceeded: %d",
1771 MAX_NUM_STREAMS, streamList->num_streams);
1772 return BAD_VALUE;
1773 }
1774
Jason Leec4cf5032017-05-24 18:31:41 -07001775 mOpMode = streamList->operation_mode;
1776 LOGD("mOpMode: %d", mOpMode);
1777
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001778 rc = validateUsageFlags(streamList);
1779 if (rc != NO_ERROR) {
1780 return rc;
1781 }
1782
Thierry Strudel3d639192016-09-09 11:52:26 -07001783    /* First invalidate all the streams in mStreamInfo;
1784 * if they appear again, they will be validated */
1785 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1786 it != mStreamInfo.end(); it++) {
1787 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1788 if (channel) {
1789 channel->stop();
1790 }
1791 (*it)->status = INVALID;
1792 }
1793
1794 if (mRawDumpChannel) {
1795 mRawDumpChannel->stop();
1796 delete mRawDumpChannel;
1797 mRawDumpChannel = NULL;
1798 }
1799
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001800 if (mHdrPlusRawSrcChannel) {
1801 mHdrPlusRawSrcChannel->stop();
1802 delete mHdrPlusRawSrcChannel;
1803 mHdrPlusRawSrcChannel = NULL;
1804 }
1805
Thierry Strudel3d639192016-09-09 11:52:26 -07001806 if (mSupportChannel)
1807 mSupportChannel->stop();
1808
1809 if (mAnalysisChannel) {
1810 mAnalysisChannel->stop();
1811 }
1812 if (mMetadataChannel) {
1813        /* If mStreamInfo is not empty, there is a metadata stream */
1814 mMetadataChannel->stop();
1815 }
1816 if (mChannelHandle) {
1817 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1818 mChannelHandle);
1819 LOGD("stopping channel %d", mChannelHandle);
1820 }
1821
1822 pthread_mutex_lock(&mMutex);
1823
1824 // Check state
1825 switch (mState) {
1826 case INITIALIZED:
1827 case CONFIGURED:
1828 case STARTED:
1829 /* valid state */
1830 break;
1831 default:
1832 LOGE("Invalid state %d", mState);
1833 pthread_mutex_unlock(&mMutex);
1834 return -ENODEV;
1835 }
1836
1837 /* Check whether we have video stream */
1838 m_bIs4KVideo = false;
1839 m_bIsVideo = false;
1840 m_bEisSupportedSize = false;
1841 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001842 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001843 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001844 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001845 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001846 uint32_t videoWidth = 0U;
1847 uint32_t videoHeight = 0U;
1848 size_t rawStreamCnt = 0;
1849 size_t stallStreamCnt = 0;
1850 size_t processedStreamCnt = 0;
1851 // Number of streams on ISP encoder path
1852 size_t numStreamsOnEncoder = 0;
1853 size_t numYuv888OnEncoder = 0;
1854 bool bYuv888OverrideJpeg = false;
1855 cam_dimension_t largeYuv888Size = {0, 0};
1856 cam_dimension_t maxViewfinderSize = {0, 0};
1857 bool bJpegExceeds4K = false;
1858 bool bJpegOnEncoder = false;
1859 bool bUseCommonFeatureMask = false;
1860 cam_feature_mask_t commonFeatureMask = 0;
1861 bool bSmallJpegSize = false;
1862 uint32_t width_ratio;
1863 uint32_t height_ratio;
1864 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1865 camera3_stream_t *inputStream = NULL;
1866 bool isJpeg = false;
1867 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001868 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001869 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001870
1871 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1872
1873 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001874 uint8_t eis_prop_set;
1875 uint32_t maxEisWidth = 0;
1876 uint32_t maxEisHeight = 0;
1877
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001878 // Initialize all instant AEC related variables
1879 mInstantAEC = false;
1880 mResetInstantAEC = false;
1881 mInstantAECSettledFrameNumber = 0;
1882 mAecSkipDisplayFrameBound = 0;
1883 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001884 mCurrFeatureState = 0;
1885 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001886
Thierry Strudel3d639192016-09-09 11:52:26 -07001887 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1888
1889 size_t count = IS_TYPE_MAX;
1890 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1891 for (size_t i = 0; i < count; i++) {
1892 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001893 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1894 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001895 break;
1896 }
1897 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001898
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001899 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001900 maxEisWidth = MAX_EIS_WIDTH;
1901 maxEisHeight = MAX_EIS_HEIGHT;
1902 }
1903
1904 /* EIS setprop control */
1905 char eis_prop[PROPERTY_VALUE_MAX];
1906 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001907 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001908 eis_prop_set = (uint8_t)atoi(eis_prop);
1909
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001910 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001911 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1912
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001913 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1914 m_bEisEnable, eis_prop_set, m_bEisSupported);
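    // Note: m_bEisEnable can still be cleared further below when the camera is
    // front-facing (including front aux) or when no video stream is configured,
    // so EIS ends up in effect only for rear-camera video sessions that also
    // pass the property, capability and high-speed-mode checks above.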
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001915
Thierry Strudel3d639192016-09-09 11:52:26 -07001916 /* stream configurations */
1917 for (size_t i = 0; i < streamList->num_streams; i++) {
1918 camera3_stream_t *newStream = streamList->streams[i];
1919 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1920 "height = %d, rotation = %d, usage = 0x%x",
1921 i, newStream->stream_type, newStream->format,
1922 newStream->width, newStream->height, newStream->rotation,
1923 newStream->usage);
1924 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1925 newStream->stream_type == CAMERA3_STREAM_INPUT){
1926 isZsl = true;
1927 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001928 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1929 IS_USAGE_PREVIEW(newStream->usage)) {
1930 isPreview = true;
1931 }
1932
Thierry Strudel3d639192016-09-09 11:52:26 -07001933 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1934 inputStream = newStream;
1935 }
1936
Emilian Peev7650c122017-01-19 08:24:33 -08001937 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1938 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001939 isJpeg = true;
1940 jpegSize.width = newStream->width;
1941 jpegSize.height = newStream->height;
1942 if (newStream->width > VIDEO_4K_WIDTH ||
1943 newStream->height > VIDEO_4K_HEIGHT)
1944 bJpegExceeds4K = true;
1945 }
1946
1947 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1948 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1949 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001950 // In HAL3 we can have multiple different video streams.
1951 // The variables video width and height are used below as
1952 // dimensions of the biggest of them
1953 if (videoWidth < newStream->width ||
1954 videoHeight < newStream->height) {
1955 videoWidth = newStream->width;
1956 videoHeight = newStream->height;
1957 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001958 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1959 (VIDEO_4K_HEIGHT <= newStream->height)) {
1960 m_bIs4KVideo = true;
1961 }
1962 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1963 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001964
Thierry Strudel3d639192016-09-09 11:52:26 -07001965 }
1966 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1967 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1968 switch (newStream->format) {
1969 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001970 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1971 depthPresent = true;
1972 break;
1973 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001974 stallStreamCnt++;
1975 if (isOnEncoder(maxViewfinderSize, newStream->width,
1976 newStream->height)) {
1977 numStreamsOnEncoder++;
1978 bJpegOnEncoder = true;
1979 }
1980 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1981 newStream->width);
1982 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1983                        newStream->height);
1984 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1985 "FATAL: max_downscale_factor cannot be zero and so assert");
1986 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1987 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1988 LOGH("Setting small jpeg size flag to true");
1989 bSmallJpegSize = true;
1990 }
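                // Worked example (numbers are illustrative): with a 4000x3000
                // active array and a 640x480 JPEG stream, width_ratio =
                // ceil(4000/640) = 7 and height_ratio = ceil(3000/480) = 7; if
                // max_downscale_factor is 4, the small-JPEG flag is set here.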
1991 break;
1992 case HAL_PIXEL_FORMAT_RAW10:
1993 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1994 case HAL_PIXEL_FORMAT_RAW16:
1995 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001996 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1997 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1998 pdStatCount++;
1999 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002000 break;
2001 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2002 processedStreamCnt++;
2003 if (isOnEncoder(maxViewfinderSize, newStream->width,
2004 newStream->height)) {
2005 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2006 !IS_USAGE_ZSL(newStream->usage)) {
2007 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2008 }
2009 numStreamsOnEncoder++;
2010 }
2011 break;
2012 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2013 processedStreamCnt++;
2014 if (isOnEncoder(maxViewfinderSize, newStream->width,
2015 newStream->height)) {
2016 // If Yuv888 size is not greater than 4K, set feature mask
2017                // to SUPERSET so that it supports concurrent requests on
2018 // YUV and JPEG.
2019 if (newStream->width <= VIDEO_4K_WIDTH &&
2020 newStream->height <= VIDEO_4K_HEIGHT) {
2021 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2022 }
2023 numStreamsOnEncoder++;
2024 numYuv888OnEncoder++;
2025 largeYuv888Size.width = newStream->width;
2026 largeYuv888Size.height = newStream->height;
2027 }
2028 break;
2029 default:
2030 processedStreamCnt++;
2031 if (isOnEncoder(maxViewfinderSize, newStream->width,
2032 newStream->height)) {
2033 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2034 numStreamsOnEncoder++;
2035 }
2036 break;
2037 }
2038
2039 }
2040 }
2041
2042 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2043 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2044 !m_bIsVideo) {
2045 m_bEisEnable = false;
2046 }
2047
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002048 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2049 pthread_mutex_unlock(&mMutex);
2050 return -EINVAL;
2051 }
2052
Thierry Strudel54dc9782017-02-15 12:12:10 -08002053 uint8_t forceEnableTnr = 0;
2054 char tnr_prop[PROPERTY_VALUE_MAX];
2055 memset(tnr_prop, 0, sizeof(tnr_prop));
2056 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2057 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2058
Thierry Strudel3d639192016-09-09 11:52:26 -07002059 /* Logic to enable/disable TNR based on specific config size/etc.*/
2060 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002061 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2062 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002063 else if (forceEnableTnr)
2064 m_bTnrEnabled = true;
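    // In other words, TNR is normally enabled only when a video stream is
    // present, TNR is requested for preview or video, and the session is not in
    // constrained high-speed mode; a non-zero debug.camera.tnr.forceenable
    // overrides that check.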
Thierry Strudel3d639192016-09-09 11:52:26 -07002065
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002066 char videoHdrProp[PROPERTY_VALUE_MAX];
2067 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2068 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2069 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2070
2071 if (hdr_mode_prop == 1 && m_bIsVideo &&
2072 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2073 m_bVideoHdrEnabled = true;
2074 else
2075 m_bVideoHdrEnabled = false;
2076
2077
Thierry Strudel3d639192016-09-09 11:52:26 -07002078 /* Check if num_streams is sane */
2079 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2080 rawStreamCnt > MAX_RAW_STREAMS ||
2081 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2082        LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2083 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2084 pthread_mutex_unlock(&mMutex);
2085 return -EINVAL;
2086 }
2087 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002088 if (isZsl && m_bIs4KVideo) {
2089 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002090 pthread_mutex_unlock(&mMutex);
2091 return -EINVAL;
2092 }
2093 /* Check if stream sizes are sane */
2094 if (numStreamsOnEncoder > 2) {
2095 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2096 pthread_mutex_unlock(&mMutex);
2097 return -EINVAL;
2098 } else if (1 < numStreamsOnEncoder){
2099 bUseCommonFeatureMask = true;
2100 LOGH("Multiple streams above max viewfinder size, common mask needed");
2101 }
2102
2103 /* Check if BLOB size is greater than 4k in 4k recording case */
2104 if (m_bIs4KVideo && bJpegExceeds4K) {
2105 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2106 pthread_mutex_unlock(&mMutex);
2107 return -EINVAL;
2108 }
2109
Emilian Peev7650c122017-01-19 08:24:33 -08002110 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2111 depthPresent) {
2112 LOGE("HAL doesn't support depth streams in HFR mode!");
2113 pthread_mutex_unlock(&mMutex);
2114 return -EINVAL;
2115 }
2116
Thierry Strudel3d639192016-09-09 11:52:26 -07002117 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2118 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2119 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2120 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2121 // configurations:
2122 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2123 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2124 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2125 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2126 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2127 __func__);
2128 pthread_mutex_unlock(&mMutex);
2129 return -EINVAL;
2130 }
2131
2132 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2133 // the YUV stream's size is greater or equal to the JPEG size, set common
2134 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2135 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2136 jpegSize.width, jpegSize.height) &&
2137 largeYuv888Size.width > jpegSize.width &&
2138 largeYuv888Size.height > jpegSize.height) {
2139 bYuv888OverrideJpeg = true;
2140 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2141 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2142 }
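    // Illustrative example (sizes are hypothetical): with a 3264x2448 JPEG
    // stream and a larger 4160x3120 YCbCr_420_888 stream both on the encoder
    // path, bYuv888OverrideJpeg is set, and the JPEG stream later inherits the
    // YUV888 dimensions so postprocessing can be bypassed.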
2143
2144 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2145 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2146 commonFeatureMask);
2147 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2148 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2149
2150 rc = validateStreamDimensions(streamList);
2151 if (rc == NO_ERROR) {
2152 rc = validateStreamRotations(streamList);
2153 }
2154 if (rc != NO_ERROR) {
2155 LOGE("Invalid stream configuration requested!");
2156 pthread_mutex_unlock(&mMutex);
2157 return rc;
2158 }
2159
Emilian Peev0f3c3162017-03-15 12:57:46 +00002160 if (1 < pdStatCount) {
2161 LOGE("HAL doesn't support multiple PD streams");
2162 pthread_mutex_unlock(&mMutex);
2163 return -EINVAL;
2164 }
2165
2166 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2167 (1 == pdStatCount)) {
2168 LOGE("HAL doesn't support PD streams in HFR mode!");
2169 pthread_mutex_unlock(&mMutex);
2170 return -EINVAL;
2171 }
2172
Thierry Strudel3d639192016-09-09 11:52:26 -07002173 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2174 for (size_t i = 0; i < streamList->num_streams; i++) {
2175 camera3_stream_t *newStream = streamList->streams[i];
2176 LOGH("newStream type = %d, stream format = %d "
2177 "stream size : %d x %d, stream rotation = %d",
2178 newStream->stream_type, newStream->format,
2179 newStream->width, newStream->height, newStream->rotation);
2180        //if the stream is already in mStreamInfo, validate it
2181 bool stream_exists = false;
2182 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2183 it != mStreamInfo.end(); it++) {
2184 if ((*it)->stream == newStream) {
2185 QCamera3ProcessingChannel *channel =
2186 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2187 stream_exists = true;
2188 if (channel)
2189 delete channel;
2190 (*it)->status = VALID;
2191 (*it)->stream->priv = NULL;
2192 (*it)->channel = NULL;
2193 }
2194 }
2195 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2196 //new stream
2197 stream_info_t* stream_info;
2198 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2199 if (!stream_info) {
2200 LOGE("Could not allocate stream info");
2201 rc = -ENOMEM;
2202 pthread_mutex_unlock(&mMutex);
2203 return rc;
2204 }
2205 stream_info->stream = newStream;
2206 stream_info->status = VALID;
2207 stream_info->channel = NULL;
2208 mStreamInfo.push_back(stream_info);
2209 }
2210 /* Covers Opaque ZSL and API1 F/W ZSL */
2211 if (IS_USAGE_ZSL(newStream->usage)
2212 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2213 if (zslStream != NULL) {
2214 LOGE("Multiple input/reprocess streams requested!");
2215 pthread_mutex_unlock(&mMutex);
2216 return BAD_VALUE;
2217 }
2218 zslStream = newStream;
2219 }
2220 /* Covers YUV reprocess */
2221 if (inputStream != NULL) {
2222 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2223 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2224 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2225 && inputStream->width == newStream->width
2226 && inputStream->height == newStream->height) {
2227 if (zslStream != NULL) {
2228                /* This scenario indicates that multiple YUV streams with the same
2229                 * size as the input stream have been requested. Since the zsl stream
2230                 * handle is solely used for overriding the size of streams which
2231                 * share h/w streams, we will just make a guess here as to which of
2232                 * the streams is a ZSL stream. This will be refactored once we have
2233                 * generic logic for streams sharing encoder output.
2234                 */
2235                LOGH("Warning, Multiple input/reprocess streams requested!");
2236 }
2237 zslStream = newStream;
2238 }
2239 }
2240 }
2241
2242 /* If a zsl stream is set, we know that we have configured at least one input or
2243 bidirectional stream */
2244 if (NULL != zslStream) {
2245 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2246 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2247 mInputStreamInfo.format = zslStream->format;
2248 mInputStreamInfo.usage = zslStream->usage;
2249 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2250 mInputStreamInfo.dim.width,
2251 mInputStreamInfo.dim.height,
2252 mInputStreamInfo.format, mInputStreamInfo.usage);
2253 }
2254
2255 cleanAndSortStreamInfo();
2256 if (mMetadataChannel) {
2257 delete mMetadataChannel;
2258 mMetadataChannel = NULL;
2259 }
2260 if (mSupportChannel) {
2261 delete mSupportChannel;
2262 mSupportChannel = NULL;
2263 }
2264
2265 if (mAnalysisChannel) {
2266 delete mAnalysisChannel;
2267 mAnalysisChannel = NULL;
2268 }
2269
2270 if (mDummyBatchChannel) {
2271 delete mDummyBatchChannel;
2272 mDummyBatchChannel = NULL;
2273 }
2274
Emilian Peev7650c122017-01-19 08:24:33 -08002275 if (mDepthChannel) {
2276 mDepthChannel = NULL;
2277 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002278 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002279
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002280 mShutterDispatcher.clear();
2281 mOutputBufferDispatcher.clear();
2282
Thierry Strudel2896d122017-02-23 19:18:03 -08002283 char is_type_value[PROPERTY_VALUE_MAX];
2284 property_get("persist.camera.is_type", is_type_value, "4");
2285 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2286
Binhao Line406f062017-05-03 14:39:44 -07002287 char property_value[PROPERTY_VALUE_MAX];
2288 property_get("persist.camera.gzoom.at", property_value, "0");
2289 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002290 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2291 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2292 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2293 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002294
2295 property_get("persist.camera.gzoom.4k", property_value, "0");
2296 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
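    // For example, setting persist.camera.gzoom.at to 3 enables the Google zoom
    // postprocess feature on both video (bit 0) and preview (bit 1) streams, back
    // camera only; persist.camera.gzoom.4k additionally allows it for 4K video.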
2297
Thierry Strudel3d639192016-09-09 11:52:26 -07002298 //Create metadata channel and initialize it
2299 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2300 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2301 gCamCapability[mCameraId]->color_arrangement);
2302 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2303 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002304 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002305 if (mMetadataChannel == NULL) {
2306 LOGE("failed to allocate metadata channel");
2307 rc = -ENOMEM;
2308 pthread_mutex_unlock(&mMutex);
2309 return rc;
2310 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002311 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002312 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2313 if (rc < 0) {
2314 LOGE("metadata channel initialization failed");
2315 delete mMetadataChannel;
2316 mMetadataChannel = NULL;
2317 pthread_mutex_unlock(&mMutex);
2318 return rc;
2319 }
2320
Thierry Strudel2896d122017-02-23 19:18:03 -08002321 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002322 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002323 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002324    // Keep track of preview/video stream indices.
2325    // There could be more than one preview stream, but only one video stream.
2326 int32_t video_stream_idx = -1;
2327 int32_t preview_stream_idx[streamList->num_streams];
2328 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002329 bool previewTnr[streamList->num_streams];
2330 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2331 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2332 // Loop through once to determine preview TNR conditions before creating channels.
2333 for (size_t i = 0; i < streamList->num_streams; i++) {
2334 camera3_stream_t *newStream = streamList->streams[i];
2335 uint32_t stream_usage = newStream->usage;
2336 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2337 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2338 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2339 video_stream_idx = (int32_t)i;
2340 else
2341 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2342 }
2343 }
2344 // By default, preview stream TNR is disabled.
2345 // Enable TNR to the preview stream if all conditions below are satisfied:
2346 // 1. preview resolution == video resolution.
2347 // 2. video stream TNR is enabled.
2348 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
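    // For instance, a 1920x1080 preview paired with a 1920x1080 TNR-enabled video
    // stream, on EIS 2.0 or on the front camera, also gets TNR on the preview
    // stream; a resolution mismatch leaves preview TNR off.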
2349 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2350 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2351 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2352 if (m_bTnrEnabled && m_bTnrVideo &&
2353 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2354 video_stream->width == preview_stream->width &&
2355 video_stream->height == preview_stream->height) {
2356 previewTnr[preview_stream_idx[i]] = true;
2357 }
2358 }
2359
Thierry Strudel3d639192016-09-09 11:52:26 -07002360 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2361 /* Allocate channel objects for the requested streams */
2362 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002363
Thierry Strudel3d639192016-09-09 11:52:26 -07002364 camera3_stream_t *newStream = streamList->streams[i];
2365 uint32_t stream_usage = newStream->usage;
2366 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2367 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2368 struct camera_info *p_info = NULL;
2369 pthread_mutex_lock(&gCamLock);
2370 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2371 pthread_mutex_unlock(&gCamLock);
2372 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2373 || IS_USAGE_ZSL(newStream->usage)) &&
2374 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002375 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002376 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002377 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2378 if (bUseCommonFeatureMask)
2379 zsl_ppmask = commonFeatureMask;
2380 else
2381 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002382 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002383 if (numStreamsOnEncoder > 0)
2384 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2385 else
2386 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002387 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002388 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002389 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002390 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002391 LOGH("Input stream configured, reprocess config");
2392 } else {
2393 //for non zsl streams find out the format
2394 switch (newStream->format) {
2395 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2396 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002397 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002398 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2399 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2400 /* add additional features to pp feature mask */
2401 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2402 mStreamConfigInfo.num_streams);
2403
2404 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2405 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2406 CAM_STREAM_TYPE_VIDEO;
2407 if (m_bTnrEnabled && m_bTnrVideo) {
2408 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2409 CAM_QCOM_FEATURE_CPP_TNR;
2410 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2411 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2412 ~CAM_QCOM_FEATURE_CDS;
2413 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002414 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2415 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2416 CAM_QTI_FEATURE_PPEISCORE;
2417 }
Binhao Line406f062017-05-03 14:39:44 -07002418 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2419 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2420 CAM_QCOM_FEATURE_GOOG_ZOOM;
2421 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002422 } else {
2423 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2424 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002425 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002426 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2427 CAM_QCOM_FEATURE_CPP_TNR;
2428 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2429 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2430 ~CAM_QCOM_FEATURE_CDS;
2431 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002432 if(!m_bSwTnrPreview) {
2433 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2434 ~CAM_QTI_FEATURE_SW_TNR;
2435 }
Binhao Line406f062017-05-03 14:39:44 -07002436 if (is_goog_zoom_preview_enabled) {
2437 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2438 CAM_QCOM_FEATURE_GOOG_ZOOM;
2439 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002440 padding_info.width_padding = mSurfaceStridePadding;
2441 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002442 previewSize.width = (int32_t)newStream->width;
2443 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002444 }
2445 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2446 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2447 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2448 newStream->height;
2449 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2450 newStream->width;
2451 }
2452 }
2453 break;
2454 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002455 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002456 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2457 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2458 if (bUseCommonFeatureMask)
2459 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2460 commonFeatureMask;
2461 else
2462 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2463 CAM_QCOM_FEATURE_NONE;
2464 } else {
2465 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2466 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2467 }
2468 break;
2469 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002470 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002471 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2472 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2473 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2474 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2475 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002476 /* Remove rotation if it is not supported
2477 for 4K LiveVideo snapshot case (online processing) */
2478 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2479 CAM_QCOM_FEATURE_ROTATION)) {
2480 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2481 &= ~CAM_QCOM_FEATURE_ROTATION;
2482 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002483 } else {
2484 if (bUseCommonFeatureMask &&
2485 isOnEncoder(maxViewfinderSize, newStream->width,
2486 newStream->height)) {
2487 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2488 } else {
2489 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2490 }
2491 }
2492 if (isZsl) {
2493 if (zslStream) {
2494 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2495 (int32_t)zslStream->width;
2496 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2497 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002498 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2499 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002500 } else {
2501 LOGE("Error, No ZSL stream identified");
2502 pthread_mutex_unlock(&mMutex);
2503 return -EINVAL;
2504 }
2505 } else if (m_bIs4KVideo) {
2506 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2507 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2508 } else if (bYuv888OverrideJpeg) {
2509 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2510 (int32_t)largeYuv888Size.width;
2511 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2512 (int32_t)largeYuv888Size.height;
2513 }
2514 break;
2515 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2516 case HAL_PIXEL_FORMAT_RAW16:
2517 case HAL_PIXEL_FORMAT_RAW10:
2518 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2519 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2520 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002521 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2522 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2523 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2524 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2525 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2526 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2527 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2528 gCamCapability[mCameraId]->dt[mPDIndex];
2529 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2530 gCamCapability[mCameraId]->vc[mPDIndex];
2531 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002532 break;
2533 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002534 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002535 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2536 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2537 break;
2538 }
2539 }
2540
2541 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2542 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2543 gCamCapability[mCameraId]->color_arrangement);
2544
2545 if (newStream->priv == NULL) {
2546 //New stream, construct channel
2547 switch (newStream->stream_type) {
2548 case CAMERA3_STREAM_INPUT:
2549 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2550 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2551 break;
2552 case CAMERA3_STREAM_BIDIRECTIONAL:
2553 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2554 GRALLOC_USAGE_HW_CAMERA_WRITE;
2555 break;
2556 case CAMERA3_STREAM_OUTPUT:
2557 /* For video encoding stream, set read/write rarely
2558 * flag so that they may be set to un-cached */
2559 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2560 newStream->usage |=
2561 (GRALLOC_USAGE_SW_READ_RARELY |
2562 GRALLOC_USAGE_SW_WRITE_RARELY |
2563 GRALLOC_USAGE_HW_CAMERA_WRITE);
2564 else if (IS_USAGE_ZSL(newStream->usage))
2565 {
2566 LOGD("ZSL usage flag skipping");
2567 }
2568 else if (newStream == zslStream
2569 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2570 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2571 } else
2572 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2573 break;
2574 default:
2575 LOGE("Invalid stream_type %d", newStream->stream_type);
2576 break;
2577 }
2578
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002579 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002580 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2581 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2582 QCamera3ProcessingChannel *channel = NULL;
2583 switch (newStream->format) {
2584 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2585 if ((newStream->usage &
2586 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2587 (streamList->operation_mode ==
2588 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2589 ) {
2590 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2591 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002592 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002593 this,
2594 newStream,
2595 (cam_stream_type_t)
2596 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2597 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2598 mMetadataChannel,
2599 0); //heap buffers are not required for HFR video channel
2600 if (channel == NULL) {
2601 LOGE("allocation of channel failed");
2602 pthread_mutex_unlock(&mMutex);
2603 return -ENOMEM;
2604 }
2605 //channel->getNumBuffers() will return 0 here so use
2606                    //MAX_INFLIGHT_HFR_REQUESTS
2607 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2608 newStream->priv = channel;
2609 LOGI("num video buffers in HFR mode: %d",
2610 MAX_INFLIGHT_HFR_REQUESTS);
2611 } else {
2612 /* Copy stream contents in HFR preview only case to create
2613 * dummy batch channel so that sensor streaming is in
2614 * HFR mode */
2615 if (!m_bIsVideo && (streamList->operation_mode ==
2616 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2617 mDummyBatchStream = *newStream;
2618 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002619 int bufferCount = MAX_INFLIGHT_REQUESTS;
2620 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2621 CAM_STREAM_TYPE_VIDEO) {
2622 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2623 bufferCount = MAX_VIDEO_BUFFERS;
2624 }
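            // bufferCount: video streams use the larger MAX_VIDEO_BUFFERS pool
            // when EIS 3 is enabled; all other streams use MAX_INFLIGHT_REQUESTS.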
Thierry Strudel3d639192016-09-09 11:52:26 -07002625 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2626 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002627 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002628 this,
2629 newStream,
2630 (cam_stream_type_t)
2631 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2632 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2633 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002634 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002635 if (channel == NULL) {
2636 LOGE("allocation of channel failed");
2637 pthread_mutex_unlock(&mMutex);
2638 return -ENOMEM;
2639 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002640 /* disable UBWC for preview, though supported,
2641 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002642 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002643 (previewSize.width == (int32_t)videoWidth)&&
2644 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002645 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002646 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002647 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002648 /* When goog_zoom is linked to the preview or video stream,
2649 * disable UBWC for the linked stream */
2650 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2651 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2652 channel->setUBWCEnabled(false);
2653 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002654 newStream->max_buffers = channel->getNumBuffers();
2655 newStream->priv = channel;
2656 }
2657 break;
2658 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2659 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2660 mChannelHandle,
2661 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002662 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002663 this,
2664 newStream,
2665 (cam_stream_type_t)
2666 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2667 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2668 mMetadataChannel);
2669 if (channel == NULL) {
2670 LOGE("allocation of YUV channel failed");
2671 pthread_mutex_unlock(&mMutex);
2672 return -ENOMEM;
2673 }
2674 newStream->max_buffers = channel->getNumBuffers();
2675 newStream->priv = channel;
2676 break;
2677 }
2678 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2679 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002680 case HAL_PIXEL_FORMAT_RAW10: {
2681 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2682 (HAL_DATASPACE_DEPTH != newStream->data_space))
2683 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002684 mRawChannel = new QCamera3RawChannel(
2685 mCameraHandle->camera_handle, mChannelHandle,
2686 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002687 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002688 this, newStream,
2689 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002690 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002691 if (mRawChannel == NULL) {
2692 LOGE("allocation of raw channel failed");
2693 pthread_mutex_unlock(&mMutex);
2694 return -ENOMEM;
2695 }
2696 newStream->max_buffers = mRawChannel->getNumBuffers();
2697 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2698 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002699 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002700 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002701 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2702 mDepthChannel = new QCamera3DepthChannel(
2703 mCameraHandle->camera_handle, mChannelHandle,
2704 mCameraHandle->ops, NULL, NULL, &padding_info,
2705 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2706 mMetadataChannel);
2707 if (NULL == mDepthChannel) {
2708 LOGE("Allocation of depth channel failed");
2709 pthread_mutex_unlock(&mMutex);
2710 return NO_MEMORY;
2711 }
2712 newStream->priv = mDepthChannel;
2713 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2714 } else {
2715 // Max live snapshot inflight buffer is 1. This is to mitigate
2716 // frame drop issues for video snapshot. The more buffers being
2717 // allocated, the more frame drops there are.
2718 mPictureChannel = new QCamera3PicChannel(
2719 mCameraHandle->camera_handle, mChannelHandle,
2720 mCameraHandle->ops, captureResultCb,
2721 setBufferErrorStatus, &padding_info, this, newStream,
2722 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2723 m_bIs4KVideo, isZsl, mMetadataChannel,
2724 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2725 if (mPictureChannel == NULL) {
2726 LOGE("allocation of channel failed");
2727 pthread_mutex_unlock(&mMutex);
2728 return -ENOMEM;
2729 }
2730 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2731 newStream->max_buffers = mPictureChannel->getNumBuffers();
2732 mPictureChannel->overrideYuvSize(
2733 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2734 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002735 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002736 break;
2737
2738 default:
2739 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002740 pthread_mutex_unlock(&mMutex);
2741 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002742 }
2743 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2744 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2745 } else {
2746 LOGE("Error, Unknown stream type");
2747 pthread_mutex_unlock(&mMutex);
2748 return -EINVAL;
2749 }
2750
2751 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002752 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002753 // Here we only care whether it's EIS3 or not
2754 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2755 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2756 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2757 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002758 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002759 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002760 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002761 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2762 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2763 }
2764 }
2765
2766 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2767 it != mStreamInfo.end(); it++) {
2768 if ((*it)->stream == newStream) {
2769 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2770 break;
2771 }
2772 }
2773 } else {
2774 // Channel already exists for this stream
2775 // Do nothing for now
2776 }
2777 padding_info = gCamCapability[mCameraId]->padding_info;
2778
Emilian Peev7650c122017-01-19 08:24:33 -08002779 /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002780 * since there is no real stream associated with them
2781 */
Emilian Peev7650c122017-01-19 08:24:33 -08002782 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002783 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2784 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002785 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002786 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002787 }
2788
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002789 // Let buffer dispatcher know the configured streams.
2790 mOutputBufferDispatcher.configureStreams(streamList);
2791
Thierry Strudel2896d122017-02-23 19:18:03 -08002792 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2793 onlyRaw = false;
2794 }
2795
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002796 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002797 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002798 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002799 cam_analysis_info_t analysisInfo;
2800 int32_t ret = NO_ERROR;
2801 ret = mCommon.getAnalysisInfo(
2802 FALSE,
2803 analysisFeatureMask,
2804 &analysisInfo);
2805 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002806 cam_color_filter_arrangement_t analysis_color_arrangement =
2807 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2808 CAM_FILTER_ARRANGEMENT_Y :
2809 gCamCapability[mCameraId]->color_arrangement);
2810 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2811 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002812 cam_dimension_t analysisDim;
2813 analysisDim = mCommon.getMatchingDimension(previewSize,
2814 analysisInfo.analysis_recommended_res);
2815
2816 mAnalysisChannel = new QCamera3SupportChannel(
2817 mCameraHandle->camera_handle,
2818 mChannelHandle,
2819 mCameraHandle->ops,
2820 &analysisInfo.analysis_padding_info,
2821 analysisFeatureMask,
2822 CAM_STREAM_TYPE_ANALYSIS,
2823 &analysisDim,
2824 (analysisInfo.analysis_format
2825 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2826 : CAM_FORMAT_YUV_420_NV21),
2827 analysisInfo.hw_analysis_supported,
2828 gCamCapability[mCameraId]->color_arrangement,
2829 this,
2830 0); // force buffer count to 0
2831 } else {
2832 LOGW("getAnalysisInfo failed, ret = %d", ret);
2833 }
2834 if (!mAnalysisChannel) {
2835 LOGW("Analysis channel cannot be created");
2836 }
2837 }
2838
Thierry Strudel3d639192016-09-09 11:52:26 -07002839 //RAW DUMP channel
2840 if (mEnableRawDump && isRawStreamRequested == false){
2841 cam_dimension_t rawDumpSize;
2842 rawDumpSize = getMaxRawSize(mCameraId);
2843 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2844 setPAAFSupport(rawDumpFeatureMask,
2845 CAM_STREAM_TYPE_RAW,
2846 gCamCapability[mCameraId]->color_arrangement);
2847 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2848 mChannelHandle,
2849 mCameraHandle->ops,
2850 rawDumpSize,
2851 &padding_info,
2852 this, rawDumpFeatureMask);
2853 if (!mRawDumpChannel) {
2854 LOGE("Raw Dump channel cannot be created");
2855 pthread_mutex_unlock(&mMutex);
2856 return -ENOMEM;
2857 }
2858 }
2859
Thierry Strudel3d639192016-09-09 11:52:26 -07002860 if (mAnalysisChannel) {
2861 cam_analysis_info_t analysisInfo;
2862 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2863 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2864 CAM_STREAM_TYPE_ANALYSIS;
2865 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2866 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002867 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002868 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2869 &analysisInfo);
2870 if (rc != NO_ERROR) {
2871 LOGE("getAnalysisInfo failed, ret = %d", rc);
2872 pthread_mutex_unlock(&mMutex);
2873 return rc;
2874 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002875 cam_color_filter_arrangement_t analysis_color_arrangement =
2876 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2877 CAM_FILTER_ARRANGEMENT_Y :
2878 gCamCapability[mCameraId]->color_arrangement);
2879 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2880 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2881 analysis_color_arrangement);
2882
Thierry Strudel3d639192016-09-09 11:52:26 -07002883 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002884 mCommon.getMatchingDimension(previewSize,
2885 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002886 mStreamConfigInfo.num_streams++;
2887 }
2888
Thierry Strudel2896d122017-02-23 19:18:03 -08002889 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002890 cam_analysis_info_t supportInfo;
2891 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2892 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2893 setPAAFSupport(callbackFeatureMask,
2894 CAM_STREAM_TYPE_CALLBACK,
2895 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002896 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002897 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002898 if (ret != NO_ERROR) {
2899 /* Ignore the error for Mono camera
2900 * because the PAAF bit mask is only set
2901 * for CAM_STREAM_TYPE_ANALYSIS stream type
2902 */
2903 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2904 LOGW("getAnalysisInfo failed, ret = %d", ret);
2905 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002906 }
2907 mSupportChannel = new QCamera3SupportChannel(
2908 mCameraHandle->camera_handle,
2909 mChannelHandle,
2910 mCameraHandle->ops,
2911 &gCamCapability[mCameraId]->padding_info,
2912 callbackFeatureMask,
2913 CAM_STREAM_TYPE_CALLBACK,
2914 &QCamera3SupportChannel::kDim,
2915 CAM_FORMAT_YUV_420_NV21,
2916 supportInfo.hw_analysis_supported,
2917 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002918 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002919 if (!mSupportChannel) {
2920 LOGE("dummy channel cannot be created");
2921 pthread_mutex_unlock(&mMutex);
2922 return -ENOMEM;
2923 }
2924 }
2925
2926 if (mSupportChannel) {
2927 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2928 QCamera3SupportChannel::kDim;
2929 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2930 CAM_STREAM_TYPE_CALLBACK;
2931 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2932 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2933 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2934 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2935 gCamCapability[mCameraId]->color_arrangement);
2936 mStreamConfigInfo.num_streams++;
2937 }
2938
2939 if (mRawDumpChannel) {
2940 cam_dimension_t rawSize;
2941 rawSize = getMaxRawSize(mCameraId);
2942 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2943 rawSize;
2944 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2945 CAM_STREAM_TYPE_RAW;
2946 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2947 CAM_QCOM_FEATURE_NONE;
2948 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2949 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2950 gCamCapability[mCameraId]->color_arrangement);
2951 mStreamConfigInfo.num_streams++;
2952 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002953
2954 if (mHdrPlusRawSrcChannel) {
2955 cam_dimension_t rawSize;
2956 rawSize = getMaxRawSize(mCameraId);
2957 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2958 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2959 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2960 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2961 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2962 gCamCapability[mCameraId]->color_arrangement);
2963 mStreamConfigInfo.num_streams++;
2964 }
2965
Thierry Strudel3d639192016-09-09 11:52:26 -07002966 /* In HFR mode, if no video stream is added, create a dummy channel so that
2967 * the ISP can run in batch mode even in the preview-only case. This channel is
2968 * never 'start'ed (no stream-on), it is only 'initialized' */
2969 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2970 !m_bIsVideo) {
2971 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2972 setPAAFSupport(dummyFeatureMask,
2973 CAM_STREAM_TYPE_VIDEO,
2974 gCamCapability[mCameraId]->color_arrangement);
2975 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2976 mChannelHandle,
2977 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002978 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002979 this,
2980 &mDummyBatchStream,
2981 CAM_STREAM_TYPE_VIDEO,
2982 dummyFeatureMask,
2983 mMetadataChannel);
2984 if (NULL == mDummyBatchChannel) {
2985 LOGE("creation of mDummyBatchChannel failed."
2986 "Preview will use non-hfr sensor mode ");
2987 }
2988 }
2989 if (mDummyBatchChannel) {
2990 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2991 mDummyBatchStream.width;
2992 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2993 mDummyBatchStream.height;
2994 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2995 CAM_STREAM_TYPE_VIDEO;
2996 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2997 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2998 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2999 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3000 gCamCapability[mCameraId]->color_arrangement);
3001 mStreamConfigInfo.num_streams++;
3002 }
3003
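    // Advertise the in-flight buffer range: max_buffers is left at 0 for 4K video,
    // raised to MAX_VIDEO_BUFFERS when EIS 3 is enabled, and MAX_INFLIGHT_REQUESTS otherwise.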
3004 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3005 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003006 m_bIs4KVideo ? 0 :
3007 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003008
3009 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3010 for (pendingRequestIterator i = mPendingRequestsList.begin();
3011 i != mPendingRequestsList.end();) {
3012 i = erasePendingRequest(i);
3013 }
3014 mPendingFrameDropList.clear();
3015 // Initialize/Reset the pending buffers list
3016 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3017 req.mPendingBufferList.clear();
3018 }
3019 mPendingBuffersMap.mPendingBuffersInRequest.clear();
3020
Thierry Strudel3d639192016-09-09 11:52:26 -07003021 mCurJpegMeta.clear();
3022 //Get min frame duration for this streams configuration
3023 deriveMinFrameDuration();
3024
Chien-Yu Chenee335912017-02-09 17:53:20 -08003025 mFirstPreviewIntentSeen = false;
3026
3027 // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003028 {
3029 Mutex::Autolock l(gHdrPlusClientLock);
3030 disableHdrPlusModeLocked();
3031 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003032
Thierry Strudel3d639192016-09-09 11:52:26 -07003033 // Update state
3034 mState = CONFIGURED;
3035
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003036 mFirstMetadataCallback = true;
3037
Thierry Strudel3d639192016-09-09 11:52:26 -07003038 pthread_mutex_unlock(&mMutex);
3039
3040 return rc;
3041}
3042
3043/*===========================================================================
3044 * FUNCTION : validateCaptureRequest
3045 *
3046 * DESCRIPTION: validate a capture request from camera service
3047 *
3048 * PARAMETERS :
3049 * @request : request from framework to process
3050 *
3051 * RETURN :
3052 *
3053 *==========================================================================*/
3054int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003055 camera3_capture_request_t *request,
3056 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003057{
3058 ssize_t idx = 0;
3059 const camera3_stream_buffer_t *b;
3060 CameraMetadata meta;
3061
3062 /* Sanity check the request */
3063 if (request == NULL) {
3064 LOGE("NULL capture request");
3065 return BAD_VALUE;
3066 }
3067
3068 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3069 /*settings cannot be null for the first request*/
3070 return BAD_VALUE;
3071 }
3072
3073 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003074 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3075 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003076 LOGE("Request %d: No output buffers provided!",
3077 frameNumber);
3078 return BAD_VALUE;
3079 }
3080 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3081 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3082 request->num_output_buffers, MAX_NUM_STREAMS);
3083 return BAD_VALUE;
3084 }
3085 if (request->input_buffer != NULL) {
3086 b = request->input_buffer;
3087 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3088 LOGE("Request %d: Buffer %ld: Status not OK!",
3089 frameNumber, (long)idx);
3090 return BAD_VALUE;
3091 }
3092 if (b->release_fence != -1) {
3093 LOGE("Request %d: Buffer %ld: Has a release fence!",
3094 frameNumber, (long)idx);
3095 return BAD_VALUE;
3096 }
3097 if (b->buffer == NULL) {
3098 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3099 frameNumber, (long)idx);
3100 return BAD_VALUE;
3101 }
3102 }
3103
3104 // Validate all buffers
3105 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003106 if (b == NULL) {
3107 return BAD_VALUE;
3108 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003109 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003110 QCamera3ProcessingChannel *channel =
3111 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3112 if (channel == NULL) {
3113 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3114 frameNumber, (long)idx);
3115 return BAD_VALUE;
3116 }
3117 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3118 LOGE("Request %d: Buffer %ld: Status not OK!",
3119 frameNumber, (long)idx);
3120 return BAD_VALUE;
3121 }
3122 if (b->release_fence != -1) {
3123 LOGE("Request %d: Buffer %ld: Has a release fence!",
3124 frameNumber, (long)idx);
3125 return BAD_VALUE;
3126 }
3127 if (b->buffer == NULL) {
3128 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3129 frameNumber, (long)idx);
3130 return BAD_VALUE;
3131 }
3132 if (*(b->buffer) == NULL) {
3133 LOGE("Request %d: Buffer %ld: NULL private handle!",
3134 frameNumber, (long)idx);
3135 return BAD_VALUE;
3136 }
3137 idx++;
3138 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003139 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003140 return NO_ERROR;
3141}
3142
3143/*===========================================================================
3144 * FUNCTION : deriveMinFrameDuration
3145 *
3146 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3147 * on currently configured streams.
3148 *
3149 * PARAMETERS : NONE
3150 *
3151 * RETURN : NONE
3152 *
3153 *==========================================================================*/
3154void QCamera3HardwareInterface::deriveMinFrameDuration()
3155{
3156 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003157 bool hasRaw = false;
3158
3159 mMinRawFrameDuration = 0;
3160 mMinJpegFrameDuration = 0;
3161 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003162
3163 maxJpegDim = 0;
3164 maxProcessedDim = 0;
3165 maxRawDim = 0;
3166
3167 // Figure out maximum jpeg, processed, and raw dimensions
3168 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3169 it != mStreamInfo.end(); it++) {
3170
3171 // Input stream doesn't have valid stream_type
3172 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3173 continue;
3174
3175 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3176 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3177 if (dimension > maxJpegDim)
3178 maxJpegDim = dimension;
3179 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3180 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3181 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003182 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003183 if (dimension > maxRawDim)
3184 maxRawDim = dimension;
3185 } else {
3186 if (dimension > maxProcessedDim)
3187 maxProcessedDim = dimension;
3188 }
3189 }
3190
3191 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3192 MAX_SIZES_CNT);
3193
3194 //Assume all jpeg dimensions are in processed dimensions.
3195 if (maxJpegDim > maxProcessedDim)
3196 maxProcessedDim = maxJpegDim;
3197 //Find the smallest raw dimension that is greater or equal to jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003198 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003199 maxRawDim = INT32_MAX;
3200
3201 for (size_t i = 0; i < count; i++) {
3202 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3203 gCamCapability[mCameraId]->raw_dim[i].height;
3204 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3205 maxRawDim = dimension;
3206 }
3207 }
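    // At this point maxRawDim holds the RAW dimension used to look up the minimum
    // RAW frame duration: the largest requested RAW size, or the smallest sensor
    // RAW size covering the largest processed output when that output is bigger.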
3208
3209 //Find minimum durations for processed, jpeg, and raw
3210 for (size_t i = 0; i < count; i++) {
3211 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3212 gCamCapability[mCameraId]->raw_dim[i].height) {
3213 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3214 break;
3215 }
3216 }
3217 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3218 for (size_t i = 0; i < count; i++) {
3219 if (maxProcessedDim ==
3220 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3221 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3222 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3223 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3224 break;
3225 }
3226 }
3227}
3228
3229/*===========================================================================
3230 * FUNCTION : getMinFrameDuration
3231 *
3232 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
3233 * and current request configuration.
3234 *
3235 * PARAMETERS : @request: request sent by the frameworks
3236 *
3237 * RETURN : min frame duration for a particular request
3238 *
3239 *==========================================================================*/
3240int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3241{
3242 bool hasJpegStream = false;
3243 bool hasRawStream = false;
3244 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3245 const camera3_stream_t *stream = request->output_buffers[i].stream;
3246 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3247 hasJpegStream = true;
3248 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3249 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3250 stream->format == HAL_PIXEL_FORMAT_RAW16)
3251 hasRawStream = true;
3252 }
3253
3254 if (!hasJpegStream)
3255 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3256 else
3257 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3258}
3259
3260/*===========================================================================
3261 * FUNCTION : handleBuffersDuringFlushLock
3262 *
3263 * DESCRIPTION: Account for buffers returned from back-end during flush
3264 * This function is executed while mMutex is held by the caller.
3265 *
3266 * PARAMETERS :
3267 * @buffer: image buffer for the callback
3268 *
3269 * RETURN :
3270 *==========================================================================*/
3271void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3272{
3273 bool buffer_found = false;
3274 for (List<PendingBuffersInRequest>::iterator req =
3275 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3276 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3277 for (List<PendingBufferInfo>::iterator i =
3278 req->mPendingBufferList.begin();
3279 i != req->mPendingBufferList.end(); i++) {
3280 if (i->buffer == buffer->buffer) {
3281 mPendingBuffersMap.numPendingBufsAtFlush--;
3282 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3283 buffer->buffer, req->frame_number,
3284 mPendingBuffersMap.numPendingBufsAtFlush);
3285 buffer_found = true;
3286 break;
3287 }
3288 }
3289 if (buffer_found) {
3290 break;
3291 }
3292 }
3293 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3294 //signal the flush()
3295 LOGD("All buffers returned to HAL. Continue flush");
3296 pthread_cond_signal(&mBuffersCond);
3297 }
3298}
3299
Thierry Strudel3d639192016-09-09 11:52:26 -07003300/*===========================================================================
3301 * FUNCTION : handleBatchMetadata
3302 *
3303 * DESCRIPTION: Handles metadata buffer callback in batch mode
3304 *
3305 * PARAMETERS : @metadata_buf: metadata buffer
3306 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3307 * the meta buf in this method
3308 *
3309 * RETURN :
3310 *
3311 *==========================================================================*/
3312void QCamera3HardwareInterface::handleBatchMetadata(
3313 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3314{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003315 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003316
3317 if (NULL == metadata_buf) {
3318 LOGE("metadata_buf is NULL");
3319 return;
3320 }
3321 /* In batch mode, the metadata will contain the frame number and timestamp of
3322 * the last frame in the batch. Eg: a batch containing buffers from requests
3323 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3324 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3325 * multiple process_capture_results */
3326 metadata_buffer_t *metadata =
3327 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3328 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3329 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3330 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3331 uint32_t frame_number = 0, urgent_frame_number = 0;
3332 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3333 bool invalid_metadata = false;
3334 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3335 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003336 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003337
3338 int32_t *p_frame_number_valid =
3339 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3340 uint32_t *p_frame_number =
3341 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3342 int64_t *p_capture_time =
3343 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3344 int32_t *p_urgent_frame_number_valid =
3345 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3346 uint32_t *p_urgent_frame_number =
3347 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3348
3349 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3350 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3351 (NULL == p_urgent_frame_number)) {
3352 LOGE("Invalid metadata");
3353 invalid_metadata = true;
3354 } else {
3355 frame_number_valid = *p_frame_number_valid;
3356 last_frame_number = *p_frame_number;
3357 last_frame_capture_time = *p_capture_time;
3358 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3359 last_urgent_frame_number = *p_urgent_frame_number;
3360 }
3361
3362 /* In batchmode, when no video buffers are requested, set_parms are sent
3363 * for every capture_request. The difference between consecutive urgent
3364 * frame numbers and frame numbers should be used to interpolate the
3365 * corresponding frame numbers and time stamps */
3366 pthread_mutex_lock(&mMutex);
3367 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003368 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3369 if(idx < 0) {
3370 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3371 last_urgent_frame_number);
3372 mState = ERROR;
3373 pthread_mutex_unlock(&mMutex);
3374 return;
3375 }
3376 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003377 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3378 first_urgent_frame_number;
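        // E.g. if the batch spans urgent frames 5..8, urgentFrameNumDiff is 4.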
3379
3380 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3381 urgent_frame_number_valid,
3382 first_urgent_frame_number, last_urgent_frame_number);
3383 }
3384
3385 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003386 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3387 if(idx < 0) {
3388 LOGE("Invalid frame number received: %d. Irrecoverable error",
3389 last_frame_number);
3390 mState = ERROR;
3391 pthread_mutex_unlock(&mMutex);
3392 return;
3393 }
3394 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003395 frameNumDiff = last_frame_number + 1 -
3396 first_frame_number;
3397 mPendingBatchMap.removeItem(last_frame_number);
3398
3399 LOGD("frm: valid: %d frm_num: %d - %d",
3400 frame_number_valid,
3401 first_frame_number, last_frame_number);
3402
3403 }
3404 pthread_mutex_unlock(&mMutex);
3405
3406 if (urgent_frame_number_valid || frame_number_valid) {
3407 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3408 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3409 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3410 urgentFrameNumDiff, last_urgent_frame_number);
3411 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3412 LOGE("frameNumDiff: %d frameNum: %d",
3413 frameNumDiff, last_frame_number);
3414 }
3415
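    // Replay the batch once per inferred frame so that every pending request
    // covered by this batch gets its own metadata callback.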
3416 for (size_t i = 0; i < loopCount; i++) {
3417 /* handleMetadataWithLock is called even for invalid_metadata for
3418 * pipeline depth calculation */
3419 if (!invalid_metadata) {
3420 /* Infer frame number. Batch metadata contains frame number of the
3421 * last frame */
3422 if (urgent_frame_number_valid) {
3423 if (i < urgentFrameNumDiff) {
3424 urgent_frame_number =
3425 first_urgent_frame_number + i;
3426 LOGD("inferred urgent frame_number: %d",
3427 urgent_frame_number);
3428 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3429 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3430 } else {
3431 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3432 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3433 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3434 }
3435 }
3436
3437 /* Infer frame number. Batch metadata contains frame number of the
3438 * last frame */
3439 if (frame_number_valid) {
3440 if (i < frameNumDiff) {
3441 frame_number = first_frame_number + i;
3442 LOGD("inferred frame_number: %d", frame_number);
3443 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3444 CAM_INTF_META_FRAME_NUMBER, frame_number);
3445 } else {
3446 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3447 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3448 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3449 }
3450 }
3451
3452 if (last_frame_capture_time) {
3453 //Infer timestamp
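                // E.g. for a 4-frame batch at 120 fps whose last capture time is T,
                // frame i (0..3) is stamped T - (3 - i)/120 s.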
3454 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003455 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003456 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003457 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003458 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3459 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3460 LOGD("batch capture_time: %lld, capture_time: %lld",
3461 last_frame_capture_time, capture_time);
3462 }
3463 }
3464 pthread_mutex_lock(&mMutex);
3465 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003466 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003467 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3468 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003469 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003470 pthread_mutex_unlock(&mMutex);
3471 }
3472
3473 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003474 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003475 mMetadataChannel->bufDone(metadata_buf);
3476 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003477 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003478 }
3479}
3480
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003481void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3482 camera3_error_msg_code_t errorCode)
3483{
3484 camera3_notify_msg_t notify_msg;
3485 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3486 notify_msg.type = CAMERA3_MSG_ERROR;
3487 notify_msg.message.error.error_code = errorCode;
3488 notify_msg.message.error.error_stream = NULL;
3489 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003490 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003491
3492 return;
3493}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003494
3495/*===========================================================================
3496 * FUNCTION : sendPartialMetadataWithLock
3497 *
3498 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3499 *
3500 * PARAMETERS : @metadata: metadata buffer
3501 * @requestIter: The iterator for the pending capture request for
3502 * which the partial result is being sen
3503 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3504 * last urgent metadata in a batch. Always true for non-batch mode
3505 *
3506 * RETURN :
3507 *
3508 *==========================================================================*/
3509
3510void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3511 metadata_buffer_t *metadata,
3512 const pendingRequestIterator requestIter,
3513 bool lastUrgentMetadataInBatch)
3514{
3515 camera3_capture_result_t result;
3516 memset(&result, 0, sizeof(camera3_capture_result_t));
3517
3518 requestIter->partial_result_cnt++;
3519
3520 // Extract 3A metadata
3521 result.result = translateCbUrgentMetadataToResultMetadata(
3522 metadata, lastUrgentMetadataInBatch);
3523 // Populate metadata result
3524 result.frame_number = requestIter->frame_number;
3525 result.num_output_buffers = 0;
3526 result.output_buffers = NULL;
3527 result.partial_result = requestIter->partial_result_cnt;
3528
3529 {
3530 Mutex::Autolock l(gHdrPlusClientLock);
3531 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3532 // Notify HDR+ client about the partial metadata.
3533 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3534 result.partial_result == PARTIAL_RESULT_COUNT);
3535 }
3536 }
3537
3538 orchestrateResult(&result);
3539 LOGD("urgent frame_number = %u", result.frame_number);
3540 free_camera_metadata((camera_metadata_t *)result.result);
3541}
3542
Thierry Strudel3d639192016-09-09 11:52:26 -07003543/*===========================================================================
3544 * FUNCTION : handleMetadataWithLock
3545 *
3546 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3547 *
3548 * PARAMETERS : @metadata_buf: metadata buffer
3549 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3550 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003551 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3552 * last urgent metadata in a batch. Always true for non-batch mode
3553 * @lastMetadataInBatch: Boolean to indicate whether this is the
3554 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003555 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3556 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003557 *
3558 * RETURN :
3559 *
3560 *==========================================================================*/
3561void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003562 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003563 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3564 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003565{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003566 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003567 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3568 //during flush do not send metadata from this thread
3569 LOGD("not sending metadata during flush or when mState is error");
3570 if (free_and_bufdone_meta_buf) {
3571 mMetadataChannel->bufDone(metadata_buf);
3572 free(metadata_buf);
3573 }
3574 return;
3575 }
3576
3577 //not in flush
3578 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3579 int32_t frame_number_valid, urgent_frame_number_valid;
3580 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003581 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003582 nsecs_t currentSysTime;
3583
3584 int32_t *p_frame_number_valid =
3585 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3586 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3587 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003588 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003589 int32_t *p_urgent_frame_number_valid =
3590 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3591 uint32_t *p_urgent_frame_number =
3592 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3593 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3594 metadata) {
3595 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3596 *p_frame_number_valid, *p_frame_number);
3597 }
3598
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003599 camera_metadata_t *resultMetadata = nullptr;
3600
Thierry Strudel3d639192016-09-09 11:52:26 -07003601 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3602 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3603 LOGE("Invalid metadata");
3604 if (free_and_bufdone_meta_buf) {
3605 mMetadataChannel->bufDone(metadata_buf);
3606 free(metadata_buf);
3607 }
3608 goto done_metadata;
3609 }
3610 frame_number_valid = *p_frame_number_valid;
3611 frame_number = *p_frame_number;
3612 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003613 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003614 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3615 urgent_frame_number = *p_urgent_frame_number;
3616 currentSysTime = systemTime(CLOCK_MONOTONIC);
3617
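    // If the sensor timestamp is not calibrated to the monotonic clock, estimate
    // the BOOTTIME-to-MONOTONIC offset by sampling both clocks a few times, keeping
    // the sample with the smallest bracketing window, and subtracting it from the
    // capture time.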
Jason Lee603176d2017-05-31 11:43:27 -07003618 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3619 const int tries = 3;
3620 nsecs_t bestGap, measured;
3621 for (int i = 0; i < tries; ++i) {
3622 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3623 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3624 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3625 const nsecs_t gap = tmono2 - tmono;
3626 if (i == 0 || gap < bestGap) {
3627 bestGap = gap;
3628 measured = tbase - ((tmono + tmono2) >> 1);
3629 }
3630 }
3631 capture_time -= measured;
3632 }
3633
Thierry Strudel3d639192016-09-09 11:52:26 -07003634 // Detect if buffers from any requests are overdue
3635 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003636 int64_t timeout;
3637 {
3638 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3639 // If there is a pending HDR+ request, the following requests may be blocked until the
3640 // HDR+ request is done. So allow a longer timeout.
3641 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3642 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3643 }
3644
3645 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003646 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003647 assert(missed.stream->priv);
3648 if (missed.stream->priv) {
3649 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3650 assert(ch->mStreams[0]);
3651 if (ch->mStreams[0]) {
3652 LOGE("Cancel missing frame = %d, buffer = %p,"
3653 "stream type = %d, stream format = %d",
3654 req.frame_number, missed.buffer,
3655 ch->mStreams[0]->getMyType(), missed.stream->format);
3656 ch->timeoutFrame(req.frame_number);
3657 }
3658 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003659 }
3660 }
3661 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003662 //For the very first metadata callback, regardless of whether it contains a valid
3663 //frame number, send the partial metadata for the jump-starting requests.
3664 //Note that this has to be done even if the metadata doesn't contain a valid
3665 //urgent frame number, because in the case where only 1 request is ever submitted
3666 //to the HAL, there won't be a subsequent valid urgent frame number.
3667 if (mFirstMetadataCallback) {
3668 for (pendingRequestIterator i =
3669 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3670 if (i->bUseFirstPartial) {
3671 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3672 }
3673 }
3674 mFirstMetadataCallback = false;
3675 }
3676
Thierry Strudel3d639192016-09-09 11:52:26 -07003677 //Partial result on process_capture_result for timestamp
3678 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003679 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003680
3681 //Received an urgent frame number, handle it
3682 //using partial results
3683 for (pendingRequestIterator i =
3684 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3685 LOGD("Iterator Frame = %d urgent frame = %d",
3686 i->frame_number, urgent_frame_number);
3687
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003688 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003689 (i->partial_result_cnt == 0)) {
3690 LOGE("Error: HAL missed urgent metadata for frame number %d",
3691 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003692 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003693 }
3694
3695 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003696 i->partial_result_cnt == 0) {
3697 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003698 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3699 // Instant AEC settled for this frame.
3700 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3701 mInstantAECSettledFrameNumber = urgent_frame_number;
3702 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003703 break;
3704 }
3705 }
3706 }
3707
3708 if (!frame_number_valid) {
3709 LOGD("Not a valid normal frame number, used as SOF only");
3710 if (free_and_bufdone_meta_buf) {
3711 mMetadataChannel->bufDone(metadata_buf);
3712 free(metadata_buf);
3713 }
3714 goto done_metadata;
3715 }
3716 LOGH("valid frame_number = %u, capture_time = %lld",
3717 frame_number, capture_time);
3718
Emilian Peev7650c122017-01-19 08:24:33 -08003719 if (metadata->is_depth_data_valid) {
3720 handleDepthDataLocked(metadata->depth_data, frame_number);
3721 }
3722
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003723 // Check whether any stream buffer corresponding to this frame number was dropped.
3724 // If dropped, send ERROR_BUFFER for the corresponding stream.
3725 // Alternatively, if instant AEC is enabled, frames need to be dropped until AEC is settled.
3726 for (auto & pendingRequest : mPendingRequestsList) {
3727 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3728 mInstantAECSettledFrameNumber)) {
3729 camera3_notify_msg_t notify_msg = {};
3730 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003731 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003732 QCamera3ProcessingChannel *channel =
3733 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003734 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003735 if (p_cam_frame_drop) {
3736 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003737 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003738 // Got the stream ID for drop frame.
3739 dropFrame = true;
3740 break;
3741 }
3742 }
3743 } else {
3744 // This is instant AEC case.
3745 // For instant AEC drop the stream untill AEC is settled.
3746 dropFrame = true;
3747 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003748
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003749 if (dropFrame) {
3750 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3751 if (p_cam_frame_drop) {
3752 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003753 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003754 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003755 } else {
3756 // For instant AEC, inform frame drop and frame number
3757 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3758 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003759 pendingRequest.frame_number, streamID,
3760 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003761 }
3762 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003763 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003764 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003765 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003766 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003767 if (p_cam_frame_drop) {
3768 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003769 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003770 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003771 } else {
3772 // For instant AEC, inform frame drop and frame number
3773 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3774 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003775 pendingRequest.frame_number, streamID,
3776 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003777 }
3778 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003779 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003780 PendingFrameDrop.stream_ID = streamID;
3781 // Add the Frame drop info to mPendingFrameDropList
3782 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003783 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003784 }
3785 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003786 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003787
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003788 for (auto & pendingRequest : mPendingRequestsList) {
3789 // Find the pending request with the frame number.
3790 if (pendingRequest.frame_number == frame_number) {
3791 // Update the sensor timestamp.
3792 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003793
Thierry Strudel3d639192016-09-09 11:52:26 -07003794
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003795 /* Set the timestamp in display metadata so that clients aware of
3796 private_handle such as VT can use this un-modified timestamps.
3797 Camera framework is unaware of this timestamp and cannot change this */
Jason Lee603176d2017-05-31 11:43:27 -07003798 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003799
Thierry Strudel3d639192016-09-09 11:52:26 -07003800 // Find channel requiring metadata, meaning internal offline postprocess
3801 // is needed.
3802 //TODO: for now, we don't support two streams requiring metadata at the same time.
3803 // (because we are not making copies, and metadata buffer is not reference counted.
3804 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003805 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3806 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003807 if (iter->need_metadata) {
3808 internalPproc = true;
3809 QCamera3ProcessingChannel *channel =
3810 (QCamera3ProcessingChannel *)iter->stream->priv;
3811 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003812 if(p_is_metabuf_queued != NULL) {
3813 *p_is_metabuf_queued = true;
3814 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003815 break;
3816 }
3817 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003818 for (auto itr = pendingRequest.internalRequestList.begin();
3819 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003820 if (itr->need_metadata) {
3821 internalPproc = true;
3822 QCamera3ProcessingChannel *channel =
3823 (QCamera3ProcessingChannel *)itr->stream->priv;
3824 channel->queueReprocMetadata(metadata_buf);
3825 break;
3826 }
3827 }
3828
Thierry Strudel54dc9782017-02-15 12:12:10 -08003829 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003830
3831 bool *enableZsl = nullptr;
3832 if (gExposeEnableZslKey) {
3833 enableZsl = &pendingRequest.enableZsl;
3834 }
3835
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003836 resultMetadata = translateFromHalMetadata(metadata,
3837 pendingRequest.timestamp, pendingRequest.request_id,
3838 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3839 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003840 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003841 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003842 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003843 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003844 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003845 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003846
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003847 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003848
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003849 if (pendingRequest.blob_request) {
3850 //Dump tuning metadata if enabled and available
3851 char prop[PROPERTY_VALUE_MAX];
3852 memset(prop, 0, sizeof(prop));
3853 property_get("persist.camera.dumpmetadata", prop, "0");
3854 int32_t enabled = atoi(prop);
3855 if (enabled && metadata->is_tuning_params_valid) {
3856 dumpMetadataToFile(metadata->tuning_params,
3857 mMetaFrameCount,
3858 enabled,
3859 "Snapshot",
3860 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003861 }
3862 }
3863
3864 if (!internalPproc) {
3865 LOGD("couldn't find need_metadata for this metadata");
3866 // Return metadata buffer
3867 if (free_and_bufdone_meta_buf) {
3868 mMetadataChannel->bufDone(metadata_buf);
3869 free(metadata_buf);
3870 }
3871 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003872
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003873 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003874 }
3875 }
3876
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003877 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3878
3879 // Try to send out capture result metadata.
3880 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003881 return;
3882
Thierry Strudel3d639192016-09-09 11:52:26 -07003883done_metadata:
3884 for (pendingRequestIterator i = mPendingRequestsList.begin();
3885 i != mPendingRequestsList.end() ;i++) {
3886 i->pipeline_depth++;
3887 }
3888 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3889 unblockRequestIfNecessary();
3890}
3891
3892/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003893 * FUNCTION : handleDepthDataLocked
3894 *
3895 * DESCRIPTION: Handles incoming depth data
3896 *
3897 * PARAMETERS : @depthData : Depth data
3898 * @frameNumber: Frame number of the incoming depth data
3899 *
3900 * RETURN :
3901 *
3902 *==========================================================================*/
3903void QCamera3HardwareInterface::handleDepthDataLocked(
3904 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3905 uint32_t currentFrameNumber;
3906 buffer_handle_t *depthBuffer;
3907
3908 if (nullptr == mDepthChannel) {
3909 LOGE("Depth channel not present!");
3910 return;
3911 }
3912
3913 camera3_stream_buffer_t resultBuffer =
3914 {.acquire_fence = -1,
3915 .release_fence = -1,
3916 .status = CAMERA3_BUFFER_STATUS_OK,
3917 .buffer = nullptr,
3918 .stream = mDepthChannel->getStream()};
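    // Drain the depth channel in frame order: buffers older than the incoming
    // depth payload are returned to the framework as errors, the buffer that
    // matches frameNumber is populated with the depth data, and anything newer
    // stays queued for a later callback.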
Emilian Peev7650c122017-01-19 08:24:33 -08003919 do {
3920 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3921 if (nullptr == depthBuffer) {
3922 break;
3923 }
3924
Emilian Peev7650c122017-01-19 08:24:33 -08003925 resultBuffer.buffer = depthBuffer;
3926 if (currentFrameNumber == frameNumber) {
3927 int32_t rc = mDepthChannel->populateDepthData(depthData,
3928 frameNumber);
3929 if (NO_ERROR != rc) {
3930 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3931 } else {
3932 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3933 }
3934 } else if (currentFrameNumber > frameNumber) {
3935 break;
3936 } else {
3937 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3938 {{currentFrameNumber, mDepthChannel->getStream(),
3939 CAMERA3_MSG_ERROR_BUFFER}}};
3940 orchestrateNotify(&notify_msg);
3941
3942 LOGE("Depth buffer for frame number: %d is missing, "
3943 "returning buffer error!", currentFrameNumber);
3944 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3945 }
3946 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003947 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003948 } while (currentFrameNumber < frameNumber);
3949}
3950
3951/*===========================================================================
3952 * FUNCTION : notifyErrorFoPendingDepthData
3953 *
3954 * DESCRIPTION: Returns error for any pending depth buffers
3955 *
3956 * PARAMETERS : @depthCh : depth channel that needs to get flushed
3957 *
3958 * RETURN :
3959 *
3960 *==========================================================================*/
3961void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3962 QCamera3DepthChannel *depthCh) {
3963 uint32_t currentFrameNumber;
3964 buffer_handle_t *depthBuffer;
3965
3966 if (nullptr == depthCh) {
3967 return;
3968 }
3969
3970 camera3_notify_msg_t notify_msg =
3971 {.type = CAMERA3_MSG_ERROR,
3972 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3973 camera3_stream_buffer_t resultBuffer =
3974 {.acquire_fence = -1,
3975 .release_fence = -1,
3976 .buffer = nullptr,
3977 .stream = depthCh->getStream(),
3978 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08003979
3980 while (nullptr !=
3981 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3982 depthCh->unmapBuffer(currentFrameNumber);
3983
3984 notify_msg.message.error.frame_number = currentFrameNumber;
3985 orchestrateNotify(&notify_msg);
3986
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003987 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003988 };
3989}
3990
3991/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003992 * FUNCTION : hdrPlusPerfLock
3993 *
3994 * DESCRIPTION: perf lock for HDR+ using custom intent
3995 *
3996 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3997 *
3998 * RETURN : None
3999 *
4000 *==========================================================================*/
4001void QCamera3HardwareInterface::hdrPlusPerfLock(
4002 mm_camera_super_buf_t *metadata_buf)
4003{
4004 if (NULL == metadata_buf) {
4005 LOGE("metadata_buf is NULL");
4006 return;
4007 }
4008 metadata_buffer_t *metadata =
4009 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4010 int32_t *p_frame_number_valid =
4011 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4012 uint32_t *p_frame_number =
4013 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4014
4015 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4016 LOGE("%s: Invalid metadata", __func__);
4017 return;
4018 }
4019
4020 //acquire perf lock for 5 sec after the last HDR frame is captured
4021 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4022 if ((p_frame_number != NULL) &&
4023 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004024 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004025 }
4026 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004027}
4028
4029/*===========================================================================
4030 * FUNCTION : handleInputBufferWithLock
4031 *
4032 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4033 *
4034 * PARAMETERS : @frame_number: frame number of the input buffer
4035 *
4036 * RETURN :
4037 *
4038 *==========================================================================*/
4039void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4040{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004041 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004042 pendingRequestIterator i = mPendingRequestsList.begin();
4043 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4044 i++;
4045 }
4046 if (i != mPendingRequestsList.end() && i->input_buffer) {
4047 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004048 CameraMetadata settings;
4049 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4050 if(i->settings) {
4051 settings = i->settings;
4052 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4053 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004054 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004055 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004056 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004057 } else {
4058 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004059 }
4060
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004061 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4062 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4063 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004064
4065 camera3_capture_result result;
4066 memset(&result, 0, sizeof(camera3_capture_result));
4067 result.frame_number = frame_number;
4068 result.result = i->settings;
4069 result.input_buffer = i->input_buffer;
4070 result.partial_result = PARTIAL_RESULT_COUNT;
4071
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004072 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004073 LOGD("Input request metadata and input buffer frame_number = %u",
4074 i->frame_number);
4075 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004076
4077 // Dispatch result metadata that may be just unblocked by this reprocess result.
4078 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004079 } else {
4080 LOGE("Could not find input request for frame number %d", frame_number);
4081 }
4082}
4083
4084/*===========================================================================
4085 * FUNCTION : handleBufferWithLock
4086 *
4087 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4088 *
4089 * PARAMETERS : @buffer: image buffer for the callback
4090 * @frame_number: frame number of the image buffer
4091 *
4092 * RETURN :
4093 *
4094 *==========================================================================*/
4095void QCamera3HardwareInterface::handleBufferWithLock(
4096 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4097{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004098 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004099
4100 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4101 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4102 }
4103
Thierry Strudel3d639192016-09-09 11:52:26 -07004104 /* Nothing to be done during error state */
4105 if ((ERROR == mState) || (DEINIT == mState)) {
4106 return;
4107 }
4108 if (mFlushPerf) {
4109 handleBuffersDuringFlushLock(buffer);
4110 return;
4111 }
4112 //not in flush
4113 // If the frame number doesn't exist in the pending request list,
4114 // directly send the buffer to the frameworks, and update pending buffers map
4115 // Otherwise, book-keep the buffer.
4116 pendingRequestIterator i = mPendingRequestsList.begin();
4117 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4118 i++;
4119 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004120
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004121 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004122 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004123 // For a reprocessing request, try to send out result metadata.
4124 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004125 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004126 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004127
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004128 // Check if this frame was dropped.
4129 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4130 m != mPendingFrameDropList.end(); m++) {
4131 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4132 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4133 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4134 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4135 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4136 frame_number, streamID);
4137 m = mPendingFrameDropList.erase(m);
4138 break;
4139 }
4140 }
4141
4142 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4143 LOGH("result frame_number = %d, buffer = %p",
4144 frame_number, buffer->buffer);
4145
4146 mPendingBuffersMap.removeBuf(buffer->buffer);
4147 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4148
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004149 if (mPreviewStarted == false) {
4150 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4151 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004152 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4153
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004154 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4155 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4156 mPreviewStarted = true;
4157
4158 // Set power hint for preview
4159 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4160 }
4161 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004162}
4163
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004164void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004165 const camera_metadata_t *resultMetadata)
4166{
4167 // Find the pending request for this result metadata.
4168 auto requestIter = mPendingRequestsList.begin();
4169 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4170 requestIter++;
4171 }
4172
4173 if (requestIter == mPendingRequestsList.end()) {
4174 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4175 return;
4176 }
4177
4178 // Update the result metadata
4179 requestIter->resultMetadata = resultMetadata;
4180
4181 // Check what type of request this is.
4182 bool liveRequest = false;
4183 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004184 // HDR+ request doesn't have partial results.
4185 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004186 } else if (requestIter->input_buffer != nullptr) {
4187 // Reprocessing request result is the same as settings.
4188 requestIter->resultMetadata = requestIter->settings;
4189 // Reprocessing request doesn't have partial results.
4190 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4191 } else {
4192 liveRequest = true;
4193 requestIter->partial_result_cnt++;
4194 mPendingLiveRequest--;
4195
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004196 {
4197 Mutex::Autolock l(gHdrPlusClientLock);
4198 // For a live request, send the metadata to HDR+ client.
4199 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4200 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4201 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4202 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004203 }
4204 }
4205
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004206 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4207}
4208
4209void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4210 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004211 // The pending requests are ordered by increasing frame numbers. A request's result metadata
4212 // is ready to be sent only once all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004213 bool readyToSend = true;
4214
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004215 // Iterate through the pending requests to send out result metadata that are ready. Also if
4216 // this result metadata belongs to a live request, notify errors for previous live requests
4217 // that don't have result metadata yet.
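    // Illustrative note (hypothetical frame numbers): if the list holds frames
    // 10, 11 and 12 and only 11 and 12 have result metadata, nothing is sent
    // because frame 10 still blocks the queue. Once 10 becomes ready (or is
    // reported as an ERROR_RESULT below), 10, 11 and 12 are dispatched
    // back-to-back, preserving frame order.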
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004218 auto iter = mPendingRequestsList.begin();
4219 while (iter != mPendingRequestsList.end()) {
4220 // Check if current pending request is ready. If it's not ready, the following pending
4221 // requests are also not ready.
4222 if (readyToSend && iter->resultMetadata == nullptr) {
4223 readyToSend = false;
4224 }
4225
4226 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4227
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004228 camera3_capture_result_t result = {};
4229 result.frame_number = iter->frame_number;
4230 result.result = iter->resultMetadata;
4231 result.partial_result = iter->partial_result_cnt;
4232
4233 // If this pending buffer has result metadata, we may be able to send out shutter callback
4234 // and result metadata.
4235 if (iter->resultMetadata != nullptr) {
4236 if (!readyToSend) {
4237 // If any of the previous pending request is not ready, this pending request is
4238 // also not ready to send in order to keep shutter callbacks and result metadata
4239 // in order.
4240 iter++;
4241 continue;
4242 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004243 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004244 // If the result metadata belongs to a live request, notify errors for previous pending
4245 // live requests.
4246 mPendingLiveRequest--;
4247
4248 CameraMetadata dummyMetadata;
4249 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4250 result.result = dummyMetadata.release();
4251
4252 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004253
4254 // partial_result should be PARTIAL_RESULT_COUNT in case of
4255 // ERROR_RESULT.
4256 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4257 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004258 } else {
4259 iter++;
4260 continue;
4261 }
4262
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004263 result.output_buffers = nullptr;
4264 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004265 orchestrateResult(&result);
4266
4267 // For reprocessing, result metadata is the same as settings so do not free it here to
4268 // avoid double free.
4269 if (result.result != iter->settings) {
4270 free_camera_metadata((camera_metadata_t *)result.result);
4271 }
4272 iter->resultMetadata = nullptr;
4273 iter = erasePendingRequest(iter);
4274 }
4275
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004276 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004277 for (auto &iter : mPendingRequestsList) {
4278 // Increment pipeline depth for the following pending requests.
4279 if (iter.frame_number > frameNumber) {
4280 iter.pipeline_depth++;
4281 }
4282 }
4283 }
4284
4285 unblockRequestIfNecessary();
4286}
4287
Thierry Strudel3d639192016-09-09 11:52:26 -07004288/*===========================================================================
4289 * FUNCTION : unblockRequestIfNecessary
4290 *
4291 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4292 * that mMutex is held when this function is called.
4293 *
4294 * PARAMETERS :
4295 *
4296 * RETURN :
4297 *
4298 *==========================================================================*/
4299void QCamera3HardwareInterface::unblockRequestIfNecessary()
4300{
4301 // Unblock process_capture_request
4302 pthread_cond_signal(&mRequestCond);
4303}
4304
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004305/*===========================================================================
4306 * FUNCTION : isHdrSnapshotRequest
4307 *
4308 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4309 *
4310 * PARAMETERS : camera3 request structure
4311 *
4312 * RETURN : boolean decision variable
4313 *
4314 *==========================================================================*/
4315bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4316{
4317 if (request == NULL) {
4318 LOGE("Invalid request handle");
4319 assert(0);
4320 return false;
4321 }
4322
4323 if (!mForceHdrSnapshot) {
4324 CameraMetadata frame_settings;
4325 frame_settings = request->settings;
4326
4327 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4328 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4329 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4330 return false;
4331 }
4332 } else {
4333 return false;
4334 }
4335
4336 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4337 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4338 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4339 return false;
4340 }
4341 } else {
4342 return false;
4343 }
4344 }
4345
4346 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4347 if (request->output_buffers[i].stream->format
4348 == HAL_PIXEL_FORMAT_BLOB) {
4349 return true;
4350 }
4351 }
4352
4353 return false;
4354}
4355/*===========================================================================
4356 * FUNCTION : orchestrateRequest
4357 *
4358 * DESCRIPTION: Orchestrates a capture request from camera service
4359 *
4360 * PARAMETERS :
4361 * @request : request from framework to process
4362 *
4363 * RETURN : Error status codes
4364 *
4365 *==========================================================================*/
4366int32_t QCamera3HardwareInterface::orchestrateRequest(
4367 camera3_capture_request_t *request)
4368{
4369
4370 uint32_t originalFrameNumber = request->frame_number;
4371 uint32_t originalOutputCount = request->num_output_buffers;
4372 const camera_metadata_t *original_settings = request->settings;
4373 List<InternalRequest> internallyRequestedStreams;
4374 List<InternalRequest> emptyInternalList;
4375
4376 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4377 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4378 uint32_t internalFrameNumber;
4379 CameraMetadata modified_meta;
4380
4381
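        /* Rough shape of the orchestrated HDR sequence below (a sketch, not a
         * spec): metering-only internal requests on the blob stream let AE
         * settle at each exposure compensation step, the single framework-visible
         * capture runs on the originally requested streams, and additional
         * internal blob captures (with reprocess metadata) at the remaining
         * exposure steps feed the offline HDR post-processing. Internal frame
         * numbers come from _orchestrationDb, so their results and notifies are
         * dropped before reaching the framework. */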
4382 /* Add Blob channel to list of internally requested streams */
4383 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4384 if (request->output_buffers[i].stream->format
4385 == HAL_PIXEL_FORMAT_BLOB) {
4386 InternalRequest streamRequested;
4387 streamRequested.meteringOnly = 1;
4388 streamRequested.need_metadata = 0;
4389 streamRequested.stream = request->output_buffers[i].stream;
4390 internallyRequestedStreams.push_back(streamRequested);
4391 }
4392 }
4393 request->num_output_buffers = 0;
4394 auto itr = internallyRequestedStreams.begin();
4395
4396 /* Modify setting to set compensation */
4397 modified_meta = request->settings;
4398 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4399 uint8_t aeLock = 1;
4400 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4401 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4402 camera_metadata_t *modified_settings = modified_meta.release();
4403 request->settings = modified_settings;
4404
4405 /* Capture Settling & -2x frame */
4406 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4407 request->frame_number = internalFrameNumber;
4408 processCaptureRequest(request, internallyRequestedStreams);
4409
4410 request->num_output_buffers = originalOutputCount;
4411 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4412 request->frame_number = internalFrameNumber;
4413 processCaptureRequest(request, emptyInternalList);
4414 request->num_output_buffers = 0;
4415
4416 modified_meta = modified_settings;
4417 expCompensation = 0;
4418 aeLock = 1;
4419 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4420 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4421 modified_settings = modified_meta.release();
4422 request->settings = modified_settings;
4423
4424 /* Capture Settling & 0X frame */
4425
4426 itr = internallyRequestedStreams.begin();
4427 if (itr == internallyRequestedStreams.end()) {
4428 LOGE("Error Internally Requested Stream list is empty");
4429 assert(0);
4430 } else {
4431 itr->need_metadata = 0;
4432 itr->meteringOnly = 1;
4433 }
4434
4435 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4436 request->frame_number = internalFrameNumber;
4437 processCaptureRequest(request, internallyRequestedStreams);
4438
4439 itr = internallyRequestedStreams.begin();
4440 if (itr == internallyRequestedStreams.end()) {
4441 ALOGE("Error Internally Requested Stream list is empty");
4442 assert(0);
4443 } else {
4444 itr->need_metadata = 1;
4445 itr->meteringOnly = 0;
4446 }
4447
4448 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4449 request->frame_number = internalFrameNumber;
4450 processCaptureRequest(request, internallyRequestedStreams);
4451
4452 /* Capture 2X frame*/
4453 modified_meta = modified_settings;
4454 expCompensation = GB_HDR_2X_STEP_EV;
4455 aeLock = 1;
4456 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4457 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4458 modified_settings = modified_meta.release();
4459 request->settings = modified_settings;
4460
4461 itr = internallyRequestedStreams.begin();
4462 if (itr == internallyRequestedStreams.end()) {
4463 ALOGE("Error Internally Requested Stream list is empty");
4464 assert(0);
4465 } else {
4466 itr->need_metadata = 0;
4467 itr->meteringOnly = 1;
4468 }
4469 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4470 request->frame_number = internalFrameNumber;
4471 processCaptureRequest(request, internallyRequestedStreams);
4472
4473 itr = internallyRequestedStreams.begin();
4474 if (itr == internallyRequestedStreams.end()) {
4475 ALOGE("Error Internally Requested Stream list is empty");
4476 assert(0);
4477 } else {
4478 itr->need_metadata = 1;
4479 itr->meteringOnly = 0;
4480 }
4481
4482 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4483 request->frame_number = internalFrameNumber;
4484 processCaptureRequest(request, internallyRequestedStreams);
4485
4486
4487 /* Capture 2X on original streaming config*/
4488 internallyRequestedStreams.clear();
4489
4490 /* Restore original settings pointer */
4491 request->settings = original_settings;
4492 } else {
4493 uint32_t internalFrameNumber;
4494 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4495 request->frame_number = internalFrameNumber;
4496 return processCaptureRequest(request, internallyRequestedStreams);
4497 }
4498
4499 return NO_ERROR;
4500}
4501
4502/*===========================================================================
4503 * FUNCTION : orchestrateResult
4504 *
4505 * DESCRIPTION: Orchestrates a capture result to camera service
4506 *
4507 * PARAMETERS :
4508 * @result : capture result to be sent to the framework
4509 *
4510 * RETURN :
4511 *
4512 *==========================================================================*/
4513void QCamera3HardwareInterface::orchestrateResult(
4514 camera3_capture_result_t *result)
4515{
4516 uint32_t frameworkFrameNumber;
4517 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4518 frameworkFrameNumber);
4519 if (rc != NO_ERROR) {
4520 LOGE("Cannot find translated frameworkFrameNumber");
4521 assert(0);
4522 } else {
4523 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004524 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004525 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004526 if (result->result != NULL) {
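                // The metadata still carries the HAL-internal frame number in
                // ANDROID_SYNC_FRAME_NUMBER; rewrite it to the framework-visible
                // frame number before forwarding the result.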
Binhao Lin299ffc92017-04-27 11:22:47 -07004527 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4528 camera_metadata_entry_t entry;
4529 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4530 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004531 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004532 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4533 if (ret != OK)
4534 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004535 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004536 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004537 result->frame_number = frameworkFrameNumber;
4538 mCallbackOps->process_capture_result(mCallbackOps, result);
4539 }
4540 }
4541}
4542
4543/*===========================================================================
4544 * FUNCTION : orchestrateNotify
4545 *
4546 * DESCRIPTION: Orchestrates a notify to camera service
4547 *
4548 * PARAMETERS :
4549 * @notify_msg : notify message to be sent to the framework
4550 *
4551 * RETURN :
4552 *
4553 *==========================================================================*/
4554void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4555{
4556 uint32_t frameworkFrameNumber;
4557 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004558 int32_t rc = NO_ERROR;
4559
4560 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004561 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004562
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004563 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004564 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4565 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4566 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004567 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004568 LOGE("Cannot find translated frameworkFrameNumber");
4569 assert(0);
4570 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004571 }
4572 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004573
4574 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4575 LOGD("Internal Request drop the notifyCb");
4576 } else {
4577 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4578 mCallbackOps->notify(mCallbackOps, notify_msg);
4579 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004580}
4581
4582/*===========================================================================
4583 * FUNCTION : FrameNumberRegistry
4584 *
4585 * DESCRIPTION: Constructor
4586 *
4587 * PARAMETERS :
4588 *
4589 * RETURN :
4590 *
4591 *==========================================================================*/
4592FrameNumberRegistry::FrameNumberRegistry()
4593{
4594 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4595}
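// Note on usage (hypothetical numbers): a framework frame number such as 100 may
// be stored against an internal number like 800, while purely internal frames
// (e.g. HDR settling captures) map to EMPTY_FRAMEWORK_FRAME_NUMBER and are
// therefore dropped by orchestrateResult()/orchestrateNotify() instead of being
// reported upstream. Old entries are purged once they fall outside the
// FRAME_REGISTER_LRU_SIZE window.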
4596
4597/*===========================================================================
4598 * FUNCTION : ~FrameNumberRegistry
4599 *
4600 * DESCRIPTION: Destructor
4601 *
4602 * PARAMETERS :
4603 *
4604 * RETURN :
4605 *
4606 *==========================================================================*/
4607FrameNumberRegistry::~FrameNumberRegistry()
4608{
4609}
4610
4611/*===========================================================================
4612 * FUNCTION : purgeOldEntriesLocked
4613 *
4614 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4615 *
4616 * PARAMETERS :
4617 *
4618 * RETURN : NONE
4619 *
4620 *==========================================================================*/
4621void FrameNumberRegistry::purgeOldEntriesLocked()
4622{
4623 while (_register.begin() != _register.end()) {
4624 auto itr = _register.begin();
4625 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4626 _register.erase(itr);
4627 } else {
4628 return;
4629 }
4630 }
4631}
4632
4633/*===========================================================================
4634 * FUNCTION : allocStoreInternalFrameNumber
4635 *
4636 * DESCRIPTION: Method to record a framework request and associate a newly
4637 * generated internal frame number with it
4638 *
4639 * PARAMETERS :
4640 * @frameworkFrameNumber: Identifier given by the framework
4641 * @internalFrameNumber : Output parameter which will hold the newly
4642 * generated internal frame number
4643 *
4644 * RETURN : Error code
4645 *
4646 *==========================================================================*/
4647int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4648 uint32_t &internalFrameNumber)
4649{
4650 Mutex::Autolock lock(mRegistryLock);
4651 internalFrameNumber = _nextFreeInternalNumber++;
4652 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4653 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4654 purgeOldEntriesLocked();
4655 return NO_ERROR;
4656}
4657
4658/*===========================================================================
4659 * FUNCTION : generateStoreInternalFrameNumber
4660 *
4661 * DESCRIPTION: Method to generate a new internal frame number that has no
4662 * association with any framework request
4663 *
4664 * PARAMETERS :
4665 * @internalFrameNumber: Output parameter which will hold the newly generated
4666 * internal frame number
4667 *
4668 * RETURN : Error code
4669 *
4670 *==========================================================================*/
4671int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4672{
4673 Mutex::Autolock lock(mRegistryLock);
4674 internalFrameNumber = _nextFreeInternalNumber++;
4675 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4676 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4677 purgeOldEntriesLocked();
4678 return NO_ERROR;
4679}
4680
4681/*===========================================================================
4682 * FUNCTION : getFrameworkFrameNumber
4683 *
4684 * DESCRIPTION: Method to query the framework frame number given an internal one
4685 *
4686 * PARAMETERS :
4687 * @internalFrameNumber: Internal frame number to look up
4688 * @frameworkFrameNumber: Output parameter holding the framework frame number
4689 *
4690 * RETURN : Error code
4691 *
4692 *==========================================================================*/
4693int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4694 uint32_t &frameworkFrameNumber)
4695{
4696 Mutex::Autolock lock(mRegistryLock);
4697 auto itr = _register.find(internalFrameNumber);
4698 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004699 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004700 return -ENOENT;
4701 }
4702
4703 frameworkFrameNumber = itr->second;
4704 purgeOldEntriesLocked();
4705 return NO_ERROR;
4706}
Thierry Strudel3d639192016-09-09 11:52:26 -07004707
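/*===========================================================================
 * FUNCTION : fillPbStreamConfig
 *
 * DESCRIPTION: Fills an HDR+ (pbcamera) stream configuration from a channel
 * stream's info: dimensions, format, per-plane stride/scanline and
 * trailing padding.
 *
 * PARAMETERS :
 * @config : Output stream configuration to fill
 * @pbStreamId : HDR+ stream id to assign
 * @pbStreamFormat: HDR+ pixel format to assign
 * @channel : Channel owning the stream
 * @streamIndex : Index of the stream within the channel
 *
 * RETURN : Status code (OK on success)
 *==========================================================================*/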
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004708status_t QCamera3HardwareInterface::fillPbStreamConfig(
4709 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4710 QCamera3Channel *channel, uint32_t streamIndex) {
4711 if (config == nullptr) {
4712 LOGE("%s: config is null", __FUNCTION__);
4713 return BAD_VALUE;
4714 }
4715
4716 if (channel == nullptr) {
4717 LOGE("%s: channel is null", __FUNCTION__);
4718 return BAD_VALUE;
4719 }
4720
4721 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4722 if (stream == nullptr) {
4723 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4724 return NAME_NOT_FOUND;
4725 }
4726
4727 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4728 if (streamInfo == nullptr) {
4729 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4730 return NAME_NOT_FOUND;
4731 }
4732
4733 config->id = pbStreamId;
4734 config->image.width = streamInfo->dim.width;
4735 config->image.height = streamInfo->dim.height;
4736 config->image.padding = 0;
4737 config->image.format = pbStreamFormat;
4738
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004739 uint32_t totalPlaneSize = 0;
4740
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004741 // Fill plane information.
4742 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4743 pbcamera::PlaneConfiguration plane;
4744 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4745 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4746 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004747
4748 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004749 }
4750
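    // Report whatever the backend allocated beyond the packed planes
    // (stride * scanline summed over all planes) as trailing padding.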
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004751 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004752 return OK;
4753}
4754
Thierry Strudel3d639192016-09-09 11:52:26 -07004755/*===========================================================================
4756 * FUNCTION : processCaptureRequest
4757 *
4758 * DESCRIPTION: process a capture request from camera service
4759 *
4760 * PARAMETERS :
4761 * @request : request from framework to process
4762 *
4763 * RETURN :
4764 *
4765 *==========================================================================*/
4766int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004767 camera3_capture_request_t *request,
4768 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004769{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004770 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004771 int rc = NO_ERROR;
4772 int32_t request_id;
4773 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004774 bool isVidBufRequested = false;
4775 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004776 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004777
4778 pthread_mutex_lock(&mMutex);
4779
4780 // Validate current state
4781 switch (mState) {
4782 case CONFIGURED:
4783 case STARTED:
4784 /* valid state */
4785 break;
4786
4787 case ERROR:
4788 pthread_mutex_unlock(&mMutex);
4789 handleCameraDeviceError();
4790 return -ENODEV;
4791
4792 default:
4793 LOGE("Invalid state %d", mState);
4794 pthread_mutex_unlock(&mMutex);
4795 return -ENODEV;
4796 }
4797
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004798 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004799 if (rc != NO_ERROR) {
4800 LOGE("incoming request is not valid");
4801 pthread_mutex_unlock(&mMutex);
4802 return rc;
4803 }
4804
4805 meta = request->settings;
4806
4807 // For first capture request, send capture intent, and
4808 // stream on all streams
4809 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004810 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004811 // send an unconfigure to the backend so that the isp
4812 // resources are deallocated
4813 if (!mFirstConfiguration) {
4814 cam_stream_size_info_t stream_config_info;
4815 int32_t hal_version = CAM_HAL_V3;
4816 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4817 stream_config_info.buffer_info.min_buffers =
4818 MIN_INFLIGHT_REQUESTS;
4819 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004820 m_bIs4KVideo ? 0 :
4821 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004822 clear_metadata_buffer(mParameters);
4823 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4824 CAM_INTF_PARM_HAL_VERSION, hal_version);
4825 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4826 CAM_INTF_META_STREAM_INFO, stream_config_info);
4827 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4828 mParameters);
4829 if (rc < 0) {
4830 LOGE("set_parms for unconfigure failed");
4831 pthread_mutex_unlock(&mMutex);
4832 return rc;
4833 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004834
Thierry Strudel3d639192016-09-09 11:52:26 -07004835 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004836 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004837 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004838 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004839 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004840 property_get("persist.camera.is_type", is_type_value, "4");
4841 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4842 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4843 property_get("persist.camera.is_type_preview", is_type_value, "4");
4844 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4845 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004846
4847 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4848 int32_t hal_version = CAM_HAL_V3;
4849 uint8_t captureIntent =
4850 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4851 mCaptureIntent = captureIntent;
4852 clear_metadata_buffer(mParameters);
4853 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4854 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4855 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004856 if (mFirstConfiguration) {
4857 // configure instant AEC
4858 // Instant AEC is a session based parameter and it is needed only
4859 // once per complete session after open camera.
4860 // i.e. This is set only once for the first capture request, after open camera.
4861 setInstantAEC(meta);
4862 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004863 uint8_t fwkVideoStabMode=0;
4864 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4865 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4866 }
4867
Xue Tuecac74e2017-04-17 13:58:15 -07004868 // If the EIS setprop is enabled, turn EIS on only for video/preview streams
4869 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004870 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004871 int32_t vsMode;
4872 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4873 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4874 rc = BAD_VALUE;
4875 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004876 LOGD("setEis %d", setEis);
4877 bool eis3Supported = false;
4878 size_t count = IS_TYPE_MAX;
4879 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4880 for (size_t i = 0; i < count; i++) {
4881 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4882 eis3Supported = true;
4883 break;
4884 }
4885 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004886
4887 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004888 //it could either be 4 or 5 depending on the stream and video size
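        // In the loop below: preview streams use the persist.camera.is_type_preview
        // value, video streams use persist.camera.is_type (falling back from
        // EIS 3.0 to EIS 2.0 when the capability list does not advertise 3.0),
        // and all other streams get IS_TYPE_NONE.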
Thierry Strudel3d639192016-09-09 11:52:26 -07004889 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4890 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004891 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4892 is_type = isTypePreview;
4893 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4894 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4895 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004896 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004897 } else {
4898 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004899 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004900 } else {
4901 is_type = IS_TYPE_NONE;
4902 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004903 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004904 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004905 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4906 }
4907 }
4908
4909 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4910 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4911
Thierry Strudel54dc9782017-02-15 12:12:10 -08004912 //Disable tintless only if the property is set to 0
4913 memset(prop, 0, sizeof(prop));
4914 property_get("persist.camera.tintless.enable", prop, "1");
4915 int32_t tintless_value = atoi(prop);
4916
Thierry Strudel3d639192016-09-09 11:52:26 -07004917 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4918 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004919
Thierry Strudel3d639192016-09-09 11:52:26 -07004920 //Disable CDS for HFR mode or if DIS/EIS is on.
4921 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4922 //after every configure_stream
4923 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4924 (m_bIsVideo)) {
4925 int32_t cds = CAM_CDS_MODE_OFF;
4926 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4927 CAM_INTF_PARM_CDS_MODE, cds))
4928 LOGE("Failed to disable CDS for HFR mode");
4929
4930 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004931
4932 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4933 uint8_t* use_av_timer = NULL;
4934
4935 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004936 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004937 use_av_timer = &m_debug_avtimer;
4938 }
4939 else{
4940 use_av_timer =
4941 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004942 if (use_av_timer) {
4943 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4944 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004945 }
4946
4947 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4948 rc = BAD_VALUE;
4949 }
4950 }
4951
Thierry Strudel3d639192016-09-09 11:52:26 -07004952 setMobicat();
4953
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004954 uint8_t nrMode = 0;
4955 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4956 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4957 }
4958
Thierry Strudel3d639192016-09-09 11:52:26 -07004959 /* Set fps and hfr mode while sending meta stream info so that sensor
4960 * can configure appropriate streaming mode */
4961 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004962 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4963 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004964 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4965 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004966 if (rc == NO_ERROR) {
4967 int32_t max_fps =
4968 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004969 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004970 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4971 }
4972 /* For HFR, more buffers are dequeued upfront to improve the performance */
4973 if (mBatchSize) {
4974 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4975 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4976 }
4977 }
4978 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004979 LOGE("setHalFpsRange failed");
4980 }
4981 }
4982 if (meta.exists(ANDROID_CONTROL_MODE)) {
4983 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4984 rc = extractSceneMode(meta, metaMode, mParameters);
4985 if (rc != NO_ERROR) {
4986 LOGE("extractSceneMode failed");
4987 }
4988 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004989 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004990
Thierry Strudel04e026f2016-10-10 11:27:36 -07004991 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4992 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4993 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4994 rc = setVideoHdrMode(mParameters, vhdr);
4995 if (rc != NO_ERROR) {
4996 LOGE("setVideoHDR is failed");
4997 }
4998 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004999
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005000 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005001 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005002 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005003 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5004 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5005 sensorModeFullFov)) {
5006 rc = BAD_VALUE;
5007 }
5008 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005009 //TODO: validate the arguments, HSV scenemode should have only the
5010 //advertised fps ranges
5011
5012 /* Set the capture intent, HAL version, tintless, stream info,
5013 * and DIS enable parameters in the backend */
5014 LOGD("set_parms META_STREAM_INFO " );
5015 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005016 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5017 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005018 mStreamConfigInfo.type[i],
5019 mStreamConfigInfo.stream_sizes[i].width,
5020 mStreamConfigInfo.stream_sizes[i].height,
5021 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005022 mStreamConfigInfo.format[i],
5023 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005024 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005025
Thierry Strudel3d639192016-09-09 11:52:26 -07005026 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5027 mParameters);
5028 if (rc < 0) {
5029 LOGE("set_parms failed for hal version, stream info");
5030 }
5031
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005032 cam_sensor_mode_info_t sensorModeInfo = {};
5033 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005034 if (rc != NO_ERROR) {
5035 LOGE("Failed to get sensor output size");
5036 pthread_mutex_unlock(&mMutex);
5037 goto error_exit;
5038 }
5039
5040 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5041 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005042 sensorModeInfo.active_array_size.width,
5043 sensorModeInfo.active_array_size.height);
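        // The mapper translates crop/metering regions between the full active
        // pixel array (the framework's coordinate space) and the active array of
        // the selected sensor mode, which may be smaller for binned or cropped
        // readout modes.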
Thierry Strudel3d639192016-09-09 11:52:26 -07005044
5045 /* Set batchmode before initializing channel. Since registerBuffer
5046 * internally initializes some of the channels, better set batchmode
5047 * even before first register buffer */
5048 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5049 it != mStreamInfo.end(); it++) {
5050 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5051 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5052 && mBatchSize) {
5053 rc = channel->setBatchSize(mBatchSize);
5054 //Disable per frame map unmap for HFR/batchmode case
5055 rc |= channel->setPerFrameMapUnmap(false);
5056 if (NO_ERROR != rc) {
5057 LOGE("Channel init failed %d", rc);
5058 pthread_mutex_unlock(&mMutex);
5059 goto error_exit;
5060 }
5061 }
5062 }
5063
5064 //First initialize all streams
5065 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5066 it != mStreamInfo.end(); it++) {
5067 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005068
5069 /* Initial value of NR mode is needed before stream on */
5070 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005071 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5072 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005073 setEis) {
5074 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5075 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5076 is_type = mStreamConfigInfo.is_type[i];
5077 break;
5078 }
5079 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005080 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005081 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005082 rc = channel->initialize(IS_TYPE_NONE);
5083 }
5084 if (NO_ERROR != rc) {
5085 LOGE("Channel initialization failed %d", rc);
5086 pthread_mutex_unlock(&mMutex);
5087 goto error_exit;
5088 }
5089 }
5090
5091 if (mRawDumpChannel) {
5092 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5093 if (rc != NO_ERROR) {
5094 LOGE("Error: Raw Dump Channel init failed");
5095 pthread_mutex_unlock(&mMutex);
5096 goto error_exit;
5097 }
5098 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005099 if (mHdrPlusRawSrcChannel) {
5100 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5101 if (rc != NO_ERROR) {
5102 LOGE("Error: HDR+ RAW Source Channel init failed");
5103 pthread_mutex_unlock(&mMutex);
5104 goto error_exit;
5105 }
5106 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005107 if (mSupportChannel) {
5108 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5109 if (rc < 0) {
5110 LOGE("Support channel initialization failed");
5111 pthread_mutex_unlock(&mMutex);
5112 goto error_exit;
5113 }
5114 }
5115 if (mAnalysisChannel) {
5116 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5117 if (rc < 0) {
5118 LOGE("Analysis channel initialization failed");
5119 pthread_mutex_unlock(&mMutex);
5120 goto error_exit;
5121 }
5122 }
5123 if (mDummyBatchChannel) {
5124 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5125 if (rc < 0) {
5126 LOGE("mDummyBatchChannel setBatchSize failed");
5127 pthread_mutex_unlock(&mMutex);
5128 goto error_exit;
5129 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005130 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005131 if (rc < 0) {
5132 LOGE("mDummyBatchChannel initialization failed");
5133 pthread_mutex_unlock(&mMutex);
5134 goto error_exit;
5135 }
5136 }
5137
5138 // Set bundle info
5139 rc = setBundleInfo();
5140 if (rc < 0) {
5141 LOGE("setBundleInfo failed %d", rc);
5142 pthread_mutex_unlock(&mMutex);
5143 goto error_exit;
5144 }
5145
5146 //update settings from app here
5147 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5148 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5149 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5150 }
5151 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5152 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5153 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5154 }
5155 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5156 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5157 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5158
5159 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5160 (mLinkedCameraId != mCameraId) ) {
5161 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5162 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005163 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005164 goto error_exit;
5165 }
5166 }
5167
5168 // add bundle related cameras
5169 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5170 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005171 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5172 &m_pDualCamCmdPtr->bundle_info;
5173 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005174 if (mIsDeviceLinked)
5175 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5176 else
5177 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5178
5179 pthread_mutex_lock(&gCamLock);
5180
5181 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5182 LOGE("Dualcam: Invalid Session Id ");
5183 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005184 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005185 goto error_exit;
5186 }
5187
5188 if (mIsMainCamera == 1) {
5189 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5190 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005191 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005192 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005193 // related session id should be session id of linked session
5194 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5195 } else {
5196 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5197 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005198 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005199 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005200 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5201 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005202 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005203 pthread_mutex_unlock(&gCamLock);
5204
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005205 rc = mCameraHandle->ops->set_dual_cam_cmd(
5206 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005207 if (rc < 0) {
5208 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005209 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005210 goto error_exit;
5211 }
5212 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005213 goto no_error;
5214error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005215 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005216 return rc;
5217no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005218 mWokenUpByDaemon = false;
5219 mPendingLiveRequest = 0;
5220 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005221 }
5222
5223 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005224 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005225
5226 if (mFlushPerf) {
5227 //we cannot accept any requests during flush
5228 LOGE("process_capture_request cannot proceed during flush");
5229 pthread_mutex_unlock(&mMutex);
5230 return NO_ERROR; //should return an error
5231 }
5232
5233 if (meta.exists(ANDROID_REQUEST_ID)) {
5234 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5235 mCurrentRequestId = request_id;
5236 LOGD("Received request with id: %d", request_id);
5237 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5238        LOGE("Unable to find request id field, "
5239                "& no previous id available");
5240 pthread_mutex_unlock(&mMutex);
5241 return NAME_NOT_FOUND;
5242 } else {
5243 LOGD("Re-using old request id");
5244 request_id = mCurrentRequestId;
5245 }
5246
5247 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5248 request->num_output_buffers,
5249 request->input_buffer,
5250 frameNumber);
5251 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005252 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005253 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005254 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005255 uint32_t snapshotStreamId = 0;
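    // Walk the requested output buffers: wait on and close each acquire fence,
    // note whether a JPEG (blob) or depth request is present, and collect the
    // backend stream IDs touched by this capture request.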
5256 for (size_t i = 0; i < request->num_output_buffers; i++) {
5257 const camera3_stream_buffer_t& output = request->output_buffers[i];
5258 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5259
Emilian Peev7650c122017-01-19 08:24:33 -08005260 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5261 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005262 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005263 blob_request = 1;
5264 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5265 }
5266
5267 if (output.acquire_fence != -1) {
5268 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5269 close(output.acquire_fence);
5270 if (rc != OK) {
5271 LOGE("sync wait failed %d", rc);
5272 pthread_mutex_unlock(&mMutex);
5273 return rc;
5274 }
5275 }
5276
Emilian Peev0f3c3162017-03-15 12:57:46 +00005277 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5278 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005279 depthRequestPresent = true;
5280 continue;
5281 }
5282
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005283 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005284 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005285
5286 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5287 isVidBufRequested = true;
5288 }
5289 }
5290
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005291    //FIXME: Add checks to ensure no dups in validateCaptureRequest
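    // Internally requested streams (e.g. metering-only internal captures) must also
    // be added to the stream request list so the backend services them.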
5292 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5293 itr++) {
5294 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5295 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5296 channel->getStreamID(channel->getStreamTypeMask());
5297
5298 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5299 isVidBufRequested = true;
5300 }
5301 }
5302
Thierry Strudel3d639192016-09-09 11:52:26 -07005303 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005304 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005305 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005306 }
5307 if (blob_request && mRawDumpChannel) {
5308 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005309 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005310 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005311 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005312 }
5313
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005314 {
5315 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5316 // Request a RAW buffer if
5317 // 1. mHdrPlusRawSrcChannel is valid.
5318 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5319 // 3. There is no pending HDR+ request.
5320 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5321 mHdrPlusPendingRequests.size() == 0) {
5322 streamsArray.stream_request[streamsArray.num_streams].streamID =
5323 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5324 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5325 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005326 }
5327
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005328 //extract capture intent
5329 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5330 mCaptureIntent =
5331 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5332 }
5333
5334 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5335 mCacMode =
5336 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5337 }
5338
5339 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005340 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005341
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005342 {
5343 Mutex::Autolock l(gHdrPlusClientLock);
5344 // If this request has a still capture intent, try to submit an HDR+ request.
5345 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5346 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5347 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5348 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005349 }
5350
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005351 if (hdrPlusRequest) {
5352 // For a HDR+ request, just set the frame parameters.
5353 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5354 if (rc < 0) {
5355 LOGE("fail to set frame parameters");
5356 pthread_mutex_unlock(&mMutex);
5357 return rc;
5358 }
5359 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005360 /* Parse the settings:
5361 * - For every request in NORMAL MODE
5362 * - For every request in HFR mode during preview only case
5363 * - For first request of every batch in HFR mode during video
5364 * recording. In batchmode the same settings except frame number is
5365 * repeated in each request of the batch.
5366 */
5367 if (!mBatchSize ||
5368 (mBatchSize && !isVidBufRequested) ||
5369 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005370 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005371 if (rc < 0) {
5372 LOGE("fail to set frame parameters");
5373 pthread_mutex_unlock(&mMutex);
5374 return rc;
5375 }
5376 }
5377 /* For batchMode HFR, setFrameParameters is not called for every
5378 * request. But only frame number of the latest request is parsed.
5379 * Keep track of first and last frame numbers in a batch so that
5380 * metadata for the frame numbers of batch can be duplicated in
5381         * handleBatchMetadata */
5382 if (mBatchSize) {
5383 if (!mToBeQueuedVidBufs) {
5384 //start of the batch
5385 mFirstFrameNumberInBatch = request->frame_number;
5386 }
5387 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5388 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5389 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005390 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005391 return BAD_VALUE;
5392 }
5393 }
5394 if (mNeedSensorRestart) {
5395 /* Unlock the mutex as restartSensor waits on the channels to be
5396 * stopped, which in turn calls stream callback functions -
5397 * handleBufferWithLock and handleMetadataWithLock */
5398 pthread_mutex_unlock(&mMutex);
5399 rc = dynamicUpdateMetaStreamInfo();
5400 if (rc != NO_ERROR) {
5401 LOGE("Restarting the sensor failed");
5402 return BAD_VALUE;
5403 }
5404 mNeedSensorRestart = false;
5405 pthread_mutex_lock(&mMutex);
5406 }
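        // If instant AEC was requested earlier, restore normal AEC convergence for
        // this and subsequent frames.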
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005407 if(mResetInstantAEC) {
5408 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5409 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5410 mResetInstantAEC = false;
5411 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005412 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005413 if (request->input_buffer->acquire_fence != -1) {
5414 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5415 close(request->input_buffer->acquire_fence);
5416 if (rc != OK) {
5417 LOGE("input buffer sync wait failed %d", rc);
5418 pthread_mutex_unlock(&mMutex);
5419 return rc;
5420 }
5421 }
5422 }
5423
5424 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5425 mLastCustIntentFrmNum = frameNumber;
5426 }
5427 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005428 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005429 pendingRequestIterator latestRequest;
5430 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005431 pendingRequest.num_buffers = depthRequestPresent ?
5432 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005433 pendingRequest.request_id = request_id;
5434 pendingRequest.blob_request = blob_request;
5435 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005436 if (request->input_buffer) {
5437 pendingRequest.input_buffer =
5438 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5439 *(pendingRequest.input_buffer) = *(request->input_buffer);
5440 pInputBuffer = pendingRequest.input_buffer;
5441 } else {
5442 pendingRequest.input_buffer = NULL;
5443 pInputBuffer = NULL;
5444 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005445 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005446
5447 pendingRequest.pipeline_depth = 0;
5448 pendingRequest.partial_result_cnt = 0;
5449 extractJpegMetadata(mCurJpegMeta, request);
5450 pendingRequest.jpegMetadata = mCurJpegMeta;
5451 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005452 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005453 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5454 mHybridAeEnable =
5455 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5456 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005457
5458 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5459 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005460 /* DevCamDebug metadata processCaptureRequest */
5461 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5462 mDevCamDebugMetaEnable =
5463 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5464 }
5465 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5466 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005467
5468 //extract CAC info
5469 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5470 mCacMode =
5471 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5472 }
5473 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005474 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005475
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005476 // extract enableZsl info
5477 if (gExposeEnableZslKey) {
5478 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5479 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5480 mZslEnabled = pendingRequest.enableZsl;
5481 } else {
5482 pendingRequest.enableZsl = mZslEnabled;
5483 }
5484 }
5485
Thierry Strudel3d639192016-09-09 11:52:26 -07005486 PendingBuffersInRequest bufsForCurRequest;
5487 bufsForCurRequest.frame_number = frameNumber;
5488 // Mark current timestamp for the new request
5489 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005490 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005491
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005492 if (hdrPlusRequest) {
5493 // Save settings for this request.
5494 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5495 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5496
5497 // Add to pending HDR+ request queue.
5498 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5499 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5500
5501 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5502 }
5503
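    // Track every requested output buffer of this request in the pending request and
    // pending buffers map (depth blob buffers are skipped here; they are handled by
    // the depth channel).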
Thierry Strudel3d639192016-09-09 11:52:26 -07005504 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005505 if ((request->output_buffers[i].stream->data_space ==
5506 HAL_DATASPACE_DEPTH) &&
5507 (HAL_PIXEL_FORMAT_BLOB ==
5508 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005509 continue;
5510 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005511 RequestedBufferInfo requestedBuf;
5512 memset(&requestedBuf, 0, sizeof(requestedBuf));
5513 requestedBuf.stream = request->output_buffers[i].stream;
5514 requestedBuf.buffer = NULL;
5515 pendingRequest.buffers.push_back(requestedBuf);
5516
5517 // Add to buffer handle the pending buffers list
5518 PendingBufferInfo bufferInfo;
5519 bufferInfo.buffer = request->output_buffers[i].buffer;
5520 bufferInfo.stream = request->output_buffers[i].stream;
5521 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5522 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5523 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5524 frameNumber, bufferInfo.buffer,
5525 channel->getStreamTypeMask(), bufferInfo.stream->format);
5526 }
5527 // Add this request packet into mPendingBuffersMap
5528 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5529 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5530 mPendingBuffersMap.get_num_overall_buffers());
5531
5532 latestRequest = mPendingRequestsList.insert(
5533 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005534
5535 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5536 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005537 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005538 for (size_t i = 0; i < request->num_output_buffers; i++) {
5539 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5540 }
5541
Thierry Strudel3d639192016-09-09 11:52:26 -07005542 if(mFlush) {
5543 LOGI("mFlush is true");
5544 pthread_mutex_unlock(&mMutex);
5545 return NO_ERROR;
5546 }
5547
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005548 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5549 // channel.
5550 if (!hdrPlusRequest) {
5551 int indexUsed;
5552 // Notify metadata channel we receive a request
5553 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005554
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005555 if(request->input_buffer != NULL){
5556 LOGD("Input request, frame_number %d", frameNumber);
5557 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5558 if (NO_ERROR != rc) {
5559 LOGE("fail to set reproc parameters");
5560 pthread_mutex_unlock(&mMutex);
5561 return rc;
5562 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005563 }
5564
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005565 // Call request on other streams
5566 uint32_t streams_need_metadata = 0;
5567 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5568 for (size_t i = 0; i < request->num_output_buffers; i++) {
5569 const camera3_stream_buffer_t& output = request->output_buffers[i];
5570 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5571
5572 if (channel == NULL) {
5573 LOGW("invalid channel pointer for stream");
5574 continue;
5575 }
5576
5577 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5578 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5579 output.buffer, request->input_buffer, frameNumber);
5580 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005581 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005582 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5583 if (rc < 0) {
5584 LOGE("Fail to request on picture channel");
5585 pthread_mutex_unlock(&mMutex);
5586 return rc;
5587 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005588 } else {
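                    // Depth (blob) buffers are not sent to the backend as a stream request;
                    // just map the buffer so the depth data can be written into it later.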
Emilian Peev7650c122017-01-19 08:24:33 -08005589 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5590 assert(NULL != mDepthChannel);
5591 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005592
Emilian Peev7650c122017-01-19 08:24:33 -08005593 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5594 if (rc < 0) {
5595 LOGE("Fail to map on depth buffer");
5596 pthread_mutex_unlock(&mMutex);
5597 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005598 }
Emilian Peev7650c122017-01-19 08:24:33 -08005599 } else {
5600 LOGD("snapshot request with buffer %p, frame_number %d",
5601 output.buffer, frameNumber);
5602 if (!request->settings) {
5603 rc = channel->request(output.buffer, frameNumber,
5604 NULL, mPrevParameters, indexUsed);
5605 } else {
5606 rc = channel->request(output.buffer, frameNumber,
5607 NULL, mParameters, indexUsed);
5608 }
5609 if (rc < 0) {
5610 LOGE("Fail to request on picture channel");
5611 pthread_mutex_unlock(&mMutex);
5612 return rc;
5613 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005614
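                        // Record the buffer index the channel used for this stream so the
                        // backend consumes the matching buffer (free-run index when in
                        // constrained high-speed mode).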
Emilian Peev7650c122017-01-19 08:24:33 -08005615 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5616 uint32_t j = 0;
5617 for (j = 0; j < streamsArray.num_streams; j++) {
5618 if (streamsArray.stream_request[j].streamID == streamId) {
5619 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5620 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5621 else
5622 streamsArray.stream_request[j].buf_index = indexUsed;
5623 break;
5624 }
5625 }
5626 if (j == streamsArray.num_streams) {
5627 LOGE("Did not find matching stream to update index");
5628 assert(0);
5629 }
5630
5631 pendingBufferIter->need_metadata = true;
5632 streams_need_metadata++;
5633 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005634 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005635 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5636 bool needMetadata = false;
5637 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5638 rc = yuvChannel->request(output.buffer, frameNumber,
5639 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5640 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005641 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005642 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005643 pthread_mutex_unlock(&mMutex);
5644 return rc;
5645 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005646
5647 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5648 uint32_t j = 0;
5649 for (j = 0; j < streamsArray.num_streams; j++) {
5650 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005651 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5652 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5653 else
5654 streamsArray.stream_request[j].buf_index = indexUsed;
5655 break;
5656 }
5657 }
5658 if (j == streamsArray.num_streams) {
5659 LOGE("Did not find matching stream to update index");
5660 assert(0);
5661 }
5662
5663 pendingBufferIter->need_metadata = needMetadata;
5664 if (needMetadata)
5665 streams_need_metadata += 1;
5666 LOGD("calling YUV channel request, need_metadata is %d",
5667 needMetadata);
5668 } else {
5669 LOGD("request with buffer %p, frame_number %d",
5670 output.buffer, frameNumber);
5671
5672 rc = channel->request(output.buffer, frameNumber, indexUsed);
5673
5674 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5675 uint32_t j = 0;
5676 for (j = 0; j < streamsArray.num_streams; j++) {
5677 if (streamsArray.stream_request[j].streamID == streamId) {
5678 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5679 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5680 else
5681 streamsArray.stream_request[j].buf_index = indexUsed;
5682 break;
5683 }
5684 }
5685 if (j == streamsArray.num_streams) {
5686 LOGE("Did not find matching stream to update index");
5687 assert(0);
5688 }
5689
5690 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5691 && mBatchSize) {
5692 mToBeQueuedVidBufs++;
5693 if (mToBeQueuedVidBufs == mBatchSize) {
5694 channel->queueBatchBuf();
5695 }
5696 }
5697 if (rc < 0) {
5698 LOGE("request failed");
5699 pthread_mutex_unlock(&mMutex);
5700 return rc;
5701 }
5702 }
5703 pendingBufferIter++;
5704 }
5705
5706 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5707 itr++) {
5708 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5709
5710 if (channel == NULL) {
5711 LOGE("invalid channel pointer for stream");
5712 assert(0);
5713 return BAD_VALUE;
5714 }
5715
5716 InternalRequest requestedStream;
5717 requestedStream = (*itr);
5718
5719
5720 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5721 LOGD("snapshot request internally input buffer %p, frame_number %d",
5722 request->input_buffer, frameNumber);
5723 if(request->input_buffer != NULL){
5724 rc = channel->request(NULL, frameNumber,
5725 pInputBuffer, &mReprocMeta, indexUsed, true,
5726 requestedStream.meteringOnly);
5727 if (rc < 0) {
5728 LOGE("Fail to request on picture channel");
5729 pthread_mutex_unlock(&mMutex);
5730 return rc;
5731 }
5732 } else {
5733 LOGD("snapshot request with frame_number %d", frameNumber);
5734 if (!request->settings) {
5735 rc = channel->request(NULL, frameNumber,
5736 NULL, mPrevParameters, indexUsed, true,
5737 requestedStream.meteringOnly);
5738 } else {
5739 rc = channel->request(NULL, frameNumber,
5740 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5741 }
5742 if (rc < 0) {
5743 LOGE("Fail to request on picture channel");
5744 pthread_mutex_unlock(&mMutex);
5745 return rc;
5746 }
5747
5748 if ((*itr).meteringOnly != 1) {
5749 requestedStream.need_metadata = 1;
5750 streams_need_metadata++;
5751 }
5752 }
5753
5754 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5755 uint32_t j = 0;
5756 for (j = 0; j < streamsArray.num_streams; j++) {
5757 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005758 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5759 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5760 else
5761 streamsArray.stream_request[j].buf_index = indexUsed;
5762 break;
5763 }
5764 }
5765 if (j == streamsArray.num_streams) {
5766 LOGE("Did not find matching stream to update index");
5767 assert(0);
5768 }
5769
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005770 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005771 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005772 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005773 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005774 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005775 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005776 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005777
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005778 //If 2 streams have need_metadata set to true, fail the request, unless
5779 //we copy/reference count the metadata buffer
5780 if (streams_need_metadata > 1) {
5781 LOGE("not supporting request in which two streams requires"
5782 " 2 HAL metadata for reprocessing");
5783 pthread_mutex_unlock(&mMutex);
5784 return -EINVAL;
5785 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005786
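    // Decide how PDAF data is handled for this request: skip it by default when a
    // depth channel is configured, and honor the per-request PD data enable setting
    // when a depth buffer was actually requested.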
Emilian Peev656e4fa2017-06-02 16:47:04 +01005787 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5788 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5789 if (depthRequestPresent && mDepthChannel) {
5790 if (request->settings) {
5791 camera_metadata_ro_entry entry;
5792 if (find_camera_metadata_ro_entry(request->settings,
5793 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5794 if (entry.data.u8[0]) {
5795 pdafEnable = CAM_PD_DATA_ENABLED;
5796 } else {
5797 pdafEnable = CAM_PD_DATA_SKIP;
5798 }
5799 mDepthCloudMode = pdafEnable;
5800 } else {
5801 pdafEnable = mDepthCloudMode;
5802 }
5803 } else {
5804 pdafEnable = mDepthCloudMode;
5805 }
5806 }
5807
Emilian Peev7650c122017-01-19 08:24:33 -08005808 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5809 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5810 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5811 pthread_mutex_unlock(&mMutex);
5812 return BAD_VALUE;
5813 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005814
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005815 if (request->input_buffer == NULL) {
5816 /* Set the parameters to backend:
5817 * - For every request in NORMAL MODE
5818 * - For every request in HFR mode during preview only case
5819 * - Once every batch in HFR mode during video recording
5820 */
5821 if (!mBatchSize ||
5822 (mBatchSize && !isVidBufRequested) ||
5823 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5824 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5825 mBatchSize, isVidBufRequested,
5826 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005827
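            // For batched HFR recording, merge this request's stream list into
            // mBatchedStreamsArray so a single set_parms call covers every stream
            // requested during the batch.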
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005828 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5829 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5830 uint32_t m = 0;
5831 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5832 if (streamsArray.stream_request[k].streamID ==
5833 mBatchedStreamsArray.stream_request[m].streamID)
5834 break;
5835 }
5836 if (m == mBatchedStreamsArray.num_streams) {
5837 mBatchedStreamsArray.stream_request\
5838 [mBatchedStreamsArray.num_streams].streamID =
5839 streamsArray.stream_request[k].streamID;
5840 mBatchedStreamsArray.stream_request\
5841 [mBatchedStreamsArray.num_streams].buf_index =
5842 streamsArray.stream_request[k].buf_index;
5843 mBatchedStreamsArray.num_streams =
5844 mBatchedStreamsArray.num_streams + 1;
5845 }
5846 }
5847 streamsArray = mBatchedStreamsArray;
5848 }
5849 /* Update stream id of all the requested buffers */
5850 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5851 streamsArray)) {
5852 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005853 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005854 return BAD_VALUE;
5855 }
5856
5857 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5858 mParameters);
5859 if (rc < 0) {
5860 LOGE("set_parms failed");
5861 }
5862                /* reset to zero because the batch is queued */
5863 mToBeQueuedVidBufs = 0;
5864 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5865 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5866 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005867 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5868 uint32_t m = 0;
5869 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5870 if (streamsArray.stream_request[k].streamID ==
5871 mBatchedStreamsArray.stream_request[m].streamID)
5872 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005873 }
5874 if (m == mBatchedStreamsArray.num_streams) {
5875 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5876 streamID = streamsArray.stream_request[k].streamID;
5877 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5878 buf_index = streamsArray.stream_request[k].buf_index;
5879 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5880 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005881 }
5882 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005883 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005884
5885 // Start all streams after the first setting is sent, so that the
5886 // setting can be applied sooner: (0 + apply_delay)th frame.
5887 if (mState == CONFIGURED && mChannelHandle) {
5888 //Then start them.
5889 LOGH("Start META Channel");
5890 rc = mMetadataChannel->start();
5891 if (rc < 0) {
5892 LOGE("META channel start failed");
5893 pthread_mutex_unlock(&mMutex);
5894 return rc;
5895 }
5896
5897 if (mAnalysisChannel) {
5898 rc = mAnalysisChannel->start();
5899 if (rc < 0) {
5900 LOGE("Analysis channel start failed");
5901 mMetadataChannel->stop();
5902 pthread_mutex_unlock(&mMutex);
5903 return rc;
5904 }
5905 }
5906
5907 if (mSupportChannel) {
5908 rc = mSupportChannel->start();
5909 if (rc < 0) {
5910 LOGE("Support channel start failed");
5911 mMetadataChannel->stop();
5912                    /* Although support and analysis are mutually exclusive today,
5913                       stop it in any case for future proofing */
5914 if (mAnalysisChannel) {
5915 mAnalysisChannel->stop();
5916 }
5917 pthread_mutex_unlock(&mMutex);
5918 return rc;
5919 }
5920 }
5921 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5922 it != mStreamInfo.end(); it++) {
5923 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5924 LOGH("Start Processing Channel mask=%d",
5925 channel->getStreamTypeMask());
5926 rc = channel->start();
5927 if (rc < 0) {
5928 LOGE("channel start failed");
5929 pthread_mutex_unlock(&mMutex);
5930 return rc;
5931 }
5932 }
5933
5934 if (mRawDumpChannel) {
5935 LOGD("Starting raw dump stream");
5936 rc = mRawDumpChannel->start();
5937 if (rc != NO_ERROR) {
5938 LOGE("Error Starting Raw Dump Channel");
5939 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5940 it != mStreamInfo.end(); it++) {
5941 QCamera3Channel *channel =
5942 (QCamera3Channel *)(*it)->stream->priv;
5943 LOGH("Stopping Processing Channel mask=%d",
5944 channel->getStreamTypeMask());
5945 channel->stop();
5946 }
5947 if (mSupportChannel)
5948 mSupportChannel->stop();
5949 if (mAnalysisChannel) {
5950 mAnalysisChannel->stop();
5951 }
5952 mMetadataChannel->stop();
5953 pthread_mutex_unlock(&mMutex);
5954 return rc;
5955 }
5956 }
5957
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005958 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005959 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005960 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005961 if (rc != NO_ERROR) {
5962 LOGE("start_channel failed %d", rc);
5963 pthread_mutex_unlock(&mMutex);
5964 return rc;
5965 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005966
5967 {
5968 // Configure Easel for stream on.
5969 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005970
5971 // Now that sensor mode should have been selected, get the selected sensor mode
5972 // info.
5973 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5974 getCurrentSensorModeInfo(mSensorModeInfo);
5975
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005976 if (EaselManagerClientOpened) {
5977 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chen44abb642017-06-02 18:00:38 -07005978 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
5979 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005980 if (rc != OK) {
5981 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5982 mCameraId, mSensorModeInfo.op_pixel_clk);
5983 pthread_mutex_unlock(&mMutex);
5984 return rc;
5985 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005986 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005987 }
5988 }
5989
5990 // Start sensor streaming.
5991 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5992 mChannelHandle);
5993 if (rc != NO_ERROR) {
5994 LOGE("start_sensor_stream_on failed %d", rc);
5995 pthread_mutex_unlock(&mMutex);
5996 return rc;
5997 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005998 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005999 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006000 }
6001
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006002 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen3b630e52017-06-02 15:39:47 -07006003 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || mCameraId == 0) {
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006004 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen44abb642017-06-02 18:00:38 -07006005 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006006 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6007 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6008 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6009 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
6010 rc = enableHdrPlusModeLocked();
6011 if (rc != OK) {
6012 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6013 pthread_mutex_unlock(&mMutex);
6014 return rc;
6015 }
6016
6017 mFirstPreviewIntentSeen = true;
6018 }
6019 }
6020
Thierry Strudel3d639192016-09-09 11:52:26 -07006021 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6022
6023 mState = STARTED;
6024 // Added a timed condition wait
6025 struct timespec ts;
6026 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006027 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006028 if (rc < 0) {
6029 isValidTimeout = 0;
6030        LOGE("Error reading the monotonic clock!!");
6031 }
6032 else {
6033        // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006034 int64_t timeout = 5;
6035 {
6036 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6037 // If there is a pending HDR+ request, the following requests may be blocked until the
6038 // HDR+ request is done. So allow a longer timeout.
6039 if (mHdrPlusPendingRequests.size() > 0) {
6040 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6041 }
6042 }
6043 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006044 }
6045 //Block on conditional variable
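    // Throttle the caller: block until the number of in-flight requests drops below
    // mMinInFlightRequests, or until an error/deinit state or the timeout occurs.
    // Reprocess requests (with an input buffer) are not throttled.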
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006046 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006047 (mState != ERROR) && (mState != DEINIT)) {
6048 if (!isValidTimeout) {
6049 LOGD("Blocking on conditional wait");
6050 pthread_cond_wait(&mRequestCond, &mMutex);
6051 }
6052 else {
6053 LOGD("Blocking on timed conditional wait");
6054 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6055 if (rc == ETIMEDOUT) {
6056 rc = -ENODEV;
6057 LOGE("Unblocked on timeout!!!!");
6058 break;
6059 }
6060 }
6061 LOGD("Unblocked");
6062 if (mWokenUpByDaemon) {
6063 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006064 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006065 break;
6066 }
6067 }
6068 pthread_mutex_unlock(&mMutex);
6069
6070 return rc;
6071}
6072
6073/*===========================================================================
6074 * FUNCTION : dump
6075 *
6076 * DESCRIPTION: Dump pending requests, pending buffers and pending frame
6077 *              drops to the given file descriptor (dumpsys media.camera)
6078 *
6079 * PARAMETERS :
6080 *  @fd : file descriptor to write the dump into
6081 *
6082 * RETURN     : NONE
6082 *==========================================================================*/
6083void QCamera3HardwareInterface::dump(int fd)
6084{
6085 pthread_mutex_lock(&mMutex);
6086 dprintf(fd, "\n Camera HAL3 information Begin \n");
6087
6088 dprintf(fd, "\nNumber of pending requests: %zu \n",
6089 mPendingRequestsList.size());
6090 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6091 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6092 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6093 for(pendingRequestIterator i = mPendingRequestsList.begin();
6094 i != mPendingRequestsList.end(); i++) {
6095 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6096 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6097 i->input_buffer);
6098 }
6099 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6100 mPendingBuffersMap.get_num_overall_buffers());
6101 dprintf(fd, "-------+------------------\n");
6102 dprintf(fd, " Frame | Stream type mask \n");
6103 dprintf(fd, "-------+------------------\n");
6104 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6105 for(auto &j : req.mPendingBufferList) {
6106 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6107 dprintf(fd, " %5d | %11d \n",
6108 req.frame_number, channel->getStreamTypeMask());
6109 }
6110 }
6111 dprintf(fd, "-------+------------------\n");
6112
6113 dprintf(fd, "\nPending frame drop list: %zu\n",
6114 mPendingFrameDropList.size());
6115 dprintf(fd, "-------+-----------\n");
6116 dprintf(fd, " Frame | Stream ID \n");
6117 dprintf(fd, "-------+-----------\n");
6118 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6119 i != mPendingFrameDropList.end(); i++) {
6120 dprintf(fd, " %5d | %9d \n",
6121 i->frame_number, i->stream_ID);
6122 }
6123 dprintf(fd, "-------+-----------\n");
6124
6125 dprintf(fd, "\n Camera HAL3 information End \n");
6126
6127 /* use dumpsys media.camera as trigger to send update debug level event */
6128 mUpdateDebugLevel = true;
6129 pthread_mutex_unlock(&mMutex);
6130 return;
6131}
6132
6133/*===========================================================================
6134 * FUNCTION : flush
6135 *
6136 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6137 * conditionally restarts channels
6138 *
6139 * PARAMETERS :
6140 * @ restartChannels: re-start all channels
6141 *
6142 *
6143 * RETURN :
6144 * 0 on success
6145 * Error code on failure
6146 *==========================================================================*/
6147int QCamera3HardwareInterface::flush(bool restartChannels)
6148{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006149 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006150 int32_t rc = NO_ERROR;
6151
6152 LOGD("Unblocking Process Capture Request");
6153 pthread_mutex_lock(&mMutex);
6154 mFlush = true;
6155 pthread_mutex_unlock(&mMutex);
6156
6157 rc = stopAllChannels();
6158 // unlink of dualcam
6159 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006160 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6161 &m_pDualCamCmdPtr->bundle_info;
6162 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006163 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6164 pthread_mutex_lock(&gCamLock);
6165
6166 if (mIsMainCamera == 1) {
6167 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6168 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006169 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006170 // related session id should be session id of linked session
6171 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6172 } else {
6173 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6174 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006175 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006176 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6177 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006178 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006179 pthread_mutex_unlock(&gCamLock);
6180
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006181 rc = mCameraHandle->ops->set_dual_cam_cmd(
6182 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006183 if (rc < 0) {
6184 LOGE("Dualcam: Unlink failed, but still proceed to close");
6185 }
6186 }
6187
6188 if (rc < 0) {
6189 LOGE("stopAllChannels failed");
6190 return rc;
6191 }
6192 if (mChannelHandle) {
6193 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6194 mChannelHandle);
6195 }
6196
6197 // Reset bundle info
6198 rc = setBundleInfo();
6199 if (rc < 0) {
6200 LOGE("setBundleInfo failed %d", rc);
6201 return rc;
6202 }
6203
6204 // Mutex Lock
6205 pthread_mutex_lock(&mMutex);
6206
6207 // Unblock process_capture_request
6208 mPendingLiveRequest = 0;
6209 pthread_cond_signal(&mRequestCond);
6210
6211 rc = notifyErrorForPendingRequests();
6212 if (rc < 0) {
6213 LOGE("notifyErrorForPendingRequests failed");
6214 pthread_mutex_unlock(&mMutex);
6215 return rc;
6216 }
6217
6218 mFlush = false;
6219
6220 // Start the Streams/Channels
6221 if (restartChannels) {
6222 rc = startAllChannels();
6223 if (rc < 0) {
6224 LOGE("startAllChannels failed");
6225 pthread_mutex_unlock(&mMutex);
6226 return rc;
6227 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006228 if (mChannelHandle) {
6229 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006230 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006231 if (rc < 0) {
6232 LOGE("start_channel failed");
6233 pthread_mutex_unlock(&mMutex);
6234 return rc;
6235 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006236 }
6237 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006238 pthread_mutex_unlock(&mMutex);
6239
6240 return 0;
6241}
6242
6243/*===========================================================================
6244 * FUNCTION : flushPerf
6245 *
6246 * DESCRIPTION: This is the performance optimization version of flush that does
6247 * not use stream off, rather flushes the system
6248 *
6249 * PARAMETERS :
6250 *
6251 *
6252 * RETURN : 0 : success
6253 * -EINVAL: input is malformed (device is not valid)
6254 * -ENODEV: if the device has encountered a serious error
6255 *==========================================================================*/
6256int QCamera3HardwareInterface::flushPerf()
6257{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006258 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006259 int32_t rc = 0;
6260 struct timespec timeout;
6261 bool timed_wait = false;
6262
6263 pthread_mutex_lock(&mMutex);
6264 mFlushPerf = true;
6265 mPendingBuffersMap.numPendingBufsAtFlush =
6266 mPendingBuffersMap.get_num_overall_buffers();
6267 LOGD("Calling flush. Wait for %d buffers to return",
6268 mPendingBuffersMap.numPendingBufsAtFlush);
6269
6270 /* send the flush event to the backend */
6271 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6272 if (rc < 0) {
6273 LOGE("Error in flush: IOCTL failure");
6274 mFlushPerf = false;
6275 pthread_mutex_unlock(&mMutex);
6276 return -ENODEV;
6277 }
6278
6279 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6280 LOGD("No pending buffers in HAL, return flush");
6281 mFlushPerf = false;
6282 pthread_mutex_unlock(&mMutex);
6283 return rc;
6284 }
6285
6286 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006287 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006288 if (rc < 0) {
6289        LOGE("Error reading the monotonic clock, cannot use timed wait");
6290 } else {
6291 timeout.tv_sec += FLUSH_TIMEOUT;
6292 timed_wait = true;
6293 }
6294
6295 //Block on conditional variable
6296 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6297 LOGD("Waiting on mBuffersCond");
6298 if (!timed_wait) {
6299 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6300 if (rc != 0) {
6301 LOGE("pthread_cond_wait failed due to rc = %s",
6302 strerror(rc));
6303 break;
6304 }
6305 } else {
6306 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6307 if (rc != 0) {
6308 LOGE("pthread_cond_timedwait failed due to rc = %s",
6309 strerror(rc));
6310 break;
6311 }
6312 }
6313 }
6314 if (rc != 0) {
6315 mFlushPerf = false;
6316 pthread_mutex_unlock(&mMutex);
6317 return -ENODEV;
6318 }
6319
6320 LOGD("Received buffers, now safe to return them");
6321
6322 //make sure the channels handle flush
6323 //currently only required for the picture channel to release snapshot resources
6324 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6325 it != mStreamInfo.end(); it++) {
6326 QCamera3Channel *channel = (*it)->channel;
6327 if (channel) {
6328 rc = channel->flush();
6329 if (rc) {
6330 LOGE("Flushing the channels failed with error %d", rc);
6331 // even though the channel flush failed we need to continue and
6332 // return the buffers we have to the framework, however the return
6333 // value will be an error
6334 rc = -ENODEV;
6335 }
6336 }
6337 }
6338
6339 /* notify the frameworks and send errored results */
6340 rc = notifyErrorForPendingRequests();
6341 if (rc < 0) {
6342 LOGE("notifyErrorForPendingRequests failed");
6343 pthread_mutex_unlock(&mMutex);
6344 return rc;
6345 }
6346
6347 //unblock process_capture_request
6348 mPendingLiveRequest = 0;
6349 unblockRequestIfNecessary();
6350
6351 mFlushPerf = false;
6352 pthread_mutex_unlock(&mMutex);
6353 LOGD ("Flush Operation complete. rc = %d", rc);
6354 return rc;
6355}
6356
6357/*===========================================================================
6358 * FUNCTION : handleCameraDeviceError
6359 *
6360 * DESCRIPTION: This function calls internal flush and notifies the error to
6361 * framework and updates the state variable.
6362 *
6363 * PARAMETERS : None
6364 *
6365 * RETURN : NO_ERROR on Success
6366 * Error code on failure
6367 *==========================================================================*/
6368int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6369{
6370 int32_t rc = NO_ERROR;
6371
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006372 {
6373 Mutex::Autolock lock(mFlushLock);
6374 pthread_mutex_lock(&mMutex);
6375 if (mState != ERROR) {
6376 //if mState != ERROR, nothing to be done
6377 pthread_mutex_unlock(&mMutex);
6378 return NO_ERROR;
6379 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006380 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006381
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006382 rc = flush(false /* restart channels */);
6383 if (NO_ERROR != rc) {
6384 LOGE("internal flush to handle mState = ERROR failed");
6385 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006386
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006387 pthread_mutex_lock(&mMutex);
6388 mState = DEINIT;
6389 pthread_mutex_unlock(&mMutex);
6390 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006391
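    // Notify the framework of the fatal device error so it can close this camera device.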
6392 camera3_notify_msg_t notify_msg;
6393 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6394 notify_msg.type = CAMERA3_MSG_ERROR;
6395 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6396 notify_msg.message.error.error_stream = NULL;
6397 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006398 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006399
6400 return rc;
6401}
6402
6403/*===========================================================================
6404 * FUNCTION : captureResultCb
6405 *
6406 * DESCRIPTION: Callback handler for all capture result
6407 * (streams, as well as metadata)
6408 *
6409 * PARAMETERS :
6410 * @metadata : metadata information
6411 * @buffer : actual gralloc buffer to be returned to frameworks.
6412 * NULL if metadata.
6413 *
6414 * RETURN : NONE
6415 *==========================================================================*/
6416void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6417 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6418{
6419 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006420 pthread_mutex_lock(&mMutex);
6421 uint8_t batchSize = mBatchSize;
6422 pthread_mutex_unlock(&mMutex);
6423 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006424 handleBatchMetadata(metadata_buf,
6425 true /* free_and_bufdone_meta_buf */);
6426 } else { /* mBatchSize = 0 */
6427 hdrPlusPerfLock(metadata_buf);
6428 pthread_mutex_lock(&mMutex);
6429 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006430 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006431 true /* last urgent frame of batch metadata */,
6432 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006433 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006434 pthread_mutex_unlock(&mMutex);
6435 }
6436 } else if (isInputBuffer) {
6437 pthread_mutex_lock(&mMutex);
6438 handleInputBufferWithLock(frame_number);
6439 pthread_mutex_unlock(&mMutex);
6440 } else {
6441 pthread_mutex_lock(&mMutex);
6442 handleBufferWithLock(buffer, frame_number);
6443 pthread_mutex_unlock(&mMutex);
6444 }
6445 return;
6446}
6447
6448/*===========================================================================
6449 * FUNCTION : getReprocessibleOutputStreamId
6450 *
6451 * DESCRIPTION: Get source output stream id for the input reprocess stream
6452 * based on size and format, which would be the largest
6453 * output stream if an input stream exists.
6454 *
6455 * PARAMETERS :
6456 * @id : return the stream id if found
6457 *
6458 * RETURN : int32_t type of status
6459 * NO_ERROR -- success
6460 *              non-zero failure code
6461 *==========================================================================*/
6462int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6463{
6464 /* check if any output or bidirectional stream with the same size and format
6465 and return that stream */
6466 if ((mInputStreamInfo.dim.width > 0) &&
6467 (mInputStreamInfo.dim.height > 0)) {
6468 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6469 it != mStreamInfo.end(); it++) {
6470
6471 camera3_stream_t *stream = (*it)->stream;
6472 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6473 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6474 (stream->format == mInputStreamInfo.format)) {
6475 // Usage flag for an input stream and the source output stream
6476 // may be different.
6477 LOGD("Found reprocessible output stream! %p", *it);
6478 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6479 stream->usage, mInputStreamInfo.usage);
6480
6481 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6482 if (channel != NULL && channel->mStreams[0]) {
6483 id = channel->mStreams[0]->getMyServerID();
6484 return NO_ERROR;
6485 }
6486 }
6487 }
6488 } else {
6489 LOGD("No input stream, so no reprocessible output stream");
6490 }
6491 return NAME_NOT_FOUND;
6492}
6493
6494/*===========================================================================
6495 * FUNCTION : lookupFwkName
6496 *
6497 * DESCRIPTION: In case the enum is not same in fwk and backend
6498 *              make sure the parameter is correctly propagated
6499 *
6500 * PARAMETERS :
6501 * @arr : map between the two enums
6502 * @len : len of the map
6503 * @hal_name : name of the hal_parm to map
6504 *
6505 * RETURN : int type of status
6506 * fwk_name -- success
6507 *              non-zero failure code
6508 *==========================================================================*/
6509template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6510 size_t len, halType hal_name)
6511{
6512
6513 for (size_t i = 0; i < len; i++) {
6514 if (arr[i].hal_name == hal_name) {
6515 return arr[i].fwk_name;
6516 }
6517 }
6518
6519    /* Not finding a matching framework type is not necessarily an error.
6520     * This happens when mm-camera supports more attributes than the
6521     * framework does */
6522 LOGH("Cannot find matching framework type");
6523 return NAME_NOT_FOUND;
6524}
6525
6526/*===========================================================================
6527 * FUNCTION : lookupHalName
6528 *
6529 * DESCRIPTION: In case the enum is not same in fwk and backend
6530 *              make sure the parameter is correctly propagated
6531 *
6532 * PARAMETERS :
6533 * @arr : map between the two enums
6534 * @len : len of the map
6535 *   @fwk_name : name of the framework parameter to map
6536 *
6537 * RETURN : int32_t type of status
6538 * hal_name -- success
6539 *              non-zero failure code
6540 *==========================================================================*/
6541template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6542 size_t len, fwkType fwk_name)
6543{
6544 for (size_t i = 0; i < len; i++) {
6545 if (arr[i].fwk_name == fwk_name) {
6546 return arr[i].hal_name;
6547 }
6548 }
6549
6550 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6551 return NAME_NOT_FOUND;
6552}
6553
6554/*===========================================================================
6555 * FUNCTION : lookupProp
6556 *
6557 * DESCRIPTION: lookup a value by its name
6558 *
6559 * PARAMETERS :
6560 * @arr : map between the two enums
6561 * @len : size of the map
6562 * @name : name to be looked up
6563 *
6564 * RETURN : Value if found
6565 * CAM_CDS_MODE_MAX if not found
6566 *==========================================================================*/
6567template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6568 size_t len, const char *name)
6569{
6570 if (name) {
6571 for (size_t i = 0; i < len; i++) {
6572 if (!strcmp(arr[i].desc, name)) {
6573 return arr[i].val;
6574 }
6575 }
6576 }
6577 return CAM_CDS_MODE_MAX;
6578}
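/* Illustrative use (a sketch; the property key and the CDS_MAP table name are
 * assumptions for illustration only):
 *
 *     char prop[PROPERTY_VALUE_MAX];
 *     memset(prop, 0, sizeof(prop));
 *     property_get("persist.camera.CDS", prop, "Auto");
 *     cam_cds_mode_type_t cds_mode =
 *             lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
 *     if (CAM_CDS_MODE_MAX == cds_mode) {
 *         // name not found in the map; fall back to a default
 *     }
 */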
6579
6580/*===========================================================================
6581 * FUNCTION   : translateFromHalMetadata
6582 * DESCRIPTION: Translate HAL/backend metadata into the camera_metadata_t
6583 *              format expected by the framework
6584 * PARAMETERS :
6585 * @metadata : metadata information from callback
6586 * @timestamp: metadata buffer timestamp
6587 * @request_id: request id
6588 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006589 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006590 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6591 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006592 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006593 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6594 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006595 *
6596 * RETURN : camera_metadata_t*
6597 * metadata in a format specified by fwk
6598 *==========================================================================*/
6599camera_metadata_t*
6600QCamera3HardwareInterface::translateFromHalMetadata(
6601 metadata_buffer_t *metadata,
6602 nsecs_t timestamp,
6603 int32_t request_id,
6604 const CameraMetadata& jpegMetadata,
6605 uint8_t pipeline_depth,
6606 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006607 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006608 /* DevCamDebug metadata translateFromHalMetadata argument */
6609 uint8_t DevCamDebug_meta_enable,
6610 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006611 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006612 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006613 bool lastMetadataInBatch,
6614 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006615{
6616 CameraMetadata camMetadata;
6617 camera_metadata_t *resultMetadata;
6618
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006619 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006620 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6621 * Timestamp is needed because it's used for shutter notify calculation.
6622 * */
6623 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6624 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006625 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006626 }
6627
Thierry Strudel3d639192016-09-09 11:52:26 -07006628 if (jpegMetadata.entryCount())
6629 camMetadata.append(jpegMetadata);
6630
6631 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6632 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6633 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6634 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006635 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006636 if (mBatchSize == 0) {
6637 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6638 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6639 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006640
Samuel Ha68ba5172016-12-15 18:41:12 -08006641 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6642    // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6643 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6644 // DevCamDebug metadata translateFromHalMetadata AF
6645 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6646 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6647 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6648 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6649 }
6650 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6651 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6652 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6653 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6654 }
6655 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6656 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6657 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6658 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6659 }
6660 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6661 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6662 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6663 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6664 }
6665 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6666 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6667 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6668 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6669 }
6670 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6671 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6672 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6673 *DevCamDebug_af_monitor_pdaf_target_pos;
6674 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6675 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6676 }
6677 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6678 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6679 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6680 *DevCamDebug_af_monitor_pdaf_confidence;
6681 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6682 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6683 }
6684 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6685 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6686 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6687 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6688 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6689 }
6690 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6691 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6692 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6693 *DevCamDebug_af_monitor_tof_target_pos;
6694 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6695 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6696 }
6697 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6698 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6699 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6700 *DevCamDebug_af_monitor_tof_confidence;
6701 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6702 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6703 }
6704 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6705 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6706 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6707 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6708 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6709 }
6710 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6711 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6712 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6713 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6714 &fwk_DevCamDebug_af_monitor_type_select, 1);
6715 }
6716 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6717 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6718 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6719 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6720 &fwk_DevCamDebug_af_monitor_refocus, 1);
6721 }
6722 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6723 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6724 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6725 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6726 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6727 }
6728 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6729 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6730 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6731 *DevCamDebug_af_search_pdaf_target_pos;
6732 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6733 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6734 }
6735 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6736 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6737 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6738 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6739 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6740 }
6741 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6742 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6743 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6744 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6745 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6746 }
6747 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6748 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6749 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6750 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6751 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6752 }
6753 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6754 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6755 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6756 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6757 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6758 }
6759 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6760 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6761 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6762 *DevCamDebug_af_search_tof_target_pos;
6763 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6764 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6765 }
6766 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6767 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6768 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6769 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6770 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6771 }
6772 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6773 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6774 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6775 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6776 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6777 }
6778 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6779 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6780 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6781 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6782 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6783 }
6784 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6785 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6786 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6787 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6788 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6789 }
6790 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6791 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6792 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6793 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6794 &fwk_DevCamDebug_af_search_type_select, 1);
6795 }
6796 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6797 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6798 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6799 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6800 &fwk_DevCamDebug_af_search_next_pos, 1);
6801 }
6802 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6803 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6804 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6805 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6806 &fwk_DevCamDebug_af_search_target_pos, 1);
6807 }
6808 // DevCamDebug metadata translateFromHalMetadata AEC
6809 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6810 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6811 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6812 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6813 }
6814 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6815 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6816 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6817 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6818 }
6819 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6820 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6821 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6822 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6823 }
6824 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6825 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6826 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6827 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6828 }
6829 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6830 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6831 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6832 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6833 }
6834 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6835 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6836 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6837 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6838 }
6839 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6840 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6841 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6842 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6843 }
6844 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6845 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6846 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6847 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6848 }
Samuel Ha34229982017-02-17 13:51:11 -08006849 // DevCamDebug metadata translateFromHalMetadata zzHDR
6850 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6851 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6852 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6853 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6854 }
6855 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6856 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006857 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006858 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6859 }
6860 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6861 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6862 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6863 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6864 }
6865 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6866 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006867 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006868 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6869 }
6870 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6871 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6872 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6873 *DevCamDebug_aec_hdr_sensitivity_ratio;
6874 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6875 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6876 }
6877 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6878 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6879 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6880 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6881 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6882 }
6883 // DevCamDebug metadata translateFromHalMetadata ADRC
6884 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6885 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6886 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6887 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6888 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6889 }
6890 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6891 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6892 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6893 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6894 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6895 }
6896 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6897 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6898 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6899 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6900 }
6901 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6902 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6903 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6904 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6905 }
6906 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6907 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6908 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6909 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6910 }
6911 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6912 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6913 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6914 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6915 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006916 // DevCamDebug metadata translateFromHalMetadata AWB
6917 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6918 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6919 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6920 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6921 }
6922 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6923 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6924 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6925 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6926 }
6927 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6928 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6929 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6930 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6931 }
6932 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6933 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6934 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6935 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6936 }
6937 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6938 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6939 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6940 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6941 }
6942 }
6943 // atrace_end(ATRACE_TAG_ALWAYS);
6944
Thierry Strudel3d639192016-09-09 11:52:26 -07006945 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6946 int64_t fwk_frame_number = *frame_number;
6947 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6948 }
6949
6950 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6951 int32_t fps_range[2];
6952 fps_range[0] = (int32_t)float_range->min_fps;
6953 fps_range[1] = (int32_t)float_range->max_fps;
6954 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6955 fps_range, 2);
6956 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6957 fps_range[0], fps_range[1]);
6958 }
6959
6960 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6961 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6962 }
6963
6964 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6965        int val = lookupFwkName(SCENE_MODES_MAP,
6966 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6967 *sceneMode);
6968 if (NAME_NOT_FOUND != val) {
6969 uint8_t fwkSceneMode = (uint8_t)val;
6970 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6971 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6972 fwkSceneMode);
6973 }
6974 }
6975
6976 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6977 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6978 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6979 }
6980
6981 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6982 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6983 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6984 }
6985
6986 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6987 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6988 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6989 }
6990
6991 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6992 CAM_INTF_META_EDGE_MODE, metadata) {
6993 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6994 }
6995
6996 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6997 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6998 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6999 }
7000
7001 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7002 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7003 }
7004
7005 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7006 if (0 <= *flashState) {
7007 uint8_t fwk_flashState = (uint8_t) *flashState;
7008 if (!gCamCapability[mCameraId]->flash_available) {
7009 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7010 }
7011 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7012 }
7013 }
7014
7015 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7016 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7017 if (NAME_NOT_FOUND != val) {
7018 uint8_t fwk_flashMode = (uint8_t)val;
7019 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7020 }
7021 }
7022
7023 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7024 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7025 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7026 }
7027
7028 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7029 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7030 }
7031
7032 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7033 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7034 }
7035
7036 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7037 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7038 }
7039
7040 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7041 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7042 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7043 }
7044
7045 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7046 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7047 LOGD("fwk_videoStab = %d", fwk_videoStab);
7048 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7049 } else {
7050        // Regardless of whether video stabilization is supported, CTS expects the EIS
7051        // result to be non-NULL, so hardcode the video stabilization result to OFF mode.
7052 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7053 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007054 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007055 }
7056
7057 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7058 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7059 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7060 }
7061
7062 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7063 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7064 }
7065
Thierry Strudel3d639192016-09-09 11:52:26 -07007066 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7067 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007068 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007069
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007070 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7071 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007072
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007073 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007074 blackLevelAppliedPattern->cam_black_level[0],
7075 blackLevelAppliedPattern->cam_black_level[1],
7076 blackLevelAppliedPattern->cam_black_level[2],
7077 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007078 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7079 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007080
7081#ifndef USE_HAL_3_3
7082 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307083        // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007084        // depth space, i.e. divide by 2^(14-10) = 16.
Jason Lee4f3d96e2017-02-28 19:24:14 +05307085 fwk_blackLevelInd[0] /= 16.0;
7086 fwk_blackLevelInd[1] /= 16.0;
7087 fwk_blackLevelInd[2] /= 16.0;
7088 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007089 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7090 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007091#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007092 }
7093
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007094#ifndef USE_HAL_3_3
7095 // Fixed whitelevel is used by ISP/Sensor
7096 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7097 &gCamCapability[mCameraId]->white_level, 1);
7098#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007099
7100 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7101 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7102 int32_t scalerCropRegion[4];
7103 scalerCropRegion[0] = hScalerCropRegion->left;
7104 scalerCropRegion[1] = hScalerCropRegion->top;
7105 scalerCropRegion[2] = hScalerCropRegion->width;
7106 scalerCropRegion[3] = hScalerCropRegion->height;
7107
7108 // Adjust crop region from sensor output coordinate system to active
7109 // array coordinate system.
7110 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7111 scalerCropRegion[2], scalerCropRegion[3]);
7112
7113 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7114 }
7115
7116 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7117 LOGD("sensorExpTime = %lld", *sensorExpTime);
7118 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7119 }
7120
7121    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7122            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7123        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7124        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7125 }
7126
7127 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7128 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7129 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7130 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7131 sensorRollingShutterSkew, 1);
7132 }
7133
7134 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7135 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7136 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7137
7138 //calculate the noise profile based on sensitivity
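        // Per the ANDROID_SENSOR_NOISE_PROFILE definition, (S, O) are the
        // coefficients of a linear noise model, variance ~= S * signal + O,
        // reported as one (S, O) pair per color channel.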
7139 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7140 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7141 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7142 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7143 noise_profile[i] = noise_profile_S;
7144 noise_profile[i+1] = noise_profile_O;
7145 }
7146 LOGD("noise model entry (S, O) is (%f, %f)",
7147 noise_profile_S, noise_profile_O);
7148 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7149 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7150 }
7151
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007152#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007153 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007154 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007155 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007156 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007157 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7158 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7159 }
7160 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007161#endif
7162
Thierry Strudel3d639192016-09-09 11:52:26 -07007163 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7164 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7165 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7166 }
7167
7168 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7169 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7170 *faceDetectMode);
7171 if (NAME_NOT_FOUND != val) {
7172 uint8_t fwk_faceDetectMode = (uint8_t)val;
7173 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7174
7175 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7176 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7177 CAM_INTF_META_FACE_DETECTION, metadata) {
7178 uint8_t numFaces = MIN(
7179 faceDetectionInfo->num_faces_detected, MAX_ROI);
7180 int32_t faceIds[MAX_ROI];
7181 uint8_t faceScores[MAX_ROI];
7182 int32_t faceRectangles[MAX_ROI * 4];
7183 int32_t faceLandmarks[MAX_ROI * 6];
7184 size_t j = 0, k = 0;
7185
7186 for (size_t i = 0; i < numFaces; i++) {
7187 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7188 // Adjust crop region from sensor output coordinate system to active
7189 // array coordinate system.
7190 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7191 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7192 rect.width, rect.height);
7193
7194 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7195 faceRectangles+j, -1);
7196
Jason Lee8ce36fa2017-04-19 19:40:37 -07007197 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7198 "bottom-right (%d, %d)",
7199 faceDetectionInfo->frame_id, i,
7200 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7201 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7202
Thierry Strudel3d639192016-09-09 11:52:26 -07007203 j+= 4;
7204 }
7205 if (numFaces <= 0) {
7206 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7207 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7208 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7209 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7210 }
7211
7212 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7213 numFaces);
7214 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7215 faceRectangles, numFaces * 4U);
7216 if (fwk_faceDetectMode ==
7217 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7218 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7219 CAM_INTF_META_FACE_LANDMARK, metadata) {
7220
7221 for (size_t i = 0; i < numFaces; i++) {
7222 // Map the co-ordinate sensor output coordinate system to active
7223 // array coordinate system.
7224 mCropRegionMapper.toActiveArray(
7225 landmarks->face_landmarks[i].left_eye_center.x,
7226 landmarks->face_landmarks[i].left_eye_center.y);
7227 mCropRegionMapper.toActiveArray(
7228 landmarks->face_landmarks[i].right_eye_center.x,
7229 landmarks->face_landmarks[i].right_eye_center.y);
7230 mCropRegionMapper.toActiveArray(
7231 landmarks->face_landmarks[i].mouth_center.x,
7232 landmarks->face_landmarks[i].mouth_center.y);
7233
7234 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007235
7236 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7237 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7238 faceDetectionInfo->frame_id, i,
7239 faceLandmarks[k + LEFT_EYE_X],
7240 faceLandmarks[k + LEFT_EYE_Y],
7241 faceLandmarks[k + RIGHT_EYE_X],
7242 faceLandmarks[k + RIGHT_EYE_Y],
7243 faceLandmarks[k + MOUTH_X],
7244 faceLandmarks[k + MOUTH_Y]);
7245
Thierry Strudel04e026f2016-10-10 11:27:36 -07007246 k+= TOTAL_LANDMARK_INDICES;
7247 }
7248 } else {
7249 for (size_t i = 0; i < numFaces; i++) {
7250 setInvalidLandmarks(faceLandmarks+k);
7251 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007252 }
7253 }
7254
Jason Lee49619db2017-04-13 12:07:22 -07007255 for (size_t i = 0; i < numFaces; i++) {
7256 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7257
7258 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7259 faceDetectionInfo->frame_id, i, faceIds[i]);
7260 }
7261
Thierry Strudel3d639192016-09-09 11:52:26 -07007262 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7263 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7264 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007265 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007266 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7267 CAM_INTF_META_FACE_BLINK, metadata) {
7268 uint8_t detected[MAX_ROI];
7269 uint8_t degree[MAX_ROI * 2];
7270 for (size_t i = 0; i < numFaces; i++) {
7271 detected[i] = blinks->blink[i].blink_detected;
7272 degree[2 * i] = blinks->blink[i].left_blink;
7273 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007274
Jason Lee49619db2017-04-13 12:07:22 -07007275 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7276 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7277 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7278 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007279 }
7280 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7281 detected, numFaces);
7282 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7283 degree, numFaces * 2);
7284 }
7285 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7286 CAM_INTF_META_FACE_SMILE, metadata) {
7287 uint8_t degree[MAX_ROI];
7288 uint8_t confidence[MAX_ROI];
7289 for (size_t i = 0; i < numFaces; i++) {
7290 degree[i] = smiles->smile[i].smile_degree;
7291 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007292
Jason Lee49619db2017-04-13 12:07:22 -07007293 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7294 "smile_degree=%d, smile_score=%d",
7295 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007296 }
7297 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7298 degree, numFaces);
7299 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7300 confidence, numFaces);
7301 }
7302 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7303 CAM_INTF_META_FACE_GAZE, metadata) {
7304 int8_t angle[MAX_ROI];
7305 int32_t direction[MAX_ROI * 3];
7306 int8_t degree[MAX_ROI * 2];
7307 for (size_t i = 0; i < numFaces; i++) {
7308 angle[i] = gazes->gaze[i].gaze_angle;
7309 direction[3 * i] = gazes->gaze[i].updown_dir;
7310 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7311 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7312 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7313 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007314
7315 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7316                            "updown_dir=%d, leftright_dir=%d, roll_dir=%d, "
7317 "left_right_gaze=%d, top_bottom_gaze=%d",
7318 faceDetectionInfo->frame_id, i, angle[i],
7319 direction[3 * i], direction[3 * i + 1],
7320 direction[3 * i + 2],
7321 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007322 }
7323 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7324 (uint8_t *)angle, numFaces);
7325 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7326 direction, numFaces * 3);
7327 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7328 (uint8_t *)degree, numFaces * 2);
7329 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007330 }
7331 }
7332 }
7333 }
7334
7335 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7336 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007337 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007338 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007339 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007340
Shuzhen Wang14415f52016-11-16 18:26:18 -08007341 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7342 histogramBins = *histBins;
7343 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7344 }
7345
7346 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007347 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7348 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007349 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007350
7351 switch (stats_data->type) {
7352 case CAM_HISTOGRAM_TYPE_BAYER:
7353 switch (stats_data->bayer_stats.data_type) {
7354 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007355 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7356 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007357 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007358 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7359 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007360 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007361 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7362 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007363 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007364 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007365 case CAM_STATS_CHANNEL_R:
7366 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007367 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7368 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007369 }
7370 break;
7371 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007372 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007373 break;
7374 }
7375
Shuzhen Wang14415f52016-11-16 18:26:18 -08007376 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007377 }
7378 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007379 }
7380
7381 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7382 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7383 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7384 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7385 }
7386
7387 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7388 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7389 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7390 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7391 }
7392
7393 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7394 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7395 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7396 CAM_MAX_SHADING_MAP_HEIGHT);
7397 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7398 CAM_MAX_SHADING_MAP_WIDTH);
7399 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7400 lensShadingMap->lens_shading, 4U * map_width * map_height);
7401 }
7402
7403 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7404 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7405 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7406 }
7407
7408 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7409 //Populate CAM_INTF_META_TONEMAP_CURVES
7410 /* ch0 = G, ch 1 = B, ch 2 = R*/
7411 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7412 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7413 tonemap->tonemap_points_cnt,
7414 CAM_MAX_TONEMAP_CURVE_SIZE);
7415 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7416 }
7417
7418 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7419 &tonemap->curves[0].tonemap_points[0][0],
7420 tonemap->tonemap_points_cnt * 2);
7421
7422 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7423 &tonemap->curves[1].tonemap_points[0][0],
7424 tonemap->tonemap_points_cnt * 2);
7425
7426 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7427 &tonemap->curves[2].tonemap_points[0][0],
7428 tonemap->tonemap_points_cnt * 2);
7429 }
7430
7431 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7432 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7433 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7434 CC_GAIN_MAX);
7435 }
7436
7437 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7438 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7439 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7440 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7441 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7442 }
7443
7444 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7445 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7446 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7447 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7448 toneCurve->tonemap_points_cnt,
7449 CAM_MAX_TONEMAP_CURVE_SIZE);
7450 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7451 }
7452 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7453 (float*)toneCurve->curve.tonemap_points,
7454 toneCurve->tonemap_points_cnt * 2);
7455 }
7456
7457 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7458 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7459 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7460 predColorCorrectionGains->gains, 4);
7461 }
7462
7463 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7464 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7465 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7466 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7467 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7468 }
7469
7470 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7471 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7472 }
7473
7474 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7475 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7476 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7477 }
7478
7479 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7480 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7481 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7482 }
7483
7484 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7485 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7486 *effectMode);
7487 if (NAME_NOT_FOUND != val) {
7488 uint8_t fwk_effectMode = (uint8_t)val;
7489 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7490 }
7491 }
7492
7493 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7494 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7495 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7496 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7497 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7498 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7499 }
7500 int32_t fwk_testPatternData[4];
7501 fwk_testPatternData[0] = testPatternData->r;
7502 fwk_testPatternData[3] = testPatternData->b;
7503 switch (gCamCapability[mCameraId]->color_arrangement) {
7504 case CAM_FILTER_ARRANGEMENT_RGGB:
7505 case CAM_FILTER_ARRANGEMENT_GRBG:
7506 fwk_testPatternData[1] = testPatternData->gr;
7507 fwk_testPatternData[2] = testPatternData->gb;
7508 break;
7509 case CAM_FILTER_ARRANGEMENT_GBRG:
7510 case CAM_FILTER_ARRANGEMENT_BGGR:
7511 fwk_testPatternData[2] = testPatternData->gr;
7512 fwk_testPatternData[1] = testPatternData->gb;
7513 break;
7514 default:
7515 LOGE("color arrangement %d is not supported",
7516 gCamCapability[mCameraId]->color_arrangement);
7517 break;
7518 }
7519 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7520 }
7521
7522 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7523 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7524 }
7525
7526 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7527 String8 str((const char *)gps_methods);
7528 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7529 }
7530
7531 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7532 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7533 }
7534
7535 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7536 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7537 }
7538
7539 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7540 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7541 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7542 }
7543
7544 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7545 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7546 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7547 }
7548
7549 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7550 int32_t fwk_thumb_size[2];
7551 fwk_thumb_size[0] = thumb_size->width;
7552 fwk_thumb_size[1] = thumb_size->height;
7553 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7554 }
7555
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007556 // Skip reprocess metadata if there is no input stream.
7557 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7558 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7559 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7560 privateData,
7561 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7562 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007563 }
7564
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007565 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007566 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007567 meteringMode, 1);
7568 }
7569
Thierry Strudel54dc9782017-02-15 12:12:10 -08007570 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7571 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7572 LOGD("hdr_scene_data: %d %f\n",
7573 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7574 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7575 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7576 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7577 &isHdr, 1);
7578 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7579 &isHdrConfidence, 1);
7580 }
7581
7582
7583
Thierry Strudel3d639192016-09-09 11:52:26 -07007584 if (metadata->is_tuning_params_valid) {
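        /* The tuning blob built below consists of six uint32_t header fields
         * (data version and the sensor, VFE, CPP, CAC and mod3 data sizes,
         * with mod3 forced to 0), followed by the sensor, VFE, CPP and CAC
         * payloads copied from metadata->tuning_params.data at their
         * respective offsets. */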
7585 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7586 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7587 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7588
7589
7590 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7591 sizeof(uint32_t));
7592 data += sizeof(uint32_t);
7593
7594 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7595 sizeof(uint32_t));
7596 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7597 data += sizeof(uint32_t);
7598
7599 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7600 sizeof(uint32_t));
7601 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7602 data += sizeof(uint32_t);
7603
7604 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7605 sizeof(uint32_t));
7606 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7607 data += sizeof(uint32_t);
7608
7609 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7610 sizeof(uint32_t));
7611 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7612 data += sizeof(uint32_t);
7613
7614 metadata->tuning_params.tuning_mod3_data_size = 0;
7615 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7616 sizeof(uint32_t));
7617 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7618 data += sizeof(uint32_t);
7619
7620 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7621 TUNING_SENSOR_DATA_MAX);
7622 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7623 count);
7624 data += count;
7625
7626 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7627 TUNING_VFE_DATA_MAX);
7628 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7629 count);
7630 data += count;
7631
7632 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7633 TUNING_CPP_DATA_MAX);
7634 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7635 count);
7636 data += count;
7637
7638 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7639 TUNING_CAC_DATA_MAX);
7640 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7641 count);
7642 data += count;
7643
7644 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7645 (int32_t *)(void *)tuning_meta_data_blob,
7646 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7647 }
7648
7649 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7650 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7651 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7652 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7653 NEUTRAL_COL_POINTS);
7654 }
7655
7656 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7657 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7658 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7659 }
7660
7661 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7662 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7663 // Adjust crop region from sensor output coordinate system to active
7664 // array coordinate system.
7665 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7666 hAeRegions->rect.width, hAeRegions->rect.height);
7667
7668 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7669 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7670 REGIONS_TUPLE_COUNT);
7671 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7672 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7673 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7674 hAeRegions->rect.height);
7675 }
7676
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007677 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7678 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7679 if (NAME_NOT_FOUND != val) {
7680 uint8_t fwkAfMode = (uint8_t)val;
7681 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7682 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7683 } else {
7684 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7685 val);
7686 }
7687 }
7688
Thierry Strudel3d639192016-09-09 11:52:26 -07007689 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7690 uint8_t fwk_afState = (uint8_t) *afState;
7691 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007692 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007693 }
7694
7695 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7696 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7697 }
7698
7699 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7700 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7701 }
7702
7703 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7704 uint8_t fwk_lensState = *lensState;
7705 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7706 }
7707
Thierry Strudel3d639192016-09-09 11:52:26 -07007708
7709 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007710 uint32_t ab_mode = *hal_ab_mode;
7711 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7712 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7713 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7714 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007715 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007716 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007717 if (NAME_NOT_FOUND != val) {
7718 uint8_t fwk_ab_mode = (uint8_t)val;
7719 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7720 }
7721 }
7722
7723 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7724 int val = lookupFwkName(SCENE_MODES_MAP,
7725 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7726 if (NAME_NOT_FOUND != val) {
7727 uint8_t fwkBestshotMode = (uint8_t)val;
7728 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7729 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7730 } else {
7731 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7732 }
7733 }
7734
7735 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7736 uint8_t fwk_mode = (uint8_t) *mode;
7737 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7738 }
7739
7740 /* Constant metadata values to be updated */
7741 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7742 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7743
7744 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7745 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7746
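// The hot pixel map mode is reported as OFF above, so an empty (zero-entry)
// hot pixel map is published below, presumably just to keep the tag present
// in the result.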
7747 int32_t hotPixelMap[2];
7748 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7749
7750 // CDS
7751 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7752 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7753 }
7754
Thierry Strudel04e026f2016-10-10 11:27:36 -07007755 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7756 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007757 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007758 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7759 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7760 } else {
7761 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7762 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007763
7764 if(fwk_hdr != curr_hdr_state) {
7765 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7766 if(fwk_hdr)
7767 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7768 else
7769 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7770 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007771 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7772 }
7773
Thierry Strudel54dc9782017-02-15 12:12:10 -08007774 //binning correction
7775 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7776 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7777 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7778 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7779 }
7780
Thierry Strudel04e026f2016-10-10 11:27:36 -07007781 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007782 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007783 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7784 int8_t is_ir_on = 0;
7785
7786 is_ir_on = (fwk_ir > 0) ? 1 : 0;
7787 if(is_ir_on != curr_ir_state) {
7788 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7789 if(is_ir_on)
7790 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7791 else
7792 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7793 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007794 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007795 }
7796
Thierry Strudel269c81a2016-10-12 12:13:59 -07007797 // AEC SPEED
7798 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7799 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7800 }
7801
7802 // AWB SPEED
7803 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7804 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7805 }
7806
Thierry Strudel3d639192016-09-09 11:52:26 -07007807 // TNR
7808 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7809 uint8_t tnr_enable = tnr->denoise_enable;
7810 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007811 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7812 int8_t is_tnr_on = 0;
7813
7814 is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7815 if(is_tnr_on != curr_tnr_state) {
7816 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7817 if(is_tnr_on)
7818 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7819 else
7820 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7821 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007822
7823 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7824 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7825 }
7826
7827 // Reprocess crop data
7828 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7829 uint8_t cnt = crop_data->num_of_streams;
7830 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7831 // mm-qcamera-daemon only posts crop_data for streams
7832 // not linked to pproc. So the absence of valid crop metadata is not
7833 // necessarily an error case.
7834 LOGD("No valid crop metadata entries");
7835 } else {
7836 uint32_t reproc_stream_id;
7837 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7838 LOGD("No reprocessible stream found, ignore crop data");
7839 } else {
7840 int rc = NO_ERROR;
7841 Vector<int32_t> roi_map;
7842 int32_t *crop = new int32_t[cnt*4];
7843 if (NULL == crop) {
7844 rc = NO_MEMORY;
7845 }
7846 if (NO_ERROR == rc) {
7847 int32_t streams_found = 0;
7848 for (size_t i = 0; i < cnt; i++) {
7849 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7850 if (pprocDone) {
7851 // HAL already does internal reprocessing,
7852 // either via reprocessing before JPEG encoding,
7853 // or offline postprocessing for pproc bypass case.
7854 crop[0] = 0;
7855 crop[1] = 0;
7856 crop[2] = mInputStreamInfo.dim.width;
7857 crop[3] = mInputStreamInfo.dim.height;
7858 } else {
7859 crop[0] = crop_data->crop_info[i].crop.left;
7860 crop[1] = crop_data->crop_info[i].crop.top;
7861 crop[2] = crop_data->crop_info[i].crop.width;
7862 crop[3] = crop_data->crop_info[i].crop.height;
7863 }
7864 roi_map.add(crop_data->crop_info[i].roi_map.left);
7865 roi_map.add(crop_data->crop_info[i].roi_map.top);
7866 roi_map.add(crop_data->crop_info[i].roi_map.width);
7867 roi_map.add(crop_data->crop_info[i].roi_map.height);
7868 streams_found++;
7869 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7870 crop[0], crop[1], crop[2], crop[3]);
7871 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7872 crop_data->crop_info[i].roi_map.left,
7873 crop_data->crop_info[i].roi_map.top,
7874 crop_data->crop_info[i].roi_map.width,
7875 crop_data->crop_info[i].roi_map.height);
7876 break;
7877
7878 }
7879 }
7880 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7881 &streams_found, 1);
7882 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7883 crop, (size_t)(streams_found * 4));
7884 if (roi_map.array()) {
7885 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7886 roi_map.array(), roi_map.size());
7887 }
7888 }
7889 if (crop) {
7890 delete [] crop;
7891 }
7892 }
7893 }
7894 }
7895
7896 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7897 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7898 // so hardcode the CAC result to OFF mode.
7899 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7900 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7901 } else {
7902 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7903 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7904 *cacMode);
7905 if (NAME_NOT_FOUND != val) {
7906 uint8_t resultCacMode = (uint8_t)val;
7907 // Check whether the CAC result from the callback equals the framework-set CAC mode.
7908 // If not, report the CAC mode that came in the corresponding request.
7909 if (fwk_cacMode != resultCacMode) {
7910 resultCacMode = fwk_cacMode;
7911 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007912 //Check if CAC is disabled by property
7913 if (m_cacModeDisabled) {
7914 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7915 }
7916
Thierry Strudel3d639192016-09-09 11:52:26 -07007917 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7918 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7919 } else {
7920 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7921 }
7922 }
7923 }
7924
7925 // Post blob of cam_cds_data through vendor tag.
7926 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7927 uint8_t cnt = cdsInfo->num_of_streams;
7928 cam_cds_data_t cdsDataOverride;
7929 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7930 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7931 cdsDataOverride.num_of_streams = 1;
7932 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7933 uint32_t reproc_stream_id;
7934 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7935 LOGD("No reprocessible stream found, ignore cds data");
7936 } else {
7937 for (size_t i = 0; i < cnt; i++) {
7938 if (cdsInfo->cds_info[i].stream_id ==
7939 reproc_stream_id) {
7940 cdsDataOverride.cds_info[0].cds_enable =
7941 cdsInfo->cds_info[i].cds_enable;
7942 break;
7943 }
7944 }
7945 }
7946 } else {
7947 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7948 }
7949 camMetadata.update(QCAMERA3_CDS_INFO,
7950 (uint8_t *)&cdsDataOverride,
7951 sizeof(cam_cds_data_t));
7952 }
7953
7954 // Ldaf calibration data
7955 if (!mLdafCalibExist) {
7956 IF_META_AVAILABLE(uint32_t, ldafCalib,
7957 CAM_INTF_META_LDAF_EXIF, metadata) {
7958 mLdafCalibExist = true;
7959 mLdafCalib[0] = ldafCalib[0];
7960 mLdafCalib[1] = ldafCalib[1];
7961 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7962 ldafCalib[0], ldafCalib[1]);
7963 }
7964 }
7965
Thierry Strudel54dc9782017-02-15 12:12:10 -08007966 // EXIF debug data through vendor tag
7967 /*
7968 * Mobicat Mask can assume 3 values:
7969 * 1 refers to Mobicat data,
7970 * 2 refers to Stats Debug and Exif Debug Data,
7971 * 3 refers to Mobicat and Stats Debug Data.
7972 * We want to make sure that we are sending Exif debug data
7973 * only when Mobicat Mask is 2.
7974 */
7975 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7976 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7977 (uint8_t *)(void *)mExifParams.debug_params,
7978 sizeof(mm_jpeg_debug_exif_params_t));
7979 }
7980
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007981 // Reprocess and DDM debug data through vendor tag
7982 cam_reprocess_info_t repro_info;
7983 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007984 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7985 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007986 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007987 }
7988 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7989 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007990 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007991 }
7992 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7993 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007994 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007995 }
7996 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7997 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007998 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007999 }
8000 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8001 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008002 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008003 }
8004 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008005 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008006 }
8007 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8008 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008009 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008010 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008011 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8012 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8013 }
8014 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8015 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8016 }
8017 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8018 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008019
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008020 // INSTANT AEC MODE
8021 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8022 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8023 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8024 }
8025
Shuzhen Wange763e802016-03-31 10:24:29 -07008026 // AF scene change
8027 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8028 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8029 }
8030
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008031 // Enable ZSL
8032 if (enableZsl != nullptr) {
8033 uint8_t value = *enableZsl ?
8034 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8035 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8036 }
8037
Xu Han821ea9c2017-05-23 09:00:40 -07008038 // OIS Data
8039 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8040 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8041 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8042 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8043 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8044 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8045 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8046 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8047 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8048 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8049 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
8050 }
8051
Thierry Strudel3d639192016-09-09 11:52:26 -07008052 resultMetadata = camMetadata.release();
8053 return resultMetadata;
8054}
8055
8056/*===========================================================================
8057 * FUNCTION : saveExifParams
8058 *
8059 * DESCRIPTION: Save 3A EXIF debug parameters from the metadata callback into mExifParams
8060 *
8061 * PARAMETERS :
8062 * @metadata : metadata information from callback
8063 *
8064 * RETURN : none
8065 *
8066 *==========================================================================*/
8067void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8068{
8069 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8070 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8071 if (mExifParams.debug_params) {
8072 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8073 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8074 }
8075 }
8076 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8077 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8078 if (mExifParams.debug_params) {
8079 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8080 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8081 }
8082 }
8083 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8084 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8085 if (mExifParams.debug_params) {
8086 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8087 mExifParams.debug_params->af_debug_params_valid = TRUE;
8088 }
8089 }
8090 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8091 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8092 if (mExifParams.debug_params) {
8093 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8094 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8095 }
8096 }
8097 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8098 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8099 if (mExifParams.debug_params) {
8100 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8101 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8102 }
8103 }
8104 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8105 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8106 if (mExifParams.debug_params) {
8107 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8108 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8109 }
8110 }
8111 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8112 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8113 if (mExifParams.debug_params) {
8114 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8115 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8116 }
8117 }
8118 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8119 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8120 if (mExifParams.debug_params) {
8121 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8122 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8123 }
8124 }
8125}
8126
8127/*===========================================================================
8128 * FUNCTION : get3AExifParams
8129 *
8130 * DESCRIPTION: Return the cached 3A EXIF parameters (mExifParams)
8131 *
8132 * PARAMETERS : none
8133 *
8134 *
8135 * RETURN : mm_jpeg_exif_params_t
8136 *
8137 *==========================================================================*/
8138mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8139{
8140 return mExifParams;
8141}
8142
8143/*===========================================================================
8144 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8145 *
8146 * DESCRIPTION: Translate urgent (partial result) metadata from the backend into framework result metadata
8147 *
8148 * PARAMETERS :
8149 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008150 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8151 * urgent metadata in a batch. Always true for
8152 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008153 *
8154 * RETURN : camera_metadata_t*
8155 * metadata in a format specified by fwk
8156 *==========================================================================*/
8157camera_metadata_t*
8158QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008159 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008160{
8161 CameraMetadata camMetadata;
8162 camera_metadata_t *resultMetadata;
8163
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008164 if (!lastUrgentMetadataInBatch) {
8165 /* In batch mode, use empty metadata if this is not the last in batch
8166 */
8167 resultMetadata = allocate_camera_metadata(0, 0);
8168 return resultMetadata;
8169 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008170
8171 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8172 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8173 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8174 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8175 }
8176
8177 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8178 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8179 &aecTrigger->trigger, 1);
8180 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8181 &aecTrigger->trigger_id, 1);
8182 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8183 aecTrigger->trigger);
8184 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8185 aecTrigger->trigger_id);
8186 }
8187
8188 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8189 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8190 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8191 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8192 }
8193
Thierry Strudel3d639192016-09-09 11:52:26 -07008194 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8195 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8196 &af_trigger->trigger, 1);
8197 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8198 af_trigger->trigger);
8199 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8200 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8201 af_trigger->trigger_id);
8202 }
8203
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008204 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8205 /*af regions*/
8206 int32_t afRegions[REGIONS_TUPLE_COUNT];
8207 // Adjust crop region from sensor output coordinate system to active
8208 // array coordinate system.
8209 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8210 hAfRegions->rect.width, hAfRegions->rect.height);
8211
8212 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8213 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8214 REGIONS_TUPLE_COUNT);
8215 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8216 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8217 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8218 hAfRegions->rect.height);
8219 }
8220
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008221 // AF region confidence
8222 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8223 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8224 }
8225
Thierry Strudel3d639192016-09-09 11:52:26 -07008226 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8227 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8228 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8229 if (NAME_NOT_FOUND != val) {
8230 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8231 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8232 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8233 } else {
8234 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8235 }
8236 }
8237
8238 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8239 uint32_t aeMode = CAM_AE_MODE_MAX;
8240 int32_t flashMode = CAM_FLASH_MODE_MAX;
8241 int32_t redeye = -1;
8242 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8243 aeMode = *pAeMode;
8244 }
8245 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8246 flashMode = *pFlashMode;
8247 }
8248 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8249 redeye = *pRedeye;
8250 }
8251
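// Derive ANDROID_CONTROL_AE_MODE from the hints gathered above. As implemented
// below, red-eye reduction takes precedence, then auto/on flash, then plain
// AE on, AE off, and finally the external-flash AE mode.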
8252 if (1 == redeye) {
8253 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8254 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8255 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8256 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8257 flashMode);
8258 if (NAME_NOT_FOUND != val) {
8259 fwk_aeMode = (uint8_t)val;
8260 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8261 } else {
8262 LOGE("Unsupported flash mode %d", flashMode);
8263 }
8264 } else if (aeMode == CAM_AE_MODE_ON) {
8265 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8266 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8267 } else if (aeMode == CAM_AE_MODE_OFF) {
8268 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8269 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008270 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8271 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8272 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008273 } else {
8274 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8275 "flashMode:%d, aeMode:%u!!!",
8276 redeye, flashMode, aeMode);
8277 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008278 if (mInstantAEC) {
8279 // Increment the frame index count until a bound is reached for instant AEC.
8280 mInstantAecFrameIdxCount++;
8281 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8282 CAM_INTF_META_AEC_INFO, metadata) {
8283 LOGH("ae_params->settled = %d",ae_params->settled);
8284 // If AEC has settled, or the number of frames has reached the bound value,
8285 // reset instant AEC.
8286 if (ae_params->settled ||
8287 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8288 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8289 mInstantAEC = false;
8290 mResetInstantAEC = true;
8291 mInstantAecFrameIdxCount = 0;
8292 }
8293 }
8294 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008295 resultMetadata = camMetadata.release();
8296 return resultMetadata;
8297}
8298
8299/*===========================================================================
8300 * FUNCTION : dumpMetadataToFile
8301 *
8302 * DESCRIPTION: Dumps tuning metadata to file system
8303 *
8304 * PARAMETERS :
8305 * @meta : tuning metadata
8306 * @dumpFrameCount : current dump frame count
8307 * @enabled : Enable mask
8308 *
8309 *==========================================================================*/
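// The dump below is a flat binary layout: a uint32 data version followed by
// uint32 sizes for the sensor, VFE, CPP and CAC sections plus a zeroed mod3
// size, then the sensor, VFE, CPP and CAC tuning blobs copied from meta.data
// at their fixed offsets.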
8310void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8311 uint32_t &dumpFrameCount,
8312 bool enabled,
8313 const char *type,
8314 uint32_t frameNumber)
8315{
8316 //Some sanity checks
8317 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8318 LOGE("Tuning sensor data size bigger than expected %d: %d",
8319 meta.tuning_sensor_data_size,
8320 TUNING_SENSOR_DATA_MAX);
8321 return;
8322 }
8323
8324 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8325 LOGE("Tuning VFE data size bigger than expected %d: %d",
8326 meta.tuning_vfe_data_size,
8327 TUNING_VFE_DATA_MAX);
8328 return;
8329 }
8330
8331 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8332 LOGE("Tuning CPP data size bigger than expected %d: %d",
8333 meta.tuning_cpp_data_size,
8334 TUNING_CPP_DATA_MAX);
8335 return;
8336 }
8337
8338 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8339 LOGE("Tuning CAC data size bigger than expected %d: %d",
8340 meta.tuning_cac_data_size,
8341 TUNING_CAC_DATA_MAX);
8342 return;
8343 }
8344 //
8345
8346 if(enabled){
8347 char timeBuf[FILENAME_MAX];
8348 char buf[FILENAME_MAX];
8349 memset(buf, 0, sizeof(buf));
8350 memset(timeBuf, 0, sizeof(timeBuf));
8351 time_t current_time;
8352 struct tm * timeinfo;
8353 time (&current_time);
8354 timeinfo = localtime (&current_time);
8355 if (timeinfo != NULL) {
8356 strftime (timeBuf, sizeof(timeBuf),
8357 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8358 }
8359 String8 filePath(timeBuf);
8360 snprintf(buf,
8361 sizeof(buf),
8362 "%dm_%s_%d.bin",
8363 dumpFrameCount,
8364 type,
8365 frameNumber);
8366 filePath.append(buf);
8367 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8368 if (file_fd >= 0) {
8369 ssize_t written_len = 0;
8370 meta.tuning_data_version = TUNING_DATA_VERSION;
8371 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8372 written_len += write(file_fd, data, sizeof(uint32_t));
8373 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8374 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8375 written_len += write(file_fd, data, sizeof(uint32_t));
8376 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8377 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8378 written_len += write(file_fd, data, sizeof(uint32_t));
8379 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8380 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8381 written_len += write(file_fd, data, sizeof(uint32_t));
8382 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8383 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8384 written_len += write(file_fd, data, sizeof(uint32_t));
8385 meta.tuning_mod3_data_size = 0;
8386 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8387 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8388 written_len += write(file_fd, data, sizeof(uint32_t));
8389 size_t total_size = meta.tuning_sensor_data_size;
8390 data = (void *)((uint8_t *)&meta.data);
8391 written_len += write(file_fd, data, total_size);
8392 total_size = meta.tuning_vfe_data_size;
8393 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8394 written_len += write(file_fd, data, total_size);
8395 total_size = meta.tuning_cpp_data_size;
8396 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8397 written_len += write(file_fd, data, total_size);
8398 total_size = meta.tuning_cac_data_size;
8399 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8400 written_len += write(file_fd, data, total_size);
8401 close(file_fd);
8402 } else {
8403 LOGE("failed to open file for metadata dumping");
8404 }
8405 }
8406}
8407
8408/*===========================================================================
8409 * FUNCTION : cleanAndSortStreamInfo
8410 *
8411 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8412 * and sort them such that the raw stream is at the end of the list.
8413 * This is a workaround for a camera daemon constraint.
8414 *
8415 * PARAMETERS : None
8416 *
8417 *==========================================================================*/
8418void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8419{
8420 List<stream_info_t *> newStreamInfo;
8421
8422 /*clean up invalid streams*/
8423 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8424 it != mStreamInfo.end();) {
8425 if(((*it)->status) == INVALID){
8426 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8427 delete channel;
8428 free(*it);
8429 it = mStreamInfo.erase(it);
8430 } else {
8431 it++;
8432 }
8433 }
8434
8435 // Move preview/video/callback/snapshot streams into newList
8436 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8437 it != mStreamInfo.end();) {
8438 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8439 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8440 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8441 newStreamInfo.push_back(*it);
8442 it = mStreamInfo.erase(it);
8443 } else
8444 it++;
8445 }
8446 // Move raw streams into newList
8447 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8448 it != mStreamInfo.end();) {
8449 newStreamInfo.push_back(*it);
8450 it = mStreamInfo.erase(it);
8451 }
8452
8453 mStreamInfo = newStreamInfo;
8454}
8455
8456/*===========================================================================
8457 * FUNCTION : extractJpegMetadata
8458 *
8459 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8460 * JPEG metadata is cached in HAL, and return as part of capture
8461 * result when metadata is returned from camera daemon.
8462 *
8463 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8464 * @request: capture request
8465 *
8466 *==========================================================================*/
8467void QCamera3HardwareInterface::extractJpegMetadata(
8468 CameraMetadata& jpegMetadata,
8469 const camera3_capture_request_t *request)
8470{
8471 CameraMetadata frame_settings;
8472 frame_settings = request->settings;
8473
8474 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8475 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8476 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8477 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8478
8479 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8480 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8481 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8482 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8483
8484 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8485 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8486 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8487 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8488
8489 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8490 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8491 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8492 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8493
8494 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8495 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8496 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8497 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8498
8499 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8500 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8501 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8502 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8503
8504 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8505 int32_t thumbnail_size[2];
8506 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8507 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8508 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8509 int32_t orientation =
8510 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008511 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008512 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8513 int32_t temp;
8514 temp = thumbnail_size[0];
8515 thumbnail_size[0] = thumbnail_size[1];
8516 thumbnail_size[1] = temp;
8517 }
8518 }
8519 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8520 thumbnail_size,
8521 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8522 }
8523
8524}
8525
8526/*===========================================================================
8527 * FUNCTION : convertToRegions
8528 *
8529 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8530 *
8531 * PARAMETERS :
8532 * @rect : cam_rect_t struct to convert
8533 * @region : int32_t destination array
8534 * @weight : if we are converting from cam_area_t, weight is valid
8535 * else weight = -1
8536 *
8537 *==========================================================================*/
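// For example, a rect of {left=100, top=200, width=300, height=400} with
// weight 1 is written out as [100, 200, 400, 600, 1], i.e.
// [xmin, ymin, xmax, ymax, weight] in the order the framework region tags use.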
8538void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8539 int32_t *region, int weight)
8540{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008541 region[FACE_LEFT] = rect.left;
8542 region[FACE_TOP] = rect.top;
8543 region[FACE_RIGHT] = rect.left + rect.width;
8544 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008545 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008546 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008547 }
8548}
8549
8550/*===========================================================================
8551 * FUNCTION : convertFromRegions
8552 *
8553 * DESCRIPTION: helper method to convert an int32_t region array from capture request settings into a cam_area_t
8554 *
8555 * PARAMETERS :
8556 * @roi : cam_area_t destination struct
8557 * @frame_settings : capture request settings holding the region tag
8558 * @tag : metadata tag whose data holds
8559 *        [xmin, ymin, xmax, ymax, weight]
8560 *
8561 *==========================================================================*/
8562void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008563 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008564{
Thierry Strudel3d639192016-09-09 11:52:26 -07008565 int32_t x_min = frame_settings.find(tag).data.i32[0];
8566 int32_t y_min = frame_settings.find(tag).data.i32[1];
8567 int32_t x_max = frame_settings.find(tag).data.i32[2];
8568 int32_t y_max = frame_settings.find(tag).data.i32[3];
8569 roi.weight = frame_settings.find(tag).data.i32[4];
8570 roi.rect.left = x_min;
8571 roi.rect.top = y_min;
8572 roi.rect.width = x_max - x_min;
8573 roi.rect.height = y_max - y_min;
8574}
8575
8576/*===========================================================================
8577 * FUNCTION : resetIfNeededROI
8578 *
8579 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8580 * crop region
8581 *
8582 * PARAMETERS :
8583 * @roi : cam_area_t struct to resize
8584 * @scalerCropRegion : cam_crop_region_t region to compare against
8585 *
8586 *
8587 *==========================================================================*/
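// Return semantics, roughly: false when the roi lies entirely outside the
// scaler crop region; true otherwise, with the roi clamped to the crop region
// boundaries as needed (a zero-weight roi is accepted as-is).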
8588bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8589 const cam_crop_region_t* scalerCropRegion)
8590{
8591 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8592 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8593 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8594 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8595
8596 /* According to the spec, weight = 0 indicates the roi needs to be disabled.
8597 * Without this check, the calculations below that validate whether the roi
8598 * is inside the scalar crop region would fail, resulting in the roi not being
8599 * reset and causing the algorithm to continue using a stale roi window.
8600 */
8601 if (roi->weight == 0) {
8602 return true;
8603 }
8604
8605 if ((roi_x_max < scalerCropRegion->left) ||
8606 // right edge of roi window is left of scalar crop's left edge
8607 (roi_y_max < scalerCropRegion->top) ||
8608 // bottom edge of roi window is above scalar crop's top edge
8609 (roi->rect.left > crop_x_max) ||
8610 // left edge of roi window is beyond(right) of scalar crop's right edge
8611 (roi->rect.top > crop_y_max)){
8612 // top edge of roi window is below scalar crop's bottom edge
8613 return false;
8614 }
8615 if (roi->rect.left < scalerCropRegion->left) {
8616 roi->rect.left = scalerCropRegion->left;
8617 }
8618 if (roi->rect.top < scalerCropRegion->top) {
8619 roi->rect.top = scalerCropRegion->top;
8620 }
8621 if (roi_x_max > crop_x_max) {
8622 roi_x_max = crop_x_max;
8623 }
8624 if (roi_y_max > crop_y_max) {
8625 roi_y_max = crop_y_max;
8626 }
8627 roi->rect.width = roi_x_max - roi->rect.left;
8628 roi->rect.height = roi_y_max - roi->rect.top;
8629 return true;
8630}
8631
8632/*===========================================================================
8633 * FUNCTION : convertLandmarks
8634 *
8635 * DESCRIPTION: helper method to extract the landmarks from face detection info
8636 *
8637 * PARAMETERS :
8638 * @landmark_data : input landmark data to be converted
8639 * @landmarks : int32_t destination array
8640 *
8641 *
8642 *==========================================================================*/
8643void QCamera3HardwareInterface::convertLandmarks(
8644 cam_face_landmarks_info_t landmark_data,
8645 int32_t *landmarks)
8646{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008647 if (landmark_data.is_left_eye_valid) {
8648 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8649 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8650 } else {
8651 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8652 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8653 }
8654
8655 if (landmark_data.is_right_eye_valid) {
8656 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8657 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8658 } else {
8659 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8660 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8661 }
8662
8663 if (landmark_data.is_mouth_valid) {
8664 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8665 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8666 } else {
8667 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8668 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8669 }
8670}
8671
8672/*===========================================================================
8673 * FUNCTION : setInvalidLandmarks
8674 *
8675 * DESCRIPTION: helper method to set invalid landmarks
8676 *
8677 * PARAMETERS :
8678 * @landmarks : int32_t destination array
8679 *
8680 *
8681 *==========================================================================*/
8682void QCamera3HardwareInterface::setInvalidLandmarks(
8683 int32_t *landmarks)
8684{
8685 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8686 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8687 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8688 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8689 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8690 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008691}
8692
8693#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008694
8695/*===========================================================================
8696 * FUNCTION : getCapabilities
8697 *
8698 * DESCRIPTION: query camera capability from back-end
8699 *
8700 * PARAMETERS :
8701 * @ops : mm-interface ops structure
8702 * @cam_handle : camera handle for which we need capability
8703 *
8704 * RETURN : ptr type of capability structure
8705 * capability for success
8706 * NULL for failure
8707 *==========================================================================*/
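// Rough flow: allocate a heap buffer, map it to the backend as a CAPABILITY
// buffer, have the backend fill it via query_capability(), copy the result
// into a malloc'd cam_capability_t, then unmap and release the heap buffer.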
8708cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8709 uint32_t cam_handle)
8710{
8711 int rc = NO_ERROR;
8712 QCamera3HeapMemory *capabilityHeap = NULL;
8713 cam_capability_t *cap_ptr = NULL;
8714
8715 if (ops == NULL) {
8716 LOGE("Invalid arguments");
8717 return NULL;
8718 }
8719
8720 capabilityHeap = new QCamera3HeapMemory(1);
8721 if (capabilityHeap == NULL) {
8722 LOGE("creation of capabilityHeap failed");
8723 return NULL;
8724 }
8725
8726 /* Allocate memory for capability buffer */
8727 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8728 if(rc != OK) {
8729 LOGE("No memory for capability");
8730 goto allocate_failed;
8731 }
8732
8733 /* Map memory for capability buffer */
8734 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8735
8736 rc = ops->map_buf(cam_handle,
8737 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8738 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8739 if(rc < 0) {
8740 LOGE("failed to map capability buffer");
8741 rc = FAILED_TRANSACTION;
8742 goto map_failed;
8743 }
8744
8745 /* Query Capability */
8746 rc = ops->query_capability(cam_handle);
8747 if(rc < 0) {
8748 LOGE("failed to query capability");
8749 rc = FAILED_TRANSACTION;
8750 goto query_failed;
8751 }
8752
8753 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8754 if (cap_ptr == NULL) {
8755 LOGE("out of memory");
8756 rc = NO_MEMORY;
8757 goto query_failed;
8758 }
8759
8760 memset(cap_ptr, 0, sizeof(cam_capability_t));
8761 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8762
8763 int index;
8764 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8765 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8766 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8767 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8768 }
8769
8770query_failed:
8771 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8772map_failed:
8773 capabilityHeap->deallocate();
8774allocate_failed:
8775 delete capabilityHeap;
8776
8777 if (rc != NO_ERROR) {
8778 return NULL;
8779 } else {
8780 return cap_ptr;
8781 }
8782}
8783
Thierry Strudel3d639192016-09-09 11:52:26 -07008784/*===========================================================================
8785 * FUNCTION : initCapabilities
8786 *
8787 * DESCRIPTION: initialize camera capabilities in static data struct
8788 *
8789 * PARAMETERS :
8790 * @cameraId : camera Id
8791 *
8792 * RETURN : int32_t type of status
8793 * NO_ERROR -- success
8794 * none-zero failure code
8795 *==========================================================================*/
8796int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8797{
8798 int rc = 0;
8799 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008800 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008801
8802 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8803 if (rc) {
8804 LOGE("camera_open failed. rc = %d", rc);
8805 goto open_failed;
8806 }
8807 if (!cameraHandle) {
8808 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8809 goto open_failed;
8810 }
8811
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008812 handle = get_main_camera_handle(cameraHandle->camera_handle);
8813 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8814 if (gCamCapability[cameraId] == NULL) {
8815 rc = FAILED_TRANSACTION;
8816 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008817 }
8818
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008819 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008820 if (is_dual_camera_by_idx(cameraId)) {
8821 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8822 gCamCapability[cameraId]->aux_cam_cap =
8823 getCapabilities(cameraHandle->ops, handle);
8824 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8825 rc = FAILED_TRANSACTION;
8826 free(gCamCapability[cameraId]);
8827 goto failed_op;
8828 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008829
8830 // Copy the main camera capability to main_cam_cap struct
8831 gCamCapability[cameraId]->main_cam_cap =
8832 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8833 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8834 LOGE("out of memory");
8835 rc = NO_MEMORY;
8836 goto failed_op;
8837 }
8838 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8839 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008840 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008841failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008842 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8843 cameraHandle = NULL;
8844open_failed:
8845 return rc;
8846}
8847
8848/*==========================================================================
8849 * FUNCTION : get3Aversion
8850 *
8851 * DESCRIPTION: get the Q3A S/W version
8852 *
8853 * PARAMETERS :
8854 * @sw_version: Reference of Q3A structure which will hold version info upon
8855 * return
8856 *
8857 * RETURN : None
8858 *
8859 *==========================================================================*/
8860void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8861{
8862 if(gCamCapability[mCameraId])
8863 sw_version = gCamCapability[mCameraId]->q3a_version;
8864 else
8865 LOGE("Capability structure NULL!");
8866}
8867
8868
8869/*===========================================================================
8870 * FUNCTION : initParameters
8871 *
8872 * DESCRIPTION: initialize camera parameters
8873 *
8874 * PARAMETERS :
8875 *
8876 * RETURN : int32_t type of status
8877 * NO_ERROR -- success
8878 * none-zero failure code
8879 *==========================================================================*/
8880int QCamera3HardwareInterface::initParameters()
8881{
8882 int rc = 0;
8883
8884 //Allocate Set Param Buffer
8885 mParamHeap = new QCamera3HeapMemory(1);
8886 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8887 if(rc != OK) {
8888 rc = NO_MEMORY;
8889 LOGE("Failed to allocate SETPARM Heap memory");
8890 delete mParamHeap;
8891 mParamHeap = NULL;
8892 return rc;
8893 }
8894
8895 //Map memory for parameters buffer
8896 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8897 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8898 mParamHeap->getFd(0),
8899 sizeof(metadata_buffer_t),
8900 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8901 if(rc < 0) {
8902 LOGE("failed to map SETPARM buffer");
8903 rc = FAILED_TRANSACTION;
8904 mParamHeap->deallocate();
8905 delete mParamHeap;
8906 mParamHeap = NULL;
8907 return rc;
8908 }
8909
8910 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8911
8912 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8913 return rc;
8914}
8915
8916/*===========================================================================
8917 * FUNCTION : deinitParameters
8918 *
8919 * DESCRIPTION: de-initialize camera parameters
8920 *
8921 * PARAMETERS :
8922 *
8923 * RETURN : NONE
8924 *==========================================================================*/
8925void QCamera3HardwareInterface::deinitParameters()
8926{
8927 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8928 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8929
8930 mParamHeap->deallocate();
8931 delete mParamHeap;
8932 mParamHeap = NULL;
8933
8934 mParameters = NULL;
8935
8936 free(mPrevParameters);
8937 mPrevParameters = NULL;
8938}
8939
8940/*===========================================================================
8941 * FUNCTION : calcMaxJpegSize
8942 *
8943 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8944 *
8945 * PARAMETERS :
8946 *
8947 * RETURN : max_jpeg_size
8948 *==========================================================================*/
8949size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8950{
8951 size_t max_jpeg_size = 0;
8952 size_t temp_width, temp_height;
8953 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8954 MAX_SIZES_CNT);
8955 for (size_t i = 0; i < count; i++) {
8956 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8957 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8958 if (temp_width * temp_height > max_jpeg_size ) {
8959 max_jpeg_size = temp_width * temp_height;
8960 }
8961 }
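// Worst-case JPEG buffer sizing: roughly an uncompressed YUV420 frame
// (width * height * 3/2) plus the camera3_jpeg_blob_t transport header.
// E.g. a 4000x3000 maximum picture size gives 18,000,000 bytes plus the header.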
8962 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8963 return max_jpeg_size;
8964}
8965
8966/*===========================================================================
8967 * FUNCTION : getMaxRawSize
8968 *
8969 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8970 *
8971 * PARAMETERS :
8972 *
8973 * RETURN : Largest supported Raw Dimension
8974 *==========================================================================*/
8975cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8976{
8977 int max_width = 0;
8978 cam_dimension_t maxRawSize;
8979
8980 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8981 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8982 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8983 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8984 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8985 }
8986 }
8987 return maxRawSize;
8988}
8989
8990
8991/*===========================================================================
8992 * FUNCTION : calcMaxJpegDim
8993 *
8994 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8995 *
8996 * PARAMETERS :
8997 *
8998 * RETURN : max_jpeg_dim
8999 *==========================================================================*/
9000cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9001{
9002 cam_dimension_t max_jpeg_dim;
9003 cam_dimension_t curr_jpeg_dim;
9004 max_jpeg_dim.width = 0;
9005 max_jpeg_dim.height = 0;
9006 curr_jpeg_dim.width = 0;
9007 curr_jpeg_dim.height = 0;
9008 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9009 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9010 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9011 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9012 max_jpeg_dim.width * max_jpeg_dim.height ) {
9013 max_jpeg_dim.width = curr_jpeg_dim.width;
9014 max_jpeg_dim.height = curr_jpeg_dim.height;
9015 }
9016 }
9017 return max_jpeg_dim;
9018}
9019
9020/*===========================================================================
9021 * FUNCTION : addStreamConfig
9022 *
9023 * DESCRIPTION: adds the stream configuration to the array
9024 *
9025 * PARAMETERS :
9026 * @available_stream_configs : pointer to stream configuration array
9027 * @scalar_format : scalar format
9028 * @dim : configuration dimension
9029 * @config_type : input or output configuration type
9030 *
9031 * RETURN : NONE
9032 *==========================================================================*/
9033void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9034 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9035{
9036 available_stream_configs.add(scalar_format);
9037 available_stream_configs.add(dim.width);
9038 available_stream_configs.add(dim.height);
9039 available_stream_configs.add(config_type);
9040}
9041
9042/*===========================================================================
9043 * FUNCTION : supportBurstCapture
9044 *
9045 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9046 *
9047 * PARAMETERS :
9048 * @cameraId : camera Id
9049 *
9050 * RETURN : true if camera supports BURST_CAPTURE
9051 * false otherwise
9052 *==========================================================================*/
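// Decision as implemented below: reject if maximum-resolution capture is
// slower than 10 fps; accept outright if it reaches 20 fps; otherwise accept
// only if the smallest picture size at or above 3264x2448 can sustain 20 fps.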
9053bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9054{
9055 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9056 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9057 const int32_t highResWidth = 3264;
9058 const int32_t highResHeight = 2448;
9059
9060 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9061 // Maximum resolution images cannot be captured at >= 10fps
9062 // -> not supporting BURST_CAPTURE
9063 return false;
9064 }
9065
9066 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9067 // Maximum resolution images can be captured at >= 20fps
9068 // --> supporting BURST_CAPTURE
9069 return true;
9070 }
9071
9072 // Find the smallest highRes resolution, or largest resolution if there is none
9073 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9074 MAX_SIZES_CNT);
9075 size_t highRes = 0;
9076 while ((highRes + 1 < totalCnt) &&
9077 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9078 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9079 highResWidth * highResHeight)) {
9080 highRes++;
9081 }
9082 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9083 return true;
9084 } else {
9085 return false;
9086 }
9087}
9088
9089/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009090 * FUNCTION : getPDStatIndex
9091 *
9092 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9093 *
9094 * PARAMETERS :
9095 * @caps : camera capabilities
9096 *
9097 * RETURN : int32_t type
9098 * non-negative - on success
9099 * -1 - on failure
9100 *==========================================================================*/
9101int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9102 if (nullptr == caps) {
9103 return -1;
9104 }
9105
9106 uint32_t metaRawCount = caps->meta_raw_channel_count;
9107 int32_t ret = -1;
9108 for (size_t i = 0; i < metaRawCount; i++) {
9109 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9110 ret = i;
9111 break;
9112 }
9113 }
9114
9115 return ret;
9116}
9117
9118/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009119 * FUNCTION : initStaticMetadata
9120 *
9121 * DESCRIPTION: initialize the static metadata
9122 *
9123 * PARAMETERS :
9124 * @cameraId : camera Id
9125 *
9126 * RETURN : int32_t type of status
9127 * 0 -- success
9128 * non-zero failure code
9129 *==========================================================================*/
9130int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9131{
9132 int rc = 0;
9133 CameraMetadata staticInfo;
9134 size_t count = 0;
9135 bool limitedDevice = false;
9136 char prop[PROPERTY_VALUE_MAX];
9137 bool supportBurst = false;
9138
9139 supportBurst = supportBurstCapture(cameraId);
9140
9141 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9142 * guaranteed, or if the min fps of the max resolution is less than 20 fps, it is
9143 * advertised as a limited device */
9144 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9145 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9146 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9147 !supportBurst;
9148
9149 uint8_t supportedHwLvl = limitedDevice ?
9150 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009151#ifndef USE_HAL_3_3
9152 // LEVEL_3 - This device will support level 3.
9153 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9154#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009155 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009156#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009157
9158 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9159 &supportedHwLvl, 1);
9160
9161 bool facingBack = false;
9162 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9163 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9164 facingBack = true;
9165 }
9166 /*HAL 3 only*/
9167 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9168 &gCamCapability[cameraId]->min_focus_distance, 1);
9169
9170 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9171 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9172
9173 /*should be using focal lengths but sensor doesn't provide that info now*/
9174 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9175 &gCamCapability[cameraId]->focal_length,
9176 1);
9177
9178 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9179 gCamCapability[cameraId]->apertures,
9180 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9181
9182 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9183 gCamCapability[cameraId]->filter_densities,
9184 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9185
9186
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009187 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9188 size_t mode_count =
9189 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9190 for (size_t i = 0; i < mode_count; i++) {
9191 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9192 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009193 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009194 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009195
9196 int32_t lens_shading_map_size[] = {
9197 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9198 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9199 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9200 lens_shading_map_size,
9201 sizeof(lens_shading_map_size)/sizeof(int32_t));
9202
9203 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9204 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9205
9206 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9207 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9208
9209 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9210 &gCamCapability[cameraId]->max_frame_duration, 1);
9211
9212 camera_metadata_rational baseGainFactor = {
9213 gCamCapability[cameraId]->base_gain_factor.numerator,
9214 gCamCapability[cameraId]->base_gain_factor.denominator};
9215 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9216 &baseGainFactor, 1);
9217
9218 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9219 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9220
9221 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9222 gCamCapability[cameraId]->pixel_array_size.height};
9223 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9224 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9225
9226 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9227 gCamCapability[cameraId]->active_array_size.top,
9228 gCamCapability[cameraId]->active_array_size.width,
9229 gCamCapability[cameraId]->active_array_size.height};
9230 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9231 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9232
9233 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9234 &gCamCapability[cameraId]->white_level, 1);
9235
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009236 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9237 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9238 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009239 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009240 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009241
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009242#ifndef USE_HAL_3_3
9243 bool hasBlackRegions = false;
9244 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9245 LOGW("black_region_count: %d is bounded to %d",
9246 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9247 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9248 }
9249 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9250 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9251 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9252 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9253 }
9254 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9255 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9256 hasBlackRegions = true;
9257 }
9258#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009259 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9260 &gCamCapability[cameraId]->flash_charge_duration, 1);
9261
9262 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9263 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9264
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009265 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9266 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9267 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009268 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9269 &timestampSource, 1);
9270
Thierry Strudel54dc9782017-02-15 12:12:10 -08009271 //update histogram vendor data
9272 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009273 &gCamCapability[cameraId]->histogram_size, 1);
9274
Thierry Strudel54dc9782017-02-15 12:12:10 -08009275 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009276 &gCamCapability[cameraId]->max_histogram_count, 1);
9277
Shuzhen Wang14415f52016-11-16 18:26:18 -08009278 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
 9279    //so that the app can request fewer bins than the maximum supported.
9280 std::vector<int32_t> histBins;
9281 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9282 histBins.push_back(maxHistBins);
9283 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9284 (maxHistBins & 0x1) == 0) {
9285 histBins.push_back(maxHistBins >> 1);
9286 maxHistBins >>= 1;
9287 }
9288 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9289 histBins.data(), histBins.size());
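    /* Example of the resulting bin list, assuming max_histogram_count == 256
     * and MIN_CAM_HISTOGRAM_STATS_SIZE == 32 (assumed values): {256, 128, 64, 32}.
     * The halving stops at the first odd count or once the next value would
     * drop below the minimum supported stats size. */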
9290
Thierry Strudel3d639192016-09-09 11:52:26 -07009291 int32_t sharpness_map_size[] = {
9292 gCamCapability[cameraId]->sharpness_map_size.width,
9293 gCamCapability[cameraId]->sharpness_map_size.height};
9294
9295 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9296 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9297
9298 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9299 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9300
Emilian Peev0f3c3162017-03-15 12:57:46 +00009301 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9302 if (0 <= indexPD) {
9303 // Advertise PD stats data as part of the Depth capabilities
9304 int32_t depthWidth =
9305 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9306 int32_t depthHeight =
9307 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009308 int32_t depthStride =
9309 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009310 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9311 assert(0 < depthSamplesCount);
9312 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9313 &depthSamplesCount, 1);
9314
9315 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9316 depthHeight,
9317 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9318 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9319 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9320 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9321 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9322
9323 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9324 depthHeight, 33333333,
9325 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9326 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9327 depthMinDuration,
9328 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9329
9330 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9331 depthHeight, 0,
9332 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9333 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9334 depthStallDuration,
9335 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9336
9337 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9338 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009339
9340 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9341 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9342 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009343 }
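    /* Worked example of the PD sample-count math above (dimensions are
     * hypothetical): a 496x1344 PD stats buffer gives
     * depthSamplesCount = (496 * 1344 * 2) / 16 = 83328, advertised as a
     * depthSamplesCount x 1 BLOB configuration, with a stride of
     * width * 2 = 992 reported in NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS. */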
9344
Thierry Strudel3d639192016-09-09 11:52:26 -07009345 int32_t scalar_formats[] = {
9346 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9347 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9348 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9349 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9350 HAL_PIXEL_FORMAT_RAW10,
9351 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009352 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9353 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9354 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009355
9356 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9357 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9358 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9359 count, MAX_SIZES_CNT, available_processed_sizes);
9360 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9361 available_processed_sizes, count * 2);
9362
9363 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9364 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9365 makeTable(gCamCapability[cameraId]->raw_dim,
9366 count, MAX_SIZES_CNT, available_raw_sizes);
9367 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9368 available_raw_sizes, count * 2);
9369
9370 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9371 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9372 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9373 count, MAX_SIZES_CNT, available_fps_ranges);
9374 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9375 available_fps_ranges, count * 2);
9376
9377 camera_metadata_rational exposureCompensationStep = {
9378 gCamCapability[cameraId]->exp_compensation_step.numerator,
9379 gCamCapability[cameraId]->exp_compensation_step.denominator};
9380 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9381 &exposureCompensationStep, 1);
9382
9383 Vector<uint8_t> availableVstabModes;
9384 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9385 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009386 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009387 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009388 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009389 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009390 count = IS_TYPE_MAX;
9391 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9392 for (size_t i = 0; i < count; i++) {
9393 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9394 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9395 eisSupported = true;
9396 break;
9397 }
9398 }
9399 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009400 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9401 }
9402 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9403 availableVstabModes.array(), availableVstabModes.size());
9404
9405 /*HAL 1 and HAL 3 common*/
9406 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9407 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9408 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009409 // Cap the max zoom to the max preferred value
9410 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009411 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9412 &maxZoom, 1);
9413
9414 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9415 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9416
9417 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9418 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9419 max3aRegions[2] = 0; /* AF not supported */
9420 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9421 max3aRegions, 3);
9422
9423 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9424 memset(prop, 0, sizeof(prop));
9425 property_get("persist.camera.facedetect", prop, "1");
9426 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9427 LOGD("Support face detection mode: %d",
9428 supportedFaceDetectMode);
9429
9430 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009431    /* supported mode should be OFF if the max number of faces is 0 */
9432 if (maxFaces <= 0) {
9433 supportedFaceDetectMode = 0;
9434 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009435 Vector<uint8_t> availableFaceDetectModes;
9436 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9437 if (supportedFaceDetectMode == 1) {
9438 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9439 } else if (supportedFaceDetectMode == 2) {
9440 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9441 } else if (supportedFaceDetectMode == 3) {
9442 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9443 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9444 } else {
9445 maxFaces = 0;
9446 }
9447 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9448 availableFaceDetectModes.array(),
9449 availableFaceDetectModes.size());
9450 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9451 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009452 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9453 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9454 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009455
9456 int32_t exposureCompensationRange[] = {
9457 gCamCapability[cameraId]->exposure_compensation_min,
9458 gCamCapability[cameraId]->exposure_compensation_max};
9459 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9460 exposureCompensationRange,
9461 sizeof(exposureCompensationRange)/sizeof(int32_t));
9462
9463 uint8_t lensFacing = (facingBack) ?
9464 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9465 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9466
9467 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9468 available_thumbnail_sizes,
9469 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9470
9471 /*all sizes will be clubbed into this tag*/
9472 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9473 /*android.scaler.availableStreamConfigurations*/
9474 Vector<int32_t> available_stream_configs;
9475 cam_dimension_t active_array_dim;
9476 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9477 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009478
9479 /*advertise list of input dimensions supported based on below property.
9480 By default all sizes upto 5MP will be advertised.
9481 Note that the setprop resolution format should be WxH.
9482 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9483 To list all supported sizes, setprop needs to be set with "0x0" */
9484 cam_dimension_t minInputSize = {2592,1944}; //5MP
9485 memset(prop, 0, sizeof(prop));
9486 property_get("persist.camera.input.minsize", prop, "2592x1944");
9487 if (strlen(prop) > 0) {
9488 char *saveptr = NULL;
9489 char *token = strtok_r(prop, "x", &saveptr);
9490 if (token != NULL) {
9491 minInputSize.width = atoi(token);
9492 }
9493 token = strtok_r(NULL, "x", &saveptr);
9494 if (token != NULL) {
9495 minInputSize.height = atoi(token);
9496 }
9497 }
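    /* Parsing sketch for the override above (property value assumed for
     * illustration): "1280x720" is split on 'x' into width 1280 and height 720,
     * so in the loop below only picture sizes whose width or height reaches
     * that minimum are also advertised as INPUT (reprocess) configurations;
     * "0x0" keeps every size eligible. */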
9498
Thierry Strudel3d639192016-09-09 11:52:26 -07009499 /* Add input/output stream configurations for each scalar formats*/
9500 for (size_t j = 0; j < scalar_formats_count; j++) {
9501 switch (scalar_formats[j]) {
9502 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9503 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9504 case HAL_PIXEL_FORMAT_RAW10:
9505 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9506 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9507 addStreamConfig(available_stream_configs, scalar_formats[j],
9508 gCamCapability[cameraId]->raw_dim[i],
9509 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9510 }
9511 break;
9512 case HAL_PIXEL_FORMAT_BLOB:
9513 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9514 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9515 addStreamConfig(available_stream_configs, scalar_formats[j],
9516 gCamCapability[cameraId]->picture_sizes_tbl[i],
9517 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9518 }
9519 break;
9520 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9521 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9522 default:
9523 cam_dimension_t largest_picture_size;
9524 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9525 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9526 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9527 addStreamConfig(available_stream_configs, scalar_formats[j],
9528 gCamCapability[cameraId]->picture_sizes_tbl[i],
9529 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009530                /* For the below 2 formats we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009531 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9532 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009533 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9534 >= minInputSize.width) || (gCamCapability[cameraId]->
9535 picture_sizes_tbl[i].height >= minInputSize.height)) {
9536 addStreamConfig(available_stream_configs, scalar_formats[j],
9537 gCamCapability[cameraId]->picture_sizes_tbl[i],
9538 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9539 }
9540 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009541 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009542
Thierry Strudel3d639192016-09-09 11:52:26 -07009543 break;
9544 }
9545 }
9546
9547 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9548 available_stream_configs.array(), available_stream_configs.size());
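    /* Each entry appended above is one (format, width, height, direction)
     * quadruple, e.g. a hypothetical (HAL_PIXEL_FORMAT_BLOB, 4032, 3024,
     * ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) for the largest
     * JPEG size; INPUT entries are only added for the two reprocess-capable
     * YUV/opaque formats gated by minInputSize above. */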
9549 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9550 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9551
9552 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9553 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9554
9555 /* android.scaler.availableMinFrameDurations */
9556 Vector<int64_t> available_min_durations;
9557 for (size_t j = 0; j < scalar_formats_count; j++) {
9558 switch (scalar_formats[j]) {
9559 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9560 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9561 case HAL_PIXEL_FORMAT_RAW10:
9562 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9563 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9564 available_min_durations.add(scalar_formats[j]);
9565 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9566 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9567 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9568 }
9569 break;
9570 default:
9571 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9572 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9573 available_min_durations.add(scalar_formats[j]);
9574 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9575 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9576 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9577 }
9578 break;
9579 }
9580 }
9581 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9582 available_min_durations.array(), available_min_durations.size());
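    /* Each entry above is a (format, width, height, min_frame_duration_ns)
     * quadruple, e.g. (HAL_PIXEL_FORMAT_BLOB, 4032, 3024, 33333333) for a
     * hypothetical sensor whose largest JPEG size can run at 30 fps. */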
9583
9584 Vector<int32_t> available_hfr_configs;
9585 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9586 int32_t fps = 0;
9587 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9588 case CAM_HFR_MODE_60FPS:
9589 fps = 60;
9590 break;
9591 case CAM_HFR_MODE_90FPS:
9592 fps = 90;
9593 break;
9594 case CAM_HFR_MODE_120FPS:
9595 fps = 120;
9596 break;
9597 case CAM_HFR_MODE_150FPS:
9598 fps = 150;
9599 break;
9600 case CAM_HFR_MODE_180FPS:
9601 fps = 180;
9602 break;
9603 case CAM_HFR_MODE_210FPS:
9604 fps = 210;
9605 break;
9606 case CAM_HFR_MODE_240FPS:
9607 fps = 240;
9608 break;
9609 case CAM_HFR_MODE_480FPS:
9610 fps = 480;
9611 break;
9612 case CAM_HFR_MODE_OFF:
9613 case CAM_HFR_MODE_MAX:
9614 default:
9615 break;
9616 }
9617
9618 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9619 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9620 /* For each HFR frame rate, need to advertise one variable fps range
9621 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9622 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9623 * set by the app. When video recording is started, [120, 120] is
9624 * set. This way sensor configuration does not change when recording
9625 * is started */
9626
9627 /* (width, height, fps_min, fps_max, batch_size_max) */
9628 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9629 j < MAX_SIZES_CNT; j++) {
9630 available_hfr_configs.add(
9631 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9632 available_hfr_configs.add(
9633 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9634 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9635 available_hfr_configs.add(fps);
9636 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9637
9638 /* (width, height, fps_min, fps_max, batch_size_max) */
9639 available_hfr_configs.add(
9640 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9641 available_hfr_configs.add(
9642 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9643 available_hfr_configs.add(fps);
9644 available_hfr_configs.add(fps);
9645 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9646 }
9647 }
9648 }
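    /* Resulting entries for one hypothetical 1920x1080 @ 120 fps HFR mode,
     * assuming PREVIEW_FPS_FOR_HFR is 30:
     *   (1920, 1080,  30, 120, 4)   // variable range used while previewing
     *   (1920, 1080, 120, 120, 4)   // fixed range set once recording starts
     */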
9649 //Advertise HFR capability only if the property is set
9650 memset(prop, 0, sizeof(prop));
9651 property_get("persist.camera.hal3hfr.enable", prop, "1");
9652 uint8_t hfrEnable = (uint8_t)atoi(prop);
9653
9654 if(hfrEnable && available_hfr_configs.array()) {
9655 staticInfo.update(
9656 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9657 available_hfr_configs.array(), available_hfr_configs.size());
9658 }
9659
9660 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9661 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9662 &max_jpeg_size, 1);
9663
9664 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9665 size_t size = 0;
9666 count = CAM_EFFECT_MODE_MAX;
9667 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9668 for (size_t i = 0; i < count; i++) {
9669 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9670 gCamCapability[cameraId]->supported_effects[i]);
9671 if (NAME_NOT_FOUND != val) {
9672 avail_effects[size] = (uint8_t)val;
9673 size++;
9674 }
9675 }
9676 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9677 avail_effects,
9678 size);
9679
9680 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9681 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9682 size_t supported_scene_modes_cnt = 0;
9683 count = CAM_SCENE_MODE_MAX;
9684 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9685 for (size_t i = 0; i < count; i++) {
9686 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9687 CAM_SCENE_MODE_OFF) {
9688 int val = lookupFwkName(SCENE_MODES_MAP,
9689 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9690 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009691
Thierry Strudel3d639192016-09-09 11:52:26 -07009692 if (NAME_NOT_FOUND != val) {
9693 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9694 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9695 supported_scene_modes_cnt++;
9696 }
9697 }
9698 }
9699 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9700 avail_scene_modes,
9701 supported_scene_modes_cnt);
9702
9703 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9704 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9705 supported_scene_modes_cnt,
9706 CAM_SCENE_MODE_MAX,
9707 scene_mode_overrides,
9708 supported_indexes,
9709 cameraId);
9710
9711 if (supported_scene_modes_cnt == 0) {
9712 supported_scene_modes_cnt = 1;
9713 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9714 }
9715
9716 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9717 scene_mode_overrides, supported_scene_modes_cnt * 3);
9718
9719 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9720 ANDROID_CONTROL_MODE_AUTO,
9721 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9722 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9723 available_control_modes,
9724 3);
9725
9726 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9727 size = 0;
9728 count = CAM_ANTIBANDING_MODE_MAX;
9729 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9730 for (size_t i = 0; i < count; i++) {
9731 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9732 gCamCapability[cameraId]->supported_antibandings[i]);
9733 if (NAME_NOT_FOUND != val) {
9734 avail_antibanding_modes[size] = (uint8_t)val;
9735 size++;
9736 }
9737
9738 }
9739 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9740 avail_antibanding_modes,
9741 size);
9742
9743 uint8_t avail_abberation_modes[] = {
9744 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9745 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9746 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9747 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9748 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9749 if (0 == count) {
 9750        // If no aberration correction modes are available for a device, advertise only the OFF mode
9751 size = 1;
9752 } else {
 9753        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
 9754        // so advertise all 3 modes if at least one mode is supported, as per the
 9755        // new M requirement
9756 size = 3;
9757 }
9758 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9759 avail_abberation_modes,
9760 size);
9761
9762 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9763 size = 0;
9764 count = CAM_FOCUS_MODE_MAX;
9765 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9766 for (size_t i = 0; i < count; i++) {
9767 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9768 gCamCapability[cameraId]->supported_focus_modes[i]);
9769 if (NAME_NOT_FOUND != val) {
9770 avail_af_modes[size] = (uint8_t)val;
9771 size++;
9772 }
9773 }
9774 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9775 avail_af_modes,
9776 size);
9777
9778 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9779 size = 0;
9780 count = CAM_WB_MODE_MAX;
9781 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9782 for (size_t i = 0; i < count; i++) {
9783 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9784 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9785 gCamCapability[cameraId]->supported_white_balances[i]);
9786 if (NAME_NOT_FOUND != val) {
9787 avail_awb_modes[size] = (uint8_t)val;
9788 size++;
9789 }
9790 }
9791 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9792 avail_awb_modes,
9793 size);
9794
9795 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9796 count = CAM_FLASH_FIRING_LEVEL_MAX;
9797 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9798 count);
9799 for (size_t i = 0; i < count; i++) {
9800 available_flash_levels[i] =
9801 gCamCapability[cameraId]->supported_firing_levels[i];
9802 }
9803 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9804 available_flash_levels, count);
9805
9806 uint8_t flashAvailable;
9807 if (gCamCapability[cameraId]->flash_available)
9808 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9809 else
9810 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9811 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9812 &flashAvailable, 1);
9813
9814 Vector<uint8_t> avail_ae_modes;
9815 count = CAM_AE_MODE_MAX;
9816 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9817 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009818 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9819 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9820 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9821 }
9822 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009823 }
9824 if (flashAvailable) {
9825 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9826 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9827 }
9828 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9829 avail_ae_modes.array(),
9830 avail_ae_modes.size());
9831
9832 int32_t sensitivity_range[2];
9833 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9834 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9835 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9836 sensitivity_range,
9837 sizeof(sensitivity_range) / sizeof(int32_t));
9838
9839 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9840 &gCamCapability[cameraId]->max_analog_sensitivity,
9841 1);
9842
9843 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9844 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9845 &sensor_orientation,
9846 1);
9847
9848 int32_t max_output_streams[] = {
9849 MAX_STALLING_STREAMS,
9850 MAX_PROCESSED_STREAMS,
9851 MAX_RAW_STREAMS};
9852 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9853 max_output_streams,
9854 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9855
9856 uint8_t avail_leds = 0;
9857 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9858 &avail_leds, 0);
9859
9860 uint8_t focus_dist_calibrated;
9861 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9862 gCamCapability[cameraId]->focus_dist_calibrated);
9863 if (NAME_NOT_FOUND != val) {
9864 focus_dist_calibrated = (uint8_t)val;
9865 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9866 &focus_dist_calibrated, 1);
9867 }
9868
9869 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9870 size = 0;
9871 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9872 MAX_TEST_PATTERN_CNT);
9873 for (size_t i = 0; i < count; i++) {
9874 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9875 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9876 if (NAME_NOT_FOUND != testpatternMode) {
9877 avail_testpattern_modes[size] = testpatternMode;
9878 size++;
9879 }
9880 }
9881 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9882 avail_testpattern_modes,
9883 size);
9884
9885 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9886 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9887 &max_pipeline_depth,
9888 1);
9889
9890 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9891 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9892 &partial_result_count,
9893 1);
9894
9895 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9896 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9897
9898 Vector<uint8_t> available_capabilities;
9899 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9900 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9901 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9902 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9903 if (supportBurst) {
9904 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9905 }
9906 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9907 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9908 if (hfrEnable && available_hfr_configs.array()) {
9909 available_capabilities.add(
9910 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9911 }
9912
9913 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9914 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9915 }
9916 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9917 available_capabilities.array(),
9918 available_capabilities.size());
9919
 9920    //aeLockAvailable is to be set to true if capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9921 //Assumption is that all bayer cameras support MANUAL_SENSOR.
9922 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9923 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9924
9925 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9926 &aeLockAvailable, 1);
9927
 9928    //awbLockAvailable is to be set to true if capabilities include MANUAL_POST_PROCESSING or
9929 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
9930 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9931 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9932
9933 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9934 &awbLockAvailable, 1);
9935
9936 int32_t max_input_streams = 1;
9937 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9938 &max_input_streams,
9939 1);
9940
9941 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9942 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9943 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9944 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9945 HAL_PIXEL_FORMAT_YCbCr_420_888};
9946 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9947 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
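    /* Reading the map above: an IMPLEMENTATION_DEFINED input stream can be
     * reprocessed into 2 output formats (BLOB and YCbCr_420_888), and a
     * YCbCr_420_888 input stream into the same 2 output formats. */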
9948
9949 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9950 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9951 &max_latency,
9952 1);
9953
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009954#ifndef USE_HAL_3_3
9955 int32_t isp_sensitivity_range[2];
9956 isp_sensitivity_range[0] =
9957 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9958 isp_sensitivity_range[1] =
9959 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9960 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9961 isp_sensitivity_range,
9962 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9963#endif
9964
Thierry Strudel3d639192016-09-09 11:52:26 -07009965 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9966 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9967 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9968 available_hot_pixel_modes,
9969 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9970
9971 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9972 ANDROID_SHADING_MODE_FAST,
9973 ANDROID_SHADING_MODE_HIGH_QUALITY};
9974 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9975 available_shading_modes,
9976 3);
9977
9978 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9979 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9980 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9981 available_lens_shading_map_modes,
9982 2);
9983
9984 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9985 ANDROID_EDGE_MODE_FAST,
9986 ANDROID_EDGE_MODE_HIGH_QUALITY,
9987 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9988 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9989 available_edge_modes,
9990 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9991
9992 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9993 ANDROID_NOISE_REDUCTION_MODE_FAST,
9994 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9995 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9996 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9997 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9998 available_noise_red_modes,
9999 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10000
10001 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10002 ANDROID_TONEMAP_MODE_FAST,
10003 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10004 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10005 available_tonemap_modes,
10006 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10007
10008 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10009 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10010 available_hot_pixel_map_modes,
10011 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10012
10013 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10014 gCamCapability[cameraId]->reference_illuminant1);
10015 if (NAME_NOT_FOUND != val) {
10016 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10017 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10018 }
10019
10020 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10021 gCamCapability[cameraId]->reference_illuminant2);
10022 if (NAME_NOT_FOUND != val) {
10023 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10024 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10025 }
10026
10027 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10028 (void *)gCamCapability[cameraId]->forward_matrix1,
10029 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10030
10031 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10032 (void *)gCamCapability[cameraId]->forward_matrix2,
10033 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10034
10035 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10036 (void *)gCamCapability[cameraId]->color_transform1,
10037 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10038
10039 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10040 (void *)gCamCapability[cameraId]->color_transform2,
10041 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10042
10043 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10044 (void *)gCamCapability[cameraId]->calibration_transform1,
10045 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10046
10047 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10048 (void *)gCamCapability[cameraId]->calibration_transform2,
10049 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10050
10051 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10052 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10053 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10054 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10055 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10056 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10057 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10058 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10059 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10060 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10061 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10062 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10063 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10064 ANDROID_JPEG_GPS_COORDINATES,
10065 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10066 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10067 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10068 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10069 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10070 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10071 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10072 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10073 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10074 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010075#ifndef USE_HAL_3_3
10076 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10077#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010078 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010079 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010080 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10081 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010082 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010083 /* DevCamDebug metadata request_keys_basic */
10084 DEVCAMDEBUG_META_ENABLE,
10085 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010086 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010087 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010088 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010089 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010090 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010091 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010092
10093 size_t request_keys_cnt =
10094 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10095 Vector<int32_t> available_request_keys;
10096 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10097 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10098 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10099 }
10100
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010101 if (gExposeEnableZslKey) {
Chien-Yu Chen3b630e52017-06-02 15:39:47 -070010102 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || cameraId == 0) {
10103 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10104 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010105 }
10106
Thierry Strudel3d639192016-09-09 11:52:26 -070010107 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10108 available_request_keys.array(), available_request_keys.size());
10109
10110 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10111 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10112 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10113 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10114 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10115 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10116 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10117 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10118 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10119 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10120 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10121 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10122 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10123 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10124 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10125 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10126 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010127 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010128 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10129 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10130 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010131 ANDROID_STATISTICS_FACE_SCORES,
10132#ifndef USE_HAL_3_3
10133 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10134#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010135 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010136 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010137 // DevCamDebug metadata result_keys_basic
10138 DEVCAMDEBUG_META_ENABLE,
10139 // DevCamDebug metadata result_keys AF
10140 DEVCAMDEBUG_AF_LENS_POSITION,
10141 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10142 DEVCAMDEBUG_AF_TOF_DISTANCE,
10143 DEVCAMDEBUG_AF_LUMA,
10144 DEVCAMDEBUG_AF_HAF_STATE,
10145 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10146 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10147 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10148 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10149 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10150 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10151 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10152 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10153 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10154 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10155 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10156 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10157 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10158 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10159 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10160 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10161 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10162 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10163 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10164 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10165 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10166 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10167 // DevCamDebug metadata result_keys AEC
10168 DEVCAMDEBUG_AEC_TARGET_LUMA,
10169 DEVCAMDEBUG_AEC_COMP_LUMA,
10170 DEVCAMDEBUG_AEC_AVG_LUMA,
10171 DEVCAMDEBUG_AEC_CUR_LUMA,
10172 DEVCAMDEBUG_AEC_LINECOUNT,
10173 DEVCAMDEBUG_AEC_REAL_GAIN,
10174 DEVCAMDEBUG_AEC_EXP_INDEX,
10175 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010176 // DevCamDebug metadata result_keys zzHDR
10177 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10178 DEVCAMDEBUG_AEC_L_LINECOUNT,
10179 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10180 DEVCAMDEBUG_AEC_S_LINECOUNT,
10181 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10182 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10183 // DevCamDebug metadata result_keys ADRC
10184 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10185 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10186 DEVCAMDEBUG_AEC_GTM_RATIO,
10187 DEVCAMDEBUG_AEC_LTM_RATIO,
10188 DEVCAMDEBUG_AEC_LA_RATIO,
10189 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010190 // DevCamDebug metadata result_keys AWB
10191 DEVCAMDEBUG_AWB_R_GAIN,
10192 DEVCAMDEBUG_AWB_G_GAIN,
10193 DEVCAMDEBUG_AWB_B_GAIN,
10194 DEVCAMDEBUG_AWB_CCT,
10195 DEVCAMDEBUG_AWB_DECISION,
10196 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010197 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10198 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10199 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010200 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010201 };
10202
Thierry Strudel3d639192016-09-09 11:52:26 -070010203 size_t result_keys_cnt =
10204 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10205
10206 Vector<int32_t> available_result_keys;
10207 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10208 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10209 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10210 }
10211 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10212 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10213 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10214 }
10215 if (supportedFaceDetectMode == 1) {
10216 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10217 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10218 } else if ((supportedFaceDetectMode == 2) ||
10219 (supportedFaceDetectMode == 3)) {
10220 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10221 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10222 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010223#ifndef USE_HAL_3_3
10224 if (hasBlackRegions) {
10225 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10226 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10227 }
10228#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010229
10230 if (gExposeEnableZslKey) {
10231 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10232 }
10233
Thierry Strudel3d639192016-09-09 11:52:26 -070010234 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10235 available_result_keys.array(), available_result_keys.size());
10236
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010237 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010238 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10239 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10240 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10241 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10242 ANDROID_SCALER_CROPPING_TYPE,
10243 ANDROID_SYNC_MAX_LATENCY,
10244 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10245 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10246 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10247 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10248 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10249 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10250 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10251 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10252 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10253 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10254 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10255 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10256 ANDROID_LENS_FACING,
10257 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10258 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10259 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10260 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10261 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10262 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10263 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10264 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10265 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10266 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10267 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10268 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10269 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10270 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10271 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10272 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10273 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10274 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10275 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10276 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010277 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010278 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10279 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10280 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10281 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10282 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10283 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10284 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10285 ANDROID_CONTROL_AVAILABLE_MODES,
10286 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10287 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10288 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10289 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010290 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10291#ifndef USE_HAL_3_3
10292 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10293 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10294#endif
10295 };
10296
10297 Vector<int32_t> available_characteristics_keys;
10298 available_characteristics_keys.appendArray(characteristics_keys_basic,
10299 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10300#ifndef USE_HAL_3_3
10301 if (hasBlackRegions) {
10302 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10303 }
10304#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010305
10306 if (0 <= indexPD) {
10307 int32_t depthKeys[] = {
10308 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10309 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10310 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10311 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10312 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10313 };
10314 available_characteristics_keys.appendArray(depthKeys,
10315 sizeof(depthKeys) / sizeof(depthKeys[0]));
10316 }
10317
Thierry Strudel3d639192016-09-09 11:52:26 -070010318 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010319 available_characteristics_keys.array(),
10320 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010321
10322 /*available stall durations depend on the hw + sw and will be different for different devices */
10323 /*have to add for raw after implementation*/
10324 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10325 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10326
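    // Each ANDROID_SCALER_AVAILABLE_STALL_DURATIONS entry is a tuple of four
    // int64 values: (format, width, height, stall duration in ns), so the
    // vector below grows by four elements per advertised size.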
10327 Vector<int64_t> available_stall_durations;
10328 for (uint32_t j = 0; j < stall_formats_count; j++) {
10329 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10330 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10331 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10332 available_stall_durations.add(stall_formats[j]);
10333 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10334 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10335 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10336 }
10337 } else {
10338 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10339 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10340 available_stall_durations.add(stall_formats[j]);
10341 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10342 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10343 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10344 }
10345 }
10346 }
10347 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10348 available_stall_durations.array(),
10349 available_stall_durations.size());
10350
10351 //QCAMERA3_OPAQUE_RAW
10352 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10353 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10354 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10355 case LEGACY_RAW:
10356 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10357 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10358 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10359 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10360 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10361 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10362 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10363 break;
10364 case MIPI_RAW:
10365 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10366 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10367 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10368 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10369 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10370 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10371 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10372 break;
10373 default:
10374 LOGE("unknown opaque_raw_format %d",
10375 gCamCapability[cameraId]->opaque_raw_fmt);
10376 break;
10377 }
10378 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10379
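    // QCAMERA3_OPAQUE_RAW_STRIDES is populated below as (width, height, stride)
    // triples, with the stride taken from the plane info that
    // mm_stream_calc_offset_raw() computes for the opaque raw format chosen above.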
10380 Vector<int32_t> strides;
10381 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10382 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10383 cam_stream_buf_plane_info_t buf_planes;
10384 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10385 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10386 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10387 &gCamCapability[cameraId]->padding_info, &buf_planes);
10388 strides.add(buf_planes.plane_info.mp[0].stride);
10389 }
10390 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10391 strides.size());
10392
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010393 //TBD: remove the following line once backend advertises zzHDR in feature mask
10394 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010395 //Video HDR default
10396 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10397 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010398 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010399 int32_t vhdr_mode[] = {
10400 QCAMERA3_VIDEO_HDR_MODE_OFF,
10401 QCAMERA3_VIDEO_HDR_MODE_ON};
10402
10403 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10404 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10405 vhdr_mode, vhdr_mode_count);
10406 }
10407
Thierry Strudel3d639192016-09-09 11:52:26 -070010408 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10409 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10410 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10411
10412 uint8_t isMonoOnly =
10413 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10414 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10415 &isMonoOnly, 1);
10416
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010417#ifndef USE_HAL_3_3
10418 Vector<int32_t> opaque_size;
10419 for (size_t j = 0; j < scalar_formats_count; j++) {
10420 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10421 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10422 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10423 cam_stream_buf_plane_info_t buf_planes;
10424
10425 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10426 &gCamCapability[cameraId]->padding_info, &buf_planes);
10427
10428 if (rc == 0) {
10429 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10430 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10431 opaque_size.add(buf_planes.plane_info.frame_len);
10432 } else {
10433 LOGE("raw frame calculation failed!");
10434 }
10435 }
10436 }
10437 }
10438
10439 if ((opaque_size.size() > 0) &&
10440 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10441 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10442 else
10443 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10444#endif
10445
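    // The capability blocks below share one pattern: each backend enum list is
    // filtered through lookupFwkName() against its METADATA_MAP so that only
    // values with a framework mapping are advertised in the static metadata.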
Thierry Strudel04e026f2016-10-10 11:27:36 -070010446 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10447 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10448 size = 0;
10449 count = CAM_IR_MODE_MAX;
10450 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10451 for (size_t i = 0; i < count; i++) {
10452 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10453 gCamCapability[cameraId]->supported_ir_modes[i]);
10454 if (NAME_NOT_FOUND != val) {
10455 avail_ir_modes[size] = (int32_t)val;
10456 size++;
10457 }
10458 }
10459 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10460 avail_ir_modes, size);
10461 }
10462
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010463 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10464 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10465 size = 0;
10466 count = CAM_AEC_CONVERGENCE_MAX;
10467 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10468 for (size_t i = 0; i < count; i++) {
10469 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10470 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10471 if (NAME_NOT_FOUND != val) {
10472 available_instant_aec_modes[size] = (int32_t)val;
10473 size++;
10474 }
10475 }
10476 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10477 available_instant_aec_modes, size);
10478 }
10479
Thierry Strudel54dc9782017-02-15 12:12:10 -080010480 int32_t sharpness_range[] = {
10481 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10482 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10483 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10484
10485 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10486 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10487 size = 0;
10488 count = CAM_BINNING_CORRECTION_MODE_MAX;
10489 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10490 for (size_t i = 0; i < count; i++) {
10491 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10492 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10493 gCamCapability[cameraId]->supported_binning_modes[i]);
10494 if (NAME_NOT_FOUND != val) {
10495 avail_binning_modes[size] = (int32_t)val;
10496 size++;
10497 }
10498 }
10499 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10500 avail_binning_modes, size);
10501 }
10502
10503 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10504 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10505 size = 0;
10506 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10507 for (size_t i = 0; i < count; i++) {
10508 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10509 gCamCapability[cameraId]->supported_aec_modes[i]);
10510 if (NAME_NOT_FOUND != val)
10511 available_aec_modes[size++] = val;
10512 }
10513 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10514 available_aec_modes, size);
10515 }
10516
10517 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10518 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10519 size = 0;
10520 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10521 for (size_t i = 0; i < count; i++) {
10522 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10523 gCamCapability[cameraId]->supported_iso_modes[i]);
10524 if (NAME_NOT_FOUND != val)
10525 available_iso_modes[size++] = val;
10526 }
10527 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10528 available_iso_modes, size);
10529 }
10530
10531 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010532 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010533 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10534 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10535 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10536
10537 int32_t available_saturation_range[4];
10538 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10539 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10540 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10541 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10542 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10543 available_saturation_range, 4);
10544
10545 uint8_t is_hdr_values[2];
10546 is_hdr_values[0] = 0;
10547 is_hdr_values[1] = 1;
10548 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10549 is_hdr_values, 2);
10550
10551 float is_hdr_confidence_range[2];
10552 is_hdr_confidence_range[0] = 0.0;
10553 is_hdr_confidence_range[1] = 1.0;
10554 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10555 is_hdr_confidence_range, 2);
10556
Emilian Peev0a972ef2017-03-16 10:25:53 +000010557 size_t eepromLength = strnlen(
10558 reinterpret_cast<const char *>(
10559 gCamCapability[cameraId]->eeprom_version_info),
10560 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10561 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010562 char easelInfo[] = ",E:N";
10563 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10564 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10565 eepromLength += sizeof(easelInfo);
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010566 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
10567 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E:Y" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010568 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010569 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010570 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10571 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10572 }
10573
Thierry Strudel3d639192016-09-09 11:52:26 -070010574 gStaticMetadata[cameraId] = staticInfo.release();
10575 return rc;
10576}
10577
10578/*===========================================================================
10579 * FUNCTION : makeTable
10580 *
10581 * DESCRIPTION: make a table of sizes
10582 *
10583 * PARAMETERS :
10584 *   @dimTable, @size : source dimension table and its valid entry count
10585 *   @max_size, @sizeTable : output capacity and flattened (width, height) pairs
10586 *==========================================================================*/
10587void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10588 size_t max_size, int32_t *sizeTable)
10589{
10590 size_t j = 0;
10591 if (size > max_size) {
10592 size = max_size;
10593 }
10594 for (size_t i = 0; i < size; i++) {
10595 sizeTable[j] = dimTable[i].width;
10596 sizeTable[j+1] = dimTable[i].height;
10597 j+=2;
10598 }
10599}
10600
10601/*===========================================================================
10602 * FUNCTION : makeFPSTable
10603 *
10604 * DESCRIPTION: make a table of fps ranges
10605 *
10606 * PARAMETERS :
10607 *   @fpsTable, @size : source fps ranges and count; @max_size, @fpsRangesTable : output capacity and (min, max) pairs
10608 *==========================================================================*/
10609void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10610 size_t max_size, int32_t *fpsRangesTable)
10611{
10612 size_t j = 0;
10613 if (size > max_size) {
10614 size = max_size;
10615 }
10616 for (size_t i = 0; i < size; i++) {
10617 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10618 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10619 j+=2;
10620 }
10621}
10622
10623/*===========================================================================
10624 * FUNCTION : makeOverridesList
10625 *
10626 * DESCRIPTION: make a list of scene mode overrides
10627 *
10628 * PARAMETERS :
10629 *   @overridesTable, @size, @max_size : backend override table, its valid count, output capacity
10630 *   @overridesList, @supported_indexes, @camera_id : output (ae, awb, af) triples, fwk scene mode indexes, camera id
10631 *==========================================================================*/
10632void QCamera3HardwareInterface::makeOverridesList(
10633 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10634 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10635{
10636 /*daemon will give a list of overrides for all scene modes.
10637 However we should send the fwk only the overrides for the scene modes
10638 supported by the framework*/
10639 size_t j = 0;
10640 if (size > max_size) {
10641 size = max_size;
10642 }
10643 size_t focus_count = CAM_FOCUS_MODE_MAX;
10644 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10645 focus_count);
10646 for (size_t i = 0; i < size; i++) {
10647 bool supt = false;
10648 size_t index = supported_indexes[i];
10649 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10650 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10651 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10652 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10653 overridesTable[index].awb_mode);
10654 if (NAME_NOT_FOUND != val) {
10655 overridesList[j+1] = (uint8_t)val;
10656 }
10657 uint8_t focus_override = overridesTable[index].af_mode;
10658 for (size_t k = 0; k < focus_count; k++) {
10659 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10660 supt = true;
10661 break;
10662 }
10663 }
10664 if (supt) {
10665 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10666 focus_override);
10667 if (NAME_NOT_FOUND != val) {
10668 overridesList[j+2] = (uint8_t)val;
10669 }
10670 } else {
10671 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10672 }
10673 j+=3;
10674 }
10675}
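
// Layout note (illustrative indexing, not part of the build): the list filled
// above consists of consecutive (aeMode, awbMode, afMode) triples, one per
// supported scene mode, i.e. for scene mode index i:
//   aeMode  = overridesList[3 * i + 0];
//   awbMode = overridesList[3 * i + 1];
//   afMode  = overridesList[3 * i + 2];
// which matches the layout expected by ANDROID_CONTROL_SCENE_MODE_OVERRIDES.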
10676
10677/*===========================================================================
10678 * FUNCTION : filterJpegSizes
10679 *
10680 * DESCRIPTION: Returns the supported JPEG sizes, restricted to sizes that can
10681 * be produced by downscaling the active array by at most downscale_factor
10682 *
10683 * PARAMETERS :
10684 *
10685 * RETURN : length of jpegSizes array
10686 *==========================================================================*/
10687
10688size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10689 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10690 uint8_t downscale_factor)
10691{
10692 if (0 == downscale_factor) {
10693 downscale_factor = 1;
10694 }
10695
10696 int32_t min_width = active_array_size.width / downscale_factor;
10697 int32_t min_height = active_array_size.height / downscale_factor;
10698 size_t jpegSizesCnt = 0;
10699 if (processedSizesCnt > maxCount) {
10700 processedSizesCnt = maxCount;
10701 }
10702 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10703 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10704 jpegSizes[jpegSizesCnt] = processedSizes[i];
10705 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10706 jpegSizesCnt += 2;
10707 }
10708 }
10709 return jpegSizesCnt;
10710}
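
// Worked example (illustrative numbers): with a 4000x3000 active array and
// downscale_factor 4, min_width/min_height become 1000x750, so only processed
// sizes of at least 1000x750 are copied into jpegSizes; smaller entries are
// dropped and the returned count is twice the number of kept sizes.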
10711
10712/*===========================================================================
10713 * FUNCTION : computeNoiseModelEntryS
10714 *
10715 * DESCRIPTION: function to map a given sensitivity to the S noise
10716 * model parameters in the DNG noise model.
10717 *
10718 * PARAMETERS : sens : the sensor sensitivity
10719 *
10720 * RETURN : S (sensor amplification) noise
10721 *
10722 *==========================================================================*/
10723double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10724 double s = gCamCapability[mCameraId]->gradient_S * sens +
10725 gCamCapability[mCameraId]->offset_S;
10726 return ((s < 0.0) ? 0.0 : s);
10727}
10728
10729/*===========================================================================
10730 * FUNCTION : computeNoiseModelEntryO
10731 *
10732 * DESCRIPTION: function to map a given sensitivity to the O noise
10733 * model parameters in the DNG noise model.
10734 *
10735 * PARAMETERS : sens : the sensor sensitivity
10736 *
10737 * RETURN : O (sensor readout) noise
10738 *
10739 *==========================================================================*/
10740double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10741 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10742 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10743 1.0 : (1.0 * sens / max_analog_sens);
10744 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10745 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10746 return ((o < 0.0) ? 0.0 : o);
10747}
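
// The two helpers above produce the S and O coefficients of the DNG noise
// model (as used by android.sensor.noiseProfile), where the modeled noise
// standard deviation at signal level x is sqrt(S * x + O).
// Illustrative sketch of assembling a per-channel profile (the channel count
// and the surrounding result plumbing are assumptions, not code from this HAL):
//   double noise_profile[2 * numChannels];
//   for (int i = 0; i < numChannels; i++) {
//       noise_profile[2 * i]     = computeNoiseModelEntryS(sensitivity);
//       noise_profile[2 * i + 1] = computeNoiseModelEntryO(sensitivity);
//   }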
10748
10749/*===========================================================================
10750 * FUNCTION : getSensorSensitivity
10751 *
10752 * DESCRIPTION: convert iso_mode to an integer value
10753 *
10754 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10755 *
10756 * RETURN : sensitivity supported by sensor
10757 *
10758 *==========================================================================*/
10759int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10760{
10761 int32_t sensitivity;
10762
10763 switch (iso_mode) {
10764 case CAM_ISO_MODE_100:
10765 sensitivity = 100;
10766 break;
10767 case CAM_ISO_MODE_200:
10768 sensitivity = 200;
10769 break;
10770 case CAM_ISO_MODE_400:
10771 sensitivity = 400;
10772 break;
10773 case CAM_ISO_MODE_800:
10774 sensitivity = 800;
10775 break;
10776 case CAM_ISO_MODE_1600:
10777 sensitivity = 1600;
10778 break;
10779 default:
10780 sensitivity = -1;
10781 break;
10782 }
10783 return sensitivity;
10784}
10785
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010786int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010787 if (gEaselManagerClient == nullptr) {
10788 gEaselManagerClient = EaselManagerClient::create();
10789 if (gEaselManagerClient == nullptr) {
10790 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10791 return -ENODEV;
10792 }
10793 }
10794
10795 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010796 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10797 // to connect to Easel.
10798 bool doNotpowerOnEasel =
10799 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10800
10801 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010802 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10803 return OK;
10804 }
10805
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010806 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010807 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010808 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010809 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010810 return res;
10811 }
10812
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010813 EaselManagerClientOpened = true;
10814
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010815 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010816 if (res != OK) {
10817 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10818 }
10819
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010820 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010821 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010822
10823 // Expose enableZsl key only when HDR+ mode is enabled.
10824 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010825 }
10826
10827 return OK;
10828}
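
// Bring-up note (illustrative shell usage; the property names are the ones read
// in the code above): the Easel/HDR+ behavior can be toggled with, for example,
//   adb shell setprop persist.camera.hdrplus.enable 1        # enable HDR+ (disable bypass-only)
//   adb shell setprop persist.camera.hdrplus.profiling 1     # enable HDR+ profiling
//   adb shell setprop camera.hdrplus.donotpoweroneasel 1     # keep Easel powered off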
10829
Thierry Strudel3d639192016-09-09 11:52:26 -070010830/*===========================================================================
10831 * FUNCTION : getCamInfo
10832 *
10833 * DESCRIPTION: query camera capabilities
10834 *
10835 * PARAMETERS :
10836 * @cameraId : camera Id
10837 * @info : camera info struct to be filled in with camera capabilities
10838 *
10839 * RETURN : int type of status
10840 * NO_ERROR -- success
10841 * none-zero failure code
10842 *==========================================================================*/
10843int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10844 struct camera_info *info)
10845{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010846 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010847 int rc = 0;
10848
10849 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010850
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010851 {
10852 Mutex::Autolock l(gHdrPlusClientLock);
10853 rc = initHdrPlusClientLocked();
10854 if (rc != OK) {
10855 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10856 pthread_mutex_unlock(&gCamLock);
10857 return rc;
10858 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010859 }
10860
Thierry Strudel3d639192016-09-09 11:52:26 -070010861 if (NULL == gCamCapability[cameraId]) {
10862 rc = initCapabilities(cameraId);
10863 if (rc < 0) {
10864 pthread_mutex_unlock(&gCamLock);
10865 return rc;
10866 }
10867 }
10868
10869 if (NULL == gStaticMetadata[cameraId]) {
10870 rc = initStaticMetadata(cameraId);
10871 if (rc < 0) {
10872 pthread_mutex_unlock(&gCamLock);
10873 return rc;
10874 }
10875 }
10876
10877 switch(gCamCapability[cameraId]->position) {
10878 case CAM_POSITION_BACK:
10879 case CAM_POSITION_BACK_AUX:
10880 info->facing = CAMERA_FACING_BACK;
10881 break;
10882
10883 case CAM_POSITION_FRONT:
10884 case CAM_POSITION_FRONT_AUX:
10885 info->facing = CAMERA_FACING_FRONT;
10886 break;
10887
10888 default:
10889 LOGE("Unknown position type %d for camera id:%d",
10890 gCamCapability[cameraId]->position, cameraId);
10891 rc = -1;
10892 break;
10893 }
10894
10895
10896 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010897#ifndef USE_HAL_3_3
10898 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10899#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010900 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010901#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010902 info->static_camera_characteristics = gStaticMetadata[cameraId];
10903
10904 //For now assume both cameras can operate independently.
10905 info->conflicting_devices = NULL;
10906 info->conflicting_devices_length = 0;
10907
10908 //resource cost is 100 * MIN(1.0, m/M),
10909 //where m is throughput requirement with maximum stream configuration
10910 //and M is CPP maximum throughput.
10911 float max_fps = 0.0;
10912 for (uint32_t i = 0;
10913 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10914 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10915 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10916 }
10917 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10918 gCamCapability[cameraId]->active_array_size.width *
10919 gCamCapability[cameraId]->active_array_size.height * max_fps /
10920 gCamCapability[cameraId]->max_pixel_bandwidth;
10921 info->resource_cost = 100 * MIN(1.0, ratio);
10922 LOGI("camera %d resource cost is %d", cameraId,
10923 info->resource_cost);
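
    // Worked example with made-up numbers: MAX_PROCESSED_STREAMS = 2, a
    // 4000x3000 active array, max_fps = 30 and a max_pixel_bandwidth of
    // 1.44e9 pixels/s give ratio = 2*4000*3000*30 / 1.44e9 = 0.5, i.e. a
    // reported resource_cost of 50.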
10924
10925 pthread_mutex_unlock(&gCamLock);
10926 return rc;
10927}
10928
10929/*===========================================================================
10930 * FUNCTION : translateCapabilityToMetadata
10931 *
10932 * DESCRIPTION: translate the capability into camera_metadata_t
10933 *
10934 * PARAMETERS : type of the request
10935 *
10936 *
10937 * RETURN : success: camera_metadata_t*
10938 * failure: NULL
10939 *
10940 *==========================================================================*/
10941camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10942{
10943 if (mDefaultMetadata[type] != NULL) {
10944 return mDefaultMetadata[type];
10945 }
10946 //first time we are handling this request
10947 //fill up the metadata structure using the wrapper class
10948 CameraMetadata settings;
10949 //translate from cam_capability_t to camera_metadata_tag_t
10950 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10951 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10952 int32_t defaultRequestID = 0;
10953 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10954
10955 /* OIS disable */
10956 char ois_prop[PROPERTY_VALUE_MAX];
10957 memset(ois_prop, 0, sizeof(ois_prop));
10958 property_get("persist.camera.ois.disable", ois_prop, "0");
10959 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10960
10961 /* Force video to use OIS */
10962 char videoOisProp[PROPERTY_VALUE_MAX];
10963 memset(videoOisProp, 0, sizeof(videoOisProp));
10964 property_get("persist.camera.ois.video", videoOisProp, "1");
10965 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010966
10967 // Hybrid AE enable/disable
10968 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10969 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10970 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10971 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10972
Thierry Strudel3d639192016-09-09 11:52:26 -070010973 uint8_t controlIntent = 0;
10974 uint8_t focusMode;
10975 uint8_t vsMode;
10976 uint8_t optStabMode;
10977 uint8_t cacMode;
10978 uint8_t edge_mode;
10979 uint8_t noise_red_mode;
10980 uint8_t tonemap_mode;
10981 bool highQualityModeEntryAvailable = FALSE;
10982 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010983 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010984 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10985 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010986 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010987 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010988 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010989
Thierry Strudel3d639192016-09-09 11:52:26 -070010990 switch (type) {
10991 case CAMERA3_TEMPLATE_PREVIEW:
10992 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10993 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10994 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10995 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10996 edge_mode = ANDROID_EDGE_MODE_FAST;
10997 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10998 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10999 break;
11000 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11001 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11002 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11003 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11004 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11005 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11006 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11007 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11008 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11009 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11010 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11011 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11012 highQualityModeEntryAvailable = TRUE;
11013 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11014 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11015 fastModeEntryAvailable = TRUE;
11016 }
11017 }
11018 if (highQualityModeEntryAvailable) {
11019 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11020 } else if (fastModeEntryAvailable) {
11021 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11022 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011023 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11024 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11025 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011026 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011027 break;
11028 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11029 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11030 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11031 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011032 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11033 edge_mode = ANDROID_EDGE_MODE_FAST;
11034 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11035 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11036 if (forceVideoOis)
11037 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11038 break;
11039 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11040 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11041 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11042 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011043 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11044 edge_mode = ANDROID_EDGE_MODE_FAST;
11045 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11046 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11047 if (forceVideoOis)
11048 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11049 break;
11050 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11051 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11052 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11053 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11054 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11055 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11056 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11057 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11058 break;
11059 case CAMERA3_TEMPLATE_MANUAL:
11060 edge_mode = ANDROID_EDGE_MODE_FAST;
11061 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11062 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11063 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11064 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11065 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11066 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11067 break;
11068 default:
11069 edge_mode = ANDROID_EDGE_MODE_FAST;
11070 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11071 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11072 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11073 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11074 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11075 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11076 break;
11077 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011078 // Set CAC to OFF if underlying device doesn't support
11079 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11080 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11081 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011082 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11083 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11084 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11085 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11086 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11087 }
11088 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011089 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011090 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011091
11092 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11093 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11094 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11095 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11096 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11097 || ois_disable)
11098 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11099 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011100 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011101
11102 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11103 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11104
11105 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11106 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11107
11108 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11109 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11110
11111 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11112 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11113
11114 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11115 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11116
11117 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11118 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11119
11120 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11121 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11122
11123 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11124 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11125
11126 /*flash*/
11127 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11128 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11129
11130 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11131 settings.update(ANDROID_FLASH_FIRING_POWER,
11132 &flashFiringLevel, 1);
11133
11134 /* lens */
11135 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11136 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11137
11138 if (gCamCapability[mCameraId]->filter_densities_count) {
11139 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11140 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11141 gCamCapability[mCameraId]->filter_densities_count);
11142 }
11143
11144 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11145 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11146
Thierry Strudel3d639192016-09-09 11:52:26 -070011147 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11148 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11149
11150 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11151 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11152
11153 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11154 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11155
11156 /* face detection (default to OFF) */
11157 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11158 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11159
Thierry Strudel54dc9782017-02-15 12:12:10 -080011160 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11161 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011162
11163 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11164 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11165
11166 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11167 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11168
Thierry Strudel3d639192016-09-09 11:52:26 -070011169
11170 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11171 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11172
11173 /* Exposure time(Update the Min Exposure Time)*/
11174 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11175 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11176
11177 /* frame duration */
11178 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11179 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11180
11181 /* sensitivity */
11182 static const int32_t default_sensitivity = 100;
11183 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011184#ifndef USE_HAL_3_3
11185 static const int32_t default_isp_sensitivity =
11186 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11187 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11188#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011189
11190 /*edge mode*/
11191 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11192
11193 /*noise reduction mode*/
11194 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11195
11196 /*color correction mode*/
11197 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11198 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11199
11200 /*transform matrix mode*/
11201 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11202
11203 int32_t scaler_crop_region[4];
11204 scaler_crop_region[0] = 0;
11205 scaler_crop_region[1] = 0;
11206 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11207 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11208 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11209
11210 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11211 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11212
11213 /*focus distance*/
11214 float focus_distance = 0.0;
11215 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11216
11217 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011218 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011219 float max_range = 0.0;
11220 float max_fixed_fps = 0.0;
11221 int32_t fps_range[2] = {0, 0};
11222 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11223 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011224 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11225 TEMPLATE_MAX_PREVIEW_FPS) {
11226 continue;
11227 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011228 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11229 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11230 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11231 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11232 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11233 if (range > max_range) {
11234 fps_range[0] =
11235 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11236 fps_range[1] =
11237 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11238 max_range = range;
11239 }
11240 } else {
11241 if (range < 0.01 && max_fixed_fps <
11242 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11243 fps_range[0] =
11244 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11245 fps_range[1] =
11246 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11247 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11248 }
11249 }
11250 }
11251 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
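
    // Example with an illustrative fps table of {15,30}, {30,30} and {7,30}:
    // preview/still/ZSL templates pick {7,30} (the widest span within the
    // TEMPLATE_MAX_PREVIEW_FPS cap), while video templates pick {30,30}
    // (the highest fixed min==max range).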
11252
11253 /*precapture trigger*/
11254 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11255 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11256
11257 /*af trigger*/
11258 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11259 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11260
11261 /* ae & af regions */
11262 int32_t active_region[] = {
11263 gCamCapability[mCameraId]->active_array_size.left,
11264 gCamCapability[mCameraId]->active_array_size.top,
11265 gCamCapability[mCameraId]->active_array_size.left +
11266 gCamCapability[mCameraId]->active_array_size.width,
11267 gCamCapability[mCameraId]->active_array_size.top +
11268 gCamCapability[mCameraId]->active_array_size.height,
11269 0};
11270 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11271 sizeof(active_region) / sizeof(active_region[0]));
11272 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11273 sizeof(active_region) / sizeof(active_region[0]));
11274
11275 /* black level lock */
11276 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11277 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11278
Thierry Strudel3d639192016-09-09 11:52:26 -070011279 //special defaults for manual template
11280 if (type == CAMERA3_TEMPLATE_MANUAL) {
11281 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11282 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11283
11284 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11285 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11286
11287 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11288 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11289
11290 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11291 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11292
11293 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11294 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11295
11296 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11297 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11298 }
11299
11300
11301 /* TNR
11302 * This is where we decide for which templates TNR is enabled by default.
11303 * TNR is enabled if either the preview or the video stream requires it.
11304 * This is not to be confused with per-stream TNR linking; that decision
11305 * remains per-session and is handled as part of stream configuration.
11306 */
11307 uint8_t tnr_enable = 0;
11308
11309 if (m_bTnrPreview || m_bTnrVideo) {
11310
11311 switch (type) {
11312 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11313 tnr_enable = 1;
11314 break;
11315
11316 default:
11317 tnr_enable = 0;
11318 break;
11319 }
11320
11321 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11322 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11323 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11324
11325 LOGD("TNR:%d with process plate %d for template:%d",
11326 tnr_enable, tnr_process_type, type);
11327 }
11328
11329 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011330 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011331 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11332
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011333 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011334 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11335
Shuzhen Wang920ea402017-05-03 08:49:39 -070011336 uint8_t related_camera_id = mCameraId;
11337 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011338
11339 /* CDS default */
11340 char prop[PROPERTY_VALUE_MAX];
11341 memset(prop, 0, sizeof(prop));
11342 property_get("persist.camera.CDS", prop, "Auto");
11343 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11344 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11345 if (CAM_CDS_MODE_MAX == cds_mode) {
11346 cds_mode = CAM_CDS_MODE_AUTO;
11347 }
11348
11349 /* Disabling CDS in templates which have TNR enabled*/
11350 if (tnr_enable)
11351 cds_mode = CAM_CDS_MODE_OFF;
11352
11353 int32_t mode = cds_mode;
11354 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011355
Thierry Strudel269c81a2016-10-12 12:13:59 -070011356 /* Manual Convergence AEC Speed is disabled by default*/
11357 float default_aec_speed = 0;
11358 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11359
11360 /* Manual Convergence AWB Speed is disabled by default*/
11361 float default_awb_speed = 0;
11362 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11363
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011364 // Set instant AEC to normal convergence by default
11365 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11366 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11367
Shuzhen Wang19463d72016-03-08 11:09:52 -080011368 /* hybrid ae */
11369 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11370
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011371 if (gExposeEnableZslKey) {
11372 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11373 }
11374
Thierry Strudel3d639192016-09-09 11:52:26 -070011375 mDefaultMetadata[type] = settings.release();
11376
11377 return mDefaultMetadata[type];
11378}
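
// Note: defaults are built lazily. The first call for a given
// CAMERA3_TEMPLATE_* type runs the translation above and caches the result in
// mDefaultMetadata[type]; subsequent calls return the cached metadata directly.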
11379
11380/*===========================================================================
11381 * FUNCTION : setFrameParameters
11382 *
11383 * DESCRIPTION: set parameters per frame as requested in the metadata from
11384 * framework
11385 *
11386 * PARAMETERS :
11387 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011388 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011389 * @blob_request: Whether this request is a blob request or not
11390 *
11391 * RETURN : success: NO_ERROR
11392 * failure: non-zero error code (e.g. BAD_VALUE)
11393 *==========================================================================*/
11394int QCamera3HardwareInterface::setFrameParameters(
11395 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011396 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011397 int blob_request,
11398 uint32_t snapshotStreamId)
11399{
11400 /*translate from camera_metadata_t type to parm_type_t*/
11401 int rc = 0;
11402 int32_t hal_version = CAM_HAL_V3;
11403
11404 clear_metadata_buffer(mParameters);
11405 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11406 LOGE("Failed to set hal version in the parameters");
11407 return BAD_VALUE;
11408 }
11409
11410 /*we need to update the frame number in the parameters*/
11411 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11412 request->frame_number)) {
11413 LOGE("Failed to set the frame number in the parameters");
11414 return BAD_VALUE;
11415 }
11416
11417 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011418 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011419 LOGE("Failed to set stream type mask in the parameters");
11420 return BAD_VALUE;
11421 }
11422
11423 if (mUpdateDebugLevel) {
11424 uint32_t dummyDebugLevel = 0;
11425 * The value of dummyDebugLevel is irrelevant. On
11426 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11427 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11428 dummyDebugLevel)) {
11429 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11430 return BAD_VALUE;
11431 }
11432 mUpdateDebugLevel = false;
11433 }
11434
11435 if(request->settings != NULL){
11436 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11437 if (blob_request)
11438 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11439 }
11440
11441 return rc;
11442}
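
// Note: every per-frame batch carries the HAL version, frame number and the
// requested stream ID list; request->settings is translated only when it is
// non-NULL (NULL settings mean "reuse the previous settings" in the camera3
// API), and blob requests additionally snapshot the translated batch into
// mPrevParameters.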
11443
11444/*===========================================================================
11445 * FUNCTION : setReprocParameters
11446 *
11447 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11448 * return it.
11449 *
11450 * PARAMETERS :
11451 * @request : request that needs to be serviced
11452 *
11453 * RETURN : success: NO_ERROR
11454 * failure: non-zero error code (e.g. BAD_VALUE)
11455 *==========================================================================*/
11456int32_t QCamera3HardwareInterface::setReprocParameters(
11457 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11458 uint32_t snapshotStreamId)
11459{
11460 /*translate from camera_metadata_t type to parm_type_t*/
11461 int rc = 0;
11462
11463 if (NULL == request->settings){
11464 LOGE("Reprocess settings cannot be NULL");
11465 return BAD_VALUE;
11466 }
11467
11468 if (NULL == reprocParam) {
11469 LOGE("Invalid reprocessing metadata buffer");
11470 return BAD_VALUE;
11471 }
11472 clear_metadata_buffer(reprocParam);
11473
11474 /*we need to update the frame number in the parameters*/
11475 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11476 request->frame_number)) {
11477 LOGE("Failed to set the frame number in the parameters");
11478 return BAD_VALUE;
11479 }
11480
11481 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11482 if (rc < 0) {
11483 LOGE("Failed to translate reproc request");
11484 return rc;
11485 }
11486
11487 CameraMetadata frame_settings;
11488 frame_settings = request->settings;
11489 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11490 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11491 int32_t *crop_count =
11492 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11493 int32_t *crop_data =
11494 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11495 int32_t *roi_map =
11496 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11497 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11498 cam_crop_data_t crop_meta;
11499 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11500 crop_meta.num_of_streams = 1;
11501 crop_meta.crop_info[0].crop.left = crop_data[0];
11502 crop_meta.crop_info[0].crop.top = crop_data[1];
11503 crop_meta.crop_info[0].crop.width = crop_data[2];
11504 crop_meta.crop_info[0].crop.height = crop_data[3];
11505
11506 crop_meta.crop_info[0].roi_map.left =
11507 roi_map[0];
11508 crop_meta.crop_info[0].roi_map.top =
11509 roi_map[1];
11510 crop_meta.crop_info[0].roi_map.width =
11511 roi_map[2];
11512 crop_meta.crop_info[0].roi_map.height =
11513 roi_map[3];
11514
11515 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11516 rc = BAD_VALUE;
11517 }
11518 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11519 request->input_buffer->stream,
11520 crop_meta.crop_info[0].crop.left,
11521 crop_meta.crop_info[0].crop.top,
11522 crop_meta.crop_info[0].crop.width,
11523 crop_meta.crop_info[0].crop.height);
11524 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11525 request->input_buffer->stream,
11526 crop_meta.crop_info[0].roi_map.left,
11527 crop_meta.crop_info[0].roi_map.top,
11528 crop_meta.crop_info[0].roi_map.width,
11529 crop_meta.crop_info[0].roi_map.height);
11530 } else {
11531 LOGE("Invalid reprocess crop count %d!", *crop_count);
11532 }
11533 } else {
11534 LOGE("No crop data from matching output stream");
11535 }
11536
11537 /* These settings are not needed for regular requests so handle them specially for
11538 reprocess requests; information needed for EXIF tags */
11539 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11540 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11541 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11542 if (NAME_NOT_FOUND != val) {
11543 uint32_t flashMode = (uint32_t)val;
11544 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11545 rc = BAD_VALUE;
11546 }
11547 } else {
11548 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11549 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11550 }
11551 } else {
11552 LOGH("No flash mode in reprocess settings");
11553 }
11554
11555 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11556 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11557 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11558 rc = BAD_VALUE;
11559 }
11560 } else {
11561 LOGH("No flash state in reprocess settings");
11562 }
11563
11564 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11565 uint8_t *reprocessFlags =
11566 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11567 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11568 *reprocessFlags)) {
11569 rc = BAD_VALUE;
11570 }
11571 }
11572
Thierry Strudel54dc9782017-02-15 12:12:10 -080011573 // Add exif debug data to internal metadata
11574 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11575 mm_jpeg_debug_exif_params_t *debug_params =
11576 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11577 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11578 // AE
11579 if (debug_params->ae_debug_params_valid == TRUE) {
11580 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11581 debug_params->ae_debug_params);
11582 }
11583 // AWB
11584 if (debug_params->awb_debug_params_valid == TRUE) {
11585 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11586 debug_params->awb_debug_params);
11587 }
11588 // AF
11589 if (debug_params->af_debug_params_valid == TRUE) {
11590 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11591 debug_params->af_debug_params);
11592 }
11593 // ASD
11594 if (debug_params->asd_debug_params_valid == TRUE) {
11595 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11596 debug_params->asd_debug_params);
11597 }
11598 // Stats
11599 if (debug_params->stats_debug_params_valid == TRUE) {
11600 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11601 debug_params->stats_debug_params);
11602 }
11603 // BE Stats
11604 if (debug_params->bestats_debug_params_valid == TRUE) {
11605 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11606 debug_params->bestats_debug_params);
11607 }
11608 // BHIST
11609 if (debug_params->bhist_debug_params_valid == TRUE) {
11610 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11611 debug_params->bhist_debug_params);
11612 }
11613 // 3A Tuning
11614 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11615 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11616 debug_params->q3a_tuning_debug_params);
11617 }
11618 }
11619
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011620 // Add metadata which reprocess needs
11621 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11622 cam_reprocess_info_t *repro_info =
11623 (cam_reprocess_info_t *)frame_settings.find
11624 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011625 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011626 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011627 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011628 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011629 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011630 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011631 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011632 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011633 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011634 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011635 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011636 repro_info->pipeline_flip);
11637 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11638 repro_info->af_roi);
11639 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11640 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011641        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11642            CAM_INTF_PARM_ROTATION metadata has already been added in
11643            translateToHalMetadata. The HAL needs to keep this new rotation
11644            metadata; otherwise, the old rotation info saved in the vendor tag
11645            is used */
11646 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11647 CAM_INTF_PARM_ROTATION, reprocParam) {
11648 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11649 } else {
11650 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011651 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011652 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011653 }
11654
11655    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11656       to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11657       roi.width and roi.height become the final JPEG size.
11658       For now, the HAL only checks this for reprocess requests */
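    /* Illustrative framework-side usage (hypothetical values), assuming the app can see
       these vendor tags through the vendor tag descriptor:

           uint8_t enable  = 1;
           int32_t rect[4] = {0, 0, 3000, 2000};   // left, top, width, height to crop
           int32_t roi[4]  = {0, 0, 1920, 1080};   // roi[2]/roi[3] become the final JPEG size
           settings.update(QCAMERA3_JPEG_ENCODE_CROP_ENABLE, &enable, 1);
           settings.update(QCAMERA3_JPEG_ENCODE_CROP_RECT, rect, 4);
           settings.update(QCAMERA3_JPEG_ENCODE_CROP_ROI, roi, 4);
    */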
11659 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11660 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11661 uint8_t *enable =
11662 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11663 if (*enable == TRUE) {
11664 int32_t *crop_data =
11665 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11666 cam_stream_crop_info_t crop_meta;
11667 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11668 crop_meta.stream_id = 0;
11669 crop_meta.crop.left = crop_data[0];
11670 crop_meta.crop.top = crop_data[1];
11671 crop_meta.crop.width = crop_data[2];
11672 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011673 // The JPEG crop roi should match cpp output size
11674 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11675 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11676 crop_meta.roi_map.left = 0;
11677 crop_meta.roi_map.top = 0;
11678 crop_meta.roi_map.width = cpp_crop->crop.width;
11679 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011680 }
11681 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11682 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011683 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011684 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011685 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11686 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011687 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011688 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11689
11690 // Add JPEG scale information
11691 cam_dimension_t scale_dim;
11692 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11693 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11694 int32_t *roi =
11695 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11696 scale_dim.width = roi[2];
11697 scale_dim.height = roi[3];
11698 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11699 scale_dim);
11700 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11701 scale_dim.width, scale_dim.height, mCameraId);
11702 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011703 }
11704 }
11705
11706 return rc;
11707}
11708
11709/*===========================================================================
11710 * FUNCTION : saveRequestSettings
11711 *
11712 * DESCRIPTION: Add any settings that might have changed to the request settings
11713 * and save the settings to be applied on the frame
11714 *
11715 * PARAMETERS :
11716 * @jpegMetadata : the extracted and/or modified jpeg metadata
11717 * @request : request with initial settings
11718 *
11719 * RETURN :
11720 * camera_metadata_t* : pointer to the saved request settings
11721 *==========================================================================*/
11722camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11723 const CameraMetadata &jpegMetadata,
11724 camera3_capture_request_t *request)
11725{
11726 camera_metadata_t *resultMetadata;
11727 CameraMetadata camMetadata;
11728 camMetadata = request->settings;
11729
11730 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11731 int32_t thumbnail_size[2];
11732 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11733 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11734 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11735 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11736 }
11737
11738 if (request->input_buffer != NULL) {
11739 uint8_t reprocessFlags = 1;
11740 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11741 (uint8_t*)&reprocessFlags,
11742 sizeof(reprocessFlags));
11743 }
11744
11745 resultMetadata = camMetadata.release();
11746 return resultMetadata;
11747}
11748
11749/*===========================================================================
11750 * FUNCTION : setHalFpsRange
11751 *
11752 * DESCRIPTION: set FPS range parameter
11753 *
11754 *
11755 * PARAMETERS :
11756 * @settings : Metadata from framework
11757 * @hal_metadata: Metadata buffer
11758 *
11759 *
11760 * RETURN : success: NO_ERROR
11761 * failure:
11762 *==========================================================================*/
11763int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11764 metadata_buffer_t *hal_metadata)
11765{
11766 int32_t rc = NO_ERROR;
11767 cam_fps_range_t fps_range;
11768 fps_range.min_fps = (float)
11769 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11770 fps_range.max_fps = (float)
11771 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11772 fps_range.video_min_fps = fps_range.min_fps;
11773 fps_range.video_max_fps = fps_range.max_fps;
11774
11775 LOGD("aeTargetFpsRange fps: [%f %f]",
11776 fps_range.min_fps, fps_range.max_fps);
11777 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11778 * follows:
11779 * ---------------------------------------------------------------|
11780 * Video stream is absent in configure_streams |
11781 * (Camcorder preview before the first video record |
11782 * ---------------------------------------------------------------|
11783 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11784 * | | | vid_min/max_fps|
11785 * ---------------------------------------------------------------|
11786 * NO | [ 30, 240] | 240 | [240, 240] |
11787 * |-------------|-------------|----------------|
11788 * | [240, 240] | 240 | [240, 240] |
11789 * ---------------------------------------------------------------|
11790 * Video stream is present in configure_streams |
11791 * ---------------------------------------------------------------|
11792 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11793 * | | | vid_min/max_fps|
11794 * ---------------------------------------------------------------|
11795 * NO | [ 30, 240] | 240 | [240, 240] |
11796 * (camcorder prev |-------------|-------------|----------------|
11797 * after video rec | [240, 240] | 240 | [240, 240] |
11798 * is stopped) | | | |
11799 * ---------------------------------------------------------------|
11800 * YES | [ 30, 240] | 240 | [240, 240] |
11801 * |-------------|-------------|----------------|
11802 * | [240, 240] | 240 | [240, 240] |
11803 * ---------------------------------------------------------------|
11804 * When Video stream is absent in configure_streams,
11805 * preview fps = sensor_fps / batchsize
11806 * Eg: for 240fps at batchSize 4, preview = 60fps
11807 * for 120fps at batchSize 4, preview = 30fps
11808 *
11809 * When video stream is present in configure_streams, preview fps is as per
11810 * the ratio of preview buffers to video buffers requested in process
11811 * capture request
11812 */
11813 mBatchSize = 0;
11814 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11815 fps_range.min_fps = fps_range.video_max_fps;
11816 fps_range.video_min_fps = fps_range.video_max_fps;
11817 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11818 fps_range.max_fps);
11819 if (NAME_NOT_FOUND != val) {
11820 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11821 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11822 return BAD_VALUE;
11823 }
11824
11825 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11826 /* If batchmode is currently in progress and the fps changes,
11827 * set the flag to restart the sensor */
11828 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11829 (mHFRVideoFps != fps_range.max_fps)) {
11830 mNeedSensorRestart = true;
11831 }
11832 mHFRVideoFps = fps_range.max_fps;
11833 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11834 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11835 mBatchSize = MAX_HFR_BATCH_SIZE;
11836 }
11837 }
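            // Worked example (assuming PREVIEW_FPS_FOR_HFR is 30 and MAX_HFR_BATCH_SIZE
            // allows it): an aeTargetFpsRange of [120, 120] gives mHFRVideoFps = 120 and
            // mBatchSize = 120 / 30 = 4, i.e. four video frames batched per preview frame.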
11838 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11839
11840 }
11841 } else {
11842        /* HFR mode is a session parameter in the backend/ISP. It should be reset
11843         * when in non-HFR mode */
11844 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11845 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11846 return BAD_VALUE;
11847 }
11848 }
11849 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11850 return BAD_VALUE;
11851 }
11852 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11853 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11854 return rc;
11855}
11856
11857/*===========================================================================
11858 * FUNCTION : translateToHalMetadata
11859 *
11860 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11861 *
11862 *
11863 * PARAMETERS :
11864 * @request : request sent from framework
11865 *
11866 *
11867 * RETURN : success: NO_ERROR
11868 * failure:
11869 *==========================================================================*/
11870int QCamera3HardwareInterface::translateToHalMetadata
11871 (const camera3_capture_request_t *request,
11872 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011873 uint32_t snapshotStreamId) {
11874 if (request == nullptr || hal_metadata == nullptr) {
11875 return BAD_VALUE;
11876 }
11877
11878 int64_t minFrameDuration = getMinFrameDuration(request);
11879
11880 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11881 minFrameDuration);
11882}
11883
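/*===========================================================================
 * FUNCTION   : translateFwkMetadataToHalMetadata
 *
 * DESCRIPTION: translate framework metadata (camera_metadata_t) into the HAL
 *              metadata buffer (parm_type_t entries)
 *
 * PARAMETERS :
 *   @frameworkMetadata : settings from the framework capture request
 *   @hal_metadata      : HAL metadata buffer to populate
 *   @snapshotStreamId  : stream id attached to snapshot rotation info
 *   @minFrameDuration  : lower bound used when clamping the requested
 *                        sensor frame duration
 *
 * RETURN     : success: NO_ERROR
 *              failure:
 *==========================================================================*/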
11884int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11885 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11886 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11887
Thierry Strudel3d639192016-09-09 11:52:26 -070011888 int rc = 0;
11889 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011890 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011891
11892 /* Do not change the order of the following list unless you know what you are
11893 * doing.
11894 * The order is laid out in such a way that parameters in the front of the table
11895 * may be used to override the parameters later in the table. Examples are:
11896 * 1. META_MODE should precede AEC/AWB/AF MODE
11897     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11898     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11899     * 4. Any mode should precede its corresponding settings
11900 */
11901 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11902 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11903 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11904 rc = BAD_VALUE;
11905 }
11906 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11907 if (rc != NO_ERROR) {
11908 LOGE("extractSceneMode failed");
11909 }
11910 }
11911
11912 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11913 uint8_t fwk_aeMode =
11914 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11915 uint8_t aeMode;
11916 int32_t redeye;
11917
11918 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11919 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011920 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11921 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011922 } else {
11923 aeMode = CAM_AE_MODE_ON;
11924 }
11925 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11926 redeye = 1;
11927 } else {
11928 redeye = 0;
11929 }
11930
11931 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11932 fwk_aeMode);
11933 if (NAME_NOT_FOUND != val) {
11934 int32_t flashMode = (int32_t)val;
11935 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11936 }
11937
11938 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11939 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11940 rc = BAD_VALUE;
11941 }
11942 }
11943
11944 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11945 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11946 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11947 fwk_whiteLevel);
11948 if (NAME_NOT_FOUND != val) {
11949 uint8_t whiteLevel = (uint8_t)val;
11950 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11951 rc = BAD_VALUE;
11952 }
11953 }
11954 }
11955
11956 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11957 uint8_t fwk_cacMode =
11958 frame_settings.find(
11959 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11960 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11961 fwk_cacMode);
11962 if (NAME_NOT_FOUND != val) {
11963 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11964 bool entryAvailable = FALSE;
11965 // Check whether Frameworks set CAC mode is supported in device or not
11966 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11967 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11968 entryAvailable = TRUE;
11969 break;
11970 }
11971 }
11972 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11973 // If entry not found then set the device supported mode instead of frameworks mode i.e,
11974 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11975 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11976 if (entryAvailable == FALSE) {
11977 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11978 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11979 } else {
11980 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11981                    // High is not supported, so set FAST, as the spec says the underlying
11982                    // device implementation can be the same for both modes.
11983 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11984 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11985                    // Fast is not supported either, so fall back to OFF rather than HIGH or FAST
11986                    // in order to avoid the fps drop that high quality would cause
11987 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11988 } else {
11989 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11990 }
11991 }
11992 }
11993 LOGD("Final cacMode is %d", cacMode);
11994 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11995 rc = BAD_VALUE;
11996 }
11997 } else {
11998 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11999 }
12000 }
12001
Thierry Strudel2896d122017-02-23 19:18:03 -080012002 char af_value[PROPERTY_VALUE_MAX];
12003 property_get("persist.camera.af.infinity", af_value, "0");
12004
Jason Lee84ae9972017-02-24 13:24:24 -080012005 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080012006 if (atoi(af_value) == 0) {
12007 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012008 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012009 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12010 fwk_focusMode);
12011 if (NAME_NOT_FOUND != val) {
12012 uint8_t focusMode = (uint8_t)val;
12013 LOGD("set focus mode %d", focusMode);
12014 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12015 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12016 rc = BAD_VALUE;
12017 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012018 }
12019 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012020 } else {
12021 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12022 LOGE("Focus forced to infinity %d", focusMode);
12023 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12024 rc = BAD_VALUE;
12025 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012026 }
12027
Jason Lee84ae9972017-02-24 13:24:24 -080012028 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12029 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012030 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12031 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12032 focalDistance)) {
12033 rc = BAD_VALUE;
12034 }
12035 }
12036
12037 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12038 uint8_t fwk_antibandingMode =
12039 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12040 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12041 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12042 if (NAME_NOT_FOUND != val) {
12043 uint32_t hal_antibandingMode = (uint32_t)val;
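            // Framework AUTO antibanding is specialized below into the region-specific
            // auto mode (60Hz vs 50Hz), based on the m60HzZone flag.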
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012044 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12045 if (m60HzZone) {
12046 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12047 } else {
12048 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12049 }
12050 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012051 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12052 hal_antibandingMode)) {
12053 rc = BAD_VALUE;
12054 }
12055 }
12056 }
12057
12058 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12059 int32_t expCompensation = frame_settings.find(
12060 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12061 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12062 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12063 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12064 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012065 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012066 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12067 expCompensation)) {
12068 rc = BAD_VALUE;
12069 }
12070 }
12071
12072 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12073 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12074 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12075 rc = BAD_VALUE;
12076 }
12077 }
12078 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12079 rc = setHalFpsRange(frame_settings, hal_metadata);
12080 if (rc != NO_ERROR) {
12081 LOGE("setHalFpsRange failed");
12082 }
12083 }
12084
12085 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12086 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12087 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12088 rc = BAD_VALUE;
12089 }
12090 }
12091
12092 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12093 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12094 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12095 fwk_effectMode);
12096 if (NAME_NOT_FOUND != val) {
12097 uint8_t effectMode = (uint8_t)val;
12098 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12099 rc = BAD_VALUE;
12100 }
12101 }
12102 }
12103
12104 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12105 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12106 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12107 colorCorrectMode)) {
12108 rc = BAD_VALUE;
12109 }
12110 }
12111
12112 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12113 cam_color_correct_gains_t colorCorrectGains;
12114 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12115 colorCorrectGains.gains[i] =
12116 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12117 }
12118 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12119 colorCorrectGains)) {
12120 rc = BAD_VALUE;
12121 }
12122 }
12123
12124 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12125 cam_color_correct_matrix_t colorCorrectTransform;
12126 cam_rational_type_t transform_elem;
12127 size_t num = 0;
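        // The framework transform is a flat array of 9 rationals in row-major order,
        // so matrix element [i][j] is read from flat index i * CC_MATRIX_COLS + j.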
12128 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12129 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12130 transform_elem.numerator =
12131 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12132 transform_elem.denominator =
12133 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12134 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12135 num++;
12136 }
12137 }
12138 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12139 colorCorrectTransform)) {
12140 rc = BAD_VALUE;
12141 }
12142 }
12143
12144 cam_trigger_t aecTrigger;
12145 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12146 aecTrigger.trigger_id = -1;
12147 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12148 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12149 aecTrigger.trigger =
12150 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12151 aecTrigger.trigger_id =
12152 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12153 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12154 aecTrigger)) {
12155 rc = BAD_VALUE;
12156 }
12157 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12158 aecTrigger.trigger, aecTrigger.trigger_id);
12159 }
12160
12161 /*af_trigger must come with a trigger id*/
12162 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12163 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12164 cam_trigger_t af_trigger;
12165 af_trigger.trigger =
12166 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12167 af_trigger.trigger_id =
12168 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12169 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12170 rc = BAD_VALUE;
12171 }
12172 LOGD("AfTrigger: %d AfTriggerID: %d",
12173 af_trigger.trigger, af_trigger.trigger_id);
12174 }
12175
12176 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12177 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12178 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12179 rc = BAD_VALUE;
12180 }
12181 }
12182 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12183 cam_edge_application_t edge_application;
12184 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012185
Thierry Strudel3d639192016-09-09 11:52:26 -070012186 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12187 edge_application.sharpness = 0;
12188 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012189 edge_application.sharpness =
12190 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12191 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12192 int32_t sharpness =
12193 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12194 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12195 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12196 LOGD("Setting edge mode sharpness %d", sharpness);
12197 edge_application.sharpness = sharpness;
12198 }
12199 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012200 }
12201 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12202 rc = BAD_VALUE;
12203 }
12204 }
12205
12206 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12207 int32_t respectFlashMode = 1;
12208 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12209 uint8_t fwk_aeMode =
12210 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012211 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12212 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12213 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012214 respectFlashMode = 0;
12215 LOGH("AE Mode controls flash, ignore android.flash.mode");
12216 }
12217 }
12218 if (respectFlashMode) {
12219 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12220 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12221 LOGH("flash mode after mapping %d", val);
12222 // To check: CAM_INTF_META_FLASH_MODE usage
12223 if (NAME_NOT_FOUND != val) {
12224 uint8_t flashMode = (uint8_t)val;
12225 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12226 rc = BAD_VALUE;
12227 }
12228 }
12229 }
12230 }
12231
12232 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12233 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12234 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12235 rc = BAD_VALUE;
12236 }
12237 }
12238
12239 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12240 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12241 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12242 flashFiringTime)) {
12243 rc = BAD_VALUE;
12244 }
12245 }
12246
12247 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12248 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12249 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12250 hotPixelMode)) {
12251 rc = BAD_VALUE;
12252 }
12253 }
12254
12255 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12256 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12257 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12258 lensAperture)) {
12259 rc = BAD_VALUE;
12260 }
12261 }
12262
12263 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12264 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12265 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12266 filterDensity)) {
12267 rc = BAD_VALUE;
12268 }
12269 }
12270
12271 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12272 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12273 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12274 focalLength)) {
12275 rc = BAD_VALUE;
12276 }
12277 }
12278
12279 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12280 uint8_t optStabMode =
12281 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12282 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12283 optStabMode)) {
12284 rc = BAD_VALUE;
12285 }
12286 }
12287
12288 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12289 uint8_t videoStabMode =
12290 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12291 LOGD("videoStabMode from APP = %d", videoStabMode);
12292 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12293 videoStabMode)) {
12294 rc = BAD_VALUE;
12295 }
12296 }
12297
12298
12299 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12300 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12301 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12302 noiseRedMode)) {
12303 rc = BAD_VALUE;
12304 }
12305 }
12306
12307 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12308 float reprocessEffectiveExposureFactor =
12309 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12310 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12311 reprocessEffectiveExposureFactor)) {
12312 rc = BAD_VALUE;
12313 }
12314 }
12315
12316 cam_crop_region_t scalerCropRegion;
12317 bool scalerCropSet = false;
12318 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12319 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12320 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12321 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12322 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12323
12324 // Map coordinate system from active array to sensor output.
12325 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12326 scalerCropRegion.width, scalerCropRegion.height);
12327
12328 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12329 scalerCropRegion)) {
12330 rc = BAD_VALUE;
12331 }
12332 scalerCropSet = true;
12333 }
12334
12335 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12336 int64_t sensorExpTime =
12337 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12338 LOGD("setting sensorExpTime %lld", sensorExpTime);
12339 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12340 sensorExpTime)) {
12341 rc = BAD_VALUE;
12342 }
12343 }
12344
12345 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12346 int64_t sensorFrameDuration =
12347 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012348 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12349 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12350 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12351 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12352 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12353 sensorFrameDuration)) {
12354 rc = BAD_VALUE;
12355 }
12356 }
12357
12358 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12359 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12360 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12361 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12362 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12363 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12364 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12365 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12366 sensorSensitivity)) {
12367 rc = BAD_VALUE;
12368 }
12369 }
12370
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012371#ifndef USE_HAL_3_3
12372 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12373 int32_t ispSensitivity =
12374 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12375 if (ispSensitivity <
12376 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12377 ispSensitivity =
12378 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12379 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12380 }
12381 if (ispSensitivity >
12382 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12383 ispSensitivity =
12384 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12385 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12386 }
12387 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12388 ispSensitivity)) {
12389 rc = BAD_VALUE;
12390 }
12391 }
12392#endif
12393
Thierry Strudel3d639192016-09-09 11:52:26 -070012394 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12395 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12396 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12397 rc = BAD_VALUE;
12398 }
12399 }
12400
12401 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12402 uint8_t fwk_facedetectMode =
12403 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12404
12405 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12406 fwk_facedetectMode);
12407
12408 if (NAME_NOT_FOUND != val) {
12409 uint8_t facedetectMode = (uint8_t)val;
12410 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12411 facedetectMode)) {
12412 rc = BAD_VALUE;
12413 }
12414 }
12415 }
12416
Thierry Strudel54dc9782017-02-15 12:12:10 -080012417 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012418 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012419 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012420 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12421 histogramMode)) {
12422 rc = BAD_VALUE;
12423 }
12424 }
12425
12426 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12427 uint8_t sharpnessMapMode =
12428 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12429 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12430 sharpnessMapMode)) {
12431 rc = BAD_VALUE;
12432 }
12433 }
12434
12435 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12436 uint8_t tonemapMode =
12437 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12438 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12439 rc = BAD_VALUE;
12440 }
12441 }
12442 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12443 /*All tonemap channels will have the same number of points*/
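    /* Each framework curve is a flat array of (Pin, Pout) control-point pairs, hence the
       point count below is the entry count divided by two. Example (hypothetical): a
       two-point identity curve {0.0, 0.0, 1.0, 1.0} yields tonemap_points_cnt = 2 with
       points (0, 0) and (1, 1). */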
12444 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12445 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12446 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12447 cam_rgb_tonemap_curves tonemapCurves;
12448 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12449 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12450 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12451 tonemapCurves.tonemap_points_cnt,
12452 CAM_MAX_TONEMAP_CURVE_SIZE);
12453 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12454 }
12455
12456 /* ch0 = G*/
12457 size_t point = 0;
12458 cam_tonemap_curve_t tonemapCurveGreen;
12459 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12460 for (size_t j = 0; j < 2; j++) {
12461 tonemapCurveGreen.tonemap_points[i][j] =
12462 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12463 point++;
12464 }
12465 }
12466 tonemapCurves.curves[0] = tonemapCurveGreen;
12467
12468 /* ch 1 = B */
12469 point = 0;
12470 cam_tonemap_curve_t tonemapCurveBlue;
12471 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12472 for (size_t j = 0; j < 2; j++) {
12473 tonemapCurveBlue.tonemap_points[i][j] =
12474 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12475 point++;
12476 }
12477 }
12478 tonemapCurves.curves[1] = tonemapCurveBlue;
12479
12480 /* ch 2 = R */
12481 point = 0;
12482 cam_tonemap_curve_t tonemapCurveRed;
12483 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12484 for (size_t j = 0; j < 2; j++) {
12485 tonemapCurveRed.tonemap_points[i][j] =
12486 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12487 point++;
12488 }
12489 }
12490 tonemapCurves.curves[2] = tonemapCurveRed;
12491
12492 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12493 tonemapCurves)) {
12494 rc = BAD_VALUE;
12495 }
12496 }
12497
12498 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12499 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12500 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12501 captureIntent)) {
12502 rc = BAD_VALUE;
12503 }
12504 }
12505
12506 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12507 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12508 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12509 blackLevelLock)) {
12510 rc = BAD_VALUE;
12511 }
12512 }
12513
12514 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12515 uint8_t lensShadingMapMode =
12516 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12517 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12518 lensShadingMapMode)) {
12519 rc = BAD_VALUE;
12520 }
12521 }
12522
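    // AE/AF regions below arrive in active-array coordinates; they are mapped to sensor
    // output coordinates and, if a scaler crop region was supplied in the same request,
    // validated against it (resetIfNeededROI) before being sent to the backend.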
12523 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12524 cam_area_t roi;
12525 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012526 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012527
12528 // Map coordinate system from active array to sensor output.
12529 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12530 roi.rect.height);
12531
12532 if (scalerCropSet) {
12533 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12534 }
12535 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12536 rc = BAD_VALUE;
12537 }
12538 }
12539
12540 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12541 cam_area_t roi;
12542 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012543 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012544
12545 // Map coordinate system from active array to sensor output.
12546 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12547 roi.rect.height);
12548
12549 if (scalerCropSet) {
12550 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12551 }
12552 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12553 rc = BAD_VALUE;
12554 }
12555 }
12556
12557 // CDS for non-HFR non-video mode
12558 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12559 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12560 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12561 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12562 LOGE("Invalid CDS mode %d!", *fwk_cds);
12563 } else {
12564 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12565 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12566 rc = BAD_VALUE;
12567 }
12568 }
12569 }
12570
Thierry Strudel04e026f2016-10-10 11:27:36 -070012571 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012572 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012573 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012574 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12575 }
12576 if (m_bVideoHdrEnabled)
12577 vhdr = CAM_VIDEO_HDR_MODE_ON;
12578
Thierry Strudel54dc9782017-02-15 12:12:10 -080012579 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12580
12581 if(vhdr != curr_hdr_state)
12582 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12583
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012584 rc = setVideoHdrMode(mParameters, vhdr);
12585 if (rc != NO_ERROR) {
12586        LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012587 }
12588
12589 //IR
12590 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12591 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12592 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012593 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12594        uint8_t isIRon = (fwk_ir > 0) ? 1 : 0;
12595
Thierry Strudel04e026f2016-10-10 11:27:36 -070012597 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12598 LOGE("Invalid IR mode %d!", fwk_ir);
12599 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012600 if(isIRon != curr_ir_state )
12601 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12602
Thierry Strudel04e026f2016-10-10 11:27:36 -070012603 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12604 CAM_INTF_META_IR_MODE, fwk_ir)) {
12605 rc = BAD_VALUE;
12606 }
12607 }
12608 }
12609
Thierry Strudel54dc9782017-02-15 12:12:10 -080012610 //Binning Correction Mode
12611 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12612 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12613 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12614 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12615 || (0 > fwk_binning_correction)) {
12616 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12617 } else {
12618 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12619 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12620 rc = BAD_VALUE;
12621 }
12622 }
12623 }
12624
Thierry Strudel269c81a2016-10-12 12:13:59 -070012625 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12626 float aec_speed;
12627 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12628 LOGD("AEC Speed :%f", aec_speed);
12629 if ( aec_speed < 0 ) {
12630            LOGE("Invalid AEC convergence speed %f!", aec_speed);
12631 } else {
12632 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12633 aec_speed)) {
12634 rc = BAD_VALUE;
12635 }
12636 }
12637 }
12638
12639 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12640 float awb_speed;
12641 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12642 LOGD("AWB Speed :%f", awb_speed);
12643 if ( awb_speed < 0 ) {
12644            LOGE("Invalid AWB convergence speed %f!", awb_speed);
12645 } else {
12646 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12647 awb_speed)) {
12648 rc = BAD_VALUE;
12649 }
12650 }
12651 }
12652
Thierry Strudel3d639192016-09-09 11:52:26 -070012653 // TNR
12654 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12655 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12656 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012657 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012658 cam_denoise_param_t tnr;
12659 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12660 tnr.process_plates =
12661 (cam_denoise_process_type_t)frame_settings.find(
12662 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12663 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012664
12665 if(b_TnrRequested != curr_tnr_state)
12666 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12667
Thierry Strudel3d639192016-09-09 11:52:26 -070012668 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12669 rc = BAD_VALUE;
12670 }
12671 }
12672
Thierry Strudel54dc9782017-02-15 12:12:10 -080012673 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012674 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012675 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012676 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12677 *exposure_metering_mode)) {
12678 rc = BAD_VALUE;
12679 }
12680 }
12681
Thierry Strudel3d639192016-09-09 11:52:26 -070012682 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12683 int32_t fwk_testPatternMode =
12684 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12685 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12686 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12687
12688 if (NAME_NOT_FOUND != testPatternMode) {
12689 cam_test_pattern_data_t testPatternData;
12690 memset(&testPatternData, 0, sizeof(testPatternData));
12691 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12692 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12693 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12694 int32_t *fwk_testPatternData =
12695 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12696 testPatternData.r = fwk_testPatternData[0];
12697 testPatternData.b = fwk_testPatternData[3];
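            // The framework supplies the solid-color pattern as [R, Geven, Godd, B]; which
            // green value maps to Gr vs Gb depends on the sensor's color filter
            // arrangement, hence the switch below.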
12698 switch (gCamCapability[mCameraId]->color_arrangement) {
12699 case CAM_FILTER_ARRANGEMENT_RGGB:
12700 case CAM_FILTER_ARRANGEMENT_GRBG:
12701 testPatternData.gr = fwk_testPatternData[1];
12702 testPatternData.gb = fwk_testPatternData[2];
12703 break;
12704 case CAM_FILTER_ARRANGEMENT_GBRG:
12705 case CAM_FILTER_ARRANGEMENT_BGGR:
12706 testPatternData.gr = fwk_testPatternData[2];
12707 testPatternData.gb = fwk_testPatternData[1];
12708 break;
12709 default:
12710 LOGE("color arrangement %d is not supported",
12711 gCamCapability[mCameraId]->color_arrangement);
12712 break;
12713 }
12714 }
12715 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12716 testPatternData)) {
12717 rc = BAD_VALUE;
12718 }
12719 } else {
12720 LOGE("Invalid framework sensor test pattern mode %d",
12721 fwk_testPatternMode);
12722 }
12723 }
12724
12725 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12726 size_t count = 0;
12727 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12728 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12729 gps_coords.data.d, gps_coords.count, count);
12730 if (gps_coords.count != count) {
12731 rc = BAD_VALUE;
12732 }
12733 }
12734
12735 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12736 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12737 size_t count = 0;
12738 const char *gps_methods_src = (const char *)
12739 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12740 memset(gps_methods, '\0', sizeof(gps_methods));
12741 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12742 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12743 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12744 if (GPS_PROCESSING_METHOD_SIZE != count) {
12745 rc = BAD_VALUE;
12746 }
12747 }
12748
12749 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12750 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12751 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12752 gps_timestamp)) {
12753 rc = BAD_VALUE;
12754 }
12755 }
12756
12757 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12758 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12759 cam_rotation_info_t rotation_info;
12760 if (orientation == 0) {
12761 rotation_info.rotation = ROTATE_0;
12762 } else if (orientation == 90) {
12763 rotation_info.rotation = ROTATE_90;
12764 } else if (orientation == 180) {
12765 rotation_info.rotation = ROTATE_180;
12766 } else if (orientation == 270) {
12767 rotation_info.rotation = ROTATE_270;
12768 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012769 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012770 rotation_info.streamId = snapshotStreamId;
12771 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12772 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12773 rc = BAD_VALUE;
12774 }
12775 }
12776
12777 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12778 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12779 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12780 rc = BAD_VALUE;
12781 }
12782 }
12783
12784 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12785 uint32_t thumb_quality = (uint32_t)
12786 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12787 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12788 thumb_quality)) {
12789 rc = BAD_VALUE;
12790 }
12791 }
12792
12793 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12794 cam_dimension_t dim;
12795 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12796 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12797 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12798 rc = BAD_VALUE;
12799 }
12800 }
12801
12802 // Internal metadata
12803 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12804 size_t count = 0;
12805 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12806 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12807 privatedata.data.i32, privatedata.count, count);
12808 if (privatedata.count != count) {
12809 rc = BAD_VALUE;
12810 }
12811 }
12812
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012813 // ISO/Exposure Priority
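    // When the app selects ISO priority or exposure-time priority through the vendor tags
    // below, the chosen manual value is passed down and ZSL mode is enabled; when the tags
    // are absent, ZSL mode is explicitly disabled for the request.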
12814 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12815 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12816 cam_priority_mode_t mode =
12817 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12818 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12819 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12820 use_iso_exp_pty.previewOnly = FALSE;
12821 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12822 use_iso_exp_pty.value = *ptr;
12823
12824 if(CAM_ISO_PRIORITY == mode) {
12825 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12826 use_iso_exp_pty)) {
12827 rc = BAD_VALUE;
12828 }
12829 }
12830 else {
12831 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12832 use_iso_exp_pty)) {
12833 rc = BAD_VALUE;
12834 }
12835 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012836
12837 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12838 rc = BAD_VALUE;
12839 }
12840 }
12841 } else {
12842 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12843 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012844 }
12845 }
12846
12847 // Saturation
12848 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12849 int32_t* use_saturation =
12850 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12851 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12852 rc = BAD_VALUE;
12853 }
12854 }
12855
Thierry Strudel3d639192016-09-09 11:52:26 -070012856 // EV step
12857 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12858 gCamCapability[mCameraId]->exp_compensation_step)) {
12859 rc = BAD_VALUE;
12860 }
12861
12862 // CDS info
12863 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12864 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12865 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12866
12867 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12868 CAM_INTF_META_CDS_DATA, *cdsData)) {
12869 rc = BAD_VALUE;
12870 }
12871 }
12872
Shuzhen Wang19463d72016-03-08 11:09:52 -080012873 // Hybrid AE
12874 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12875 uint8_t *hybrid_ae = (uint8_t *)
12876 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12877
12878 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12879 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12880 rc = BAD_VALUE;
12881 }
12882 }
12883
Shuzhen Wang14415f52016-11-16 18:26:18 -080012884 // Histogram
12885 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12886 uint8_t histogramMode =
12887 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12888 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12889 histogramMode)) {
12890 rc = BAD_VALUE;
12891 }
12892 }
12893
12894 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12895 int32_t histogramBins =
12896 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12897 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12898 histogramBins)) {
12899 rc = BAD_VALUE;
12900 }
12901 }
12902
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012903 // Tracking AF
12904 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12905 uint8_t trackingAfTrigger =
12906 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12907 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12908 trackingAfTrigger)) {
12909 rc = BAD_VALUE;
12910 }
12911 }
12912
Thierry Strudel3d639192016-09-09 11:52:26 -070012913 return rc;
12914}
12915
12916/*===========================================================================
12917 * FUNCTION : captureResultCb
12918 *
12919 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12920 *
12921 * PARAMETERS :
12922 * @frame : frame information from mm-camera-interface
12923 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12924 * @userdata: userdata
12925 *
12926 * RETURN : NONE
12927 *==========================================================================*/
12928void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12929 camera3_stream_buffer_t *buffer,
12930 uint32_t frame_number, bool isInputBuffer, void *userdata)
12931{
12932 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12933 if (hw == NULL) {
12934 LOGE("Invalid hw %p", hw);
12935 return;
12936 }
12937
12938 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12939 return;
12940}
12941
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012942/*===========================================================================
12943 * FUNCTION : setBufferErrorStatus
12944 *
12945 * DESCRIPTION: Callback handler for channels to report any buffer errors
12946 *
12947 * PARAMETERS :
12948 * @ch : Channel on which buffer error is reported from
12949 * @frame_number : frame number on which buffer error is reported on
12950 * @buffer_status : buffer error status
12951 * @userdata: userdata
12952 *
12953 * RETURN : NONE
12954 *==========================================================================*/
12955void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12956 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12957{
12958 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12959 if (hw == NULL) {
12960 LOGE("Invalid hw %p", hw);
12961 return;
12962 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012963
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012964 hw->setBufferErrorStatus(ch, frame_number, err);
12965 return;
12966}
12967
12968void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12969 uint32_t frameNumber, camera3_buffer_status_t err)
12970{
12971 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12972 pthread_mutex_lock(&mMutex);
12973
12974 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12975 if (req.frame_number != frameNumber)
12976 continue;
12977 for (auto& k : req.mPendingBufferList) {
12978 if(k.stream->priv == ch) {
12979 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12980 }
12981 }
12982 }
12983
12984 pthread_mutex_unlock(&mMutex);
12985 return;
12986}
Thierry Strudel3d639192016-09-09 11:52:26 -070012987/*===========================================================================
12988 * FUNCTION : initialize
12989 *
12990 * DESCRIPTION: Pass framework callback pointers to HAL
12991 *
12992 * PARAMETERS :
12993 *
12994 *
12995 * RETURN : Success : 0
12996 * Failure: -ENODEV
12997 *==========================================================================*/
12998
12999int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13000 const camera3_callback_ops_t *callback_ops)
13001{
13002 LOGD("E");
13003 QCamera3HardwareInterface *hw =
13004 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13005 if (!hw) {
13006 LOGE("NULL camera device");
13007 return -ENODEV;
13008 }
13009
13010 int rc = hw->initialize(callback_ops);
13011 LOGD("X");
13012 return rc;
13013}
13014
13015/*===========================================================================
13016 * FUNCTION : configure_streams
13017 *
13018 * DESCRIPTION: Entry point for the framework to configure output streams;
13019 *              forwards the stream list to configureStreams()
13019 *
13020 * PARAMETERS :
13021 *
13022 *
13023 * RETURN : Success: 0
13024 * Failure: -EINVAL (if stream configuration is invalid)
13025 * -ENODEV (fatal error)
13026 *==========================================================================*/
13027
13028int QCamera3HardwareInterface::configure_streams(
13029 const struct camera3_device *device,
13030 camera3_stream_configuration_t *stream_list)
13031{
13032 LOGD("E");
13033 QCamera3HardwareInterface *hw =
13034 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13035 if (!hw) {
13036 LOGE("NULL camera device");
13037 return -ENODEV;
13038 }
13039 int rc = hw->configureStreams(stream_list);
13040 LOGD("X");
13041 return rc;
13042}
13043
13044/*===========================================================================
13045 * FUNCTION : construct_default_request_settings
13046 *
13047 * DESCRIPTION: Configure a settings buffer to meet the required use case
13048 *
13049 * PARAMETERS :
13050 *
13051 *
13052 * RETURN : Success: Return valid metadata
13053 * Failure: Return NULL
13054 *==========================================================================*/
13055const camera_metadata_t* QCamera3HardwareInterface::
13056 construct_default_request_settings(const struct camera3_device *device,
13057 int type)
13058{
13059
13060 LOGD("E");
13061 camera_metadata_t* fwk_metadata = NULL;
13062 QCamera3HardwareInterface *hw =
13063 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13064 if (!hw) {
13065 LOGE("NULL camera device");
13066 return NULL;
13067 }
13068
13069 fwk_metadata = hw->translateCapabilityToMetadata(type);
13070
13071 LOGD("X");
13072 return fwk_metadata;
13073}
13074
13075/*===========================================================================
13076 * FUNCTION : process_capture_request
13077 *
13078 * DESCRIPTION: Entry point for a new capture request; forwards it to
13079 *              orchestrateRequest()
13079 *
13080 * PARAMETERS :
13081 *
13082 *
13083 * RETURN :
13084 *==========================================================================*/
13085int QCamera3HardwareInterface::process_capture_request(
13086 const struct camera3_device *device,
13087 camera3_capture_request_t *request)
13088{
13089 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013090 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013091 QCamera3HardwareInterface *hw =
13092 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13093 if (!hw) {
13094 LOGE("NULL camera device");
13095 return -EINVAL;
13096 }
13097
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013098 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013099 LOGD("X");
13100 return rc;
13101}
13102
13103/*===========================================================================
13104 * FUNCTION : dump
13105 *
13106 * DESCRIPTION: Dump HAL debug state for this camera to the given file descriptor
13107 *
13108 * PARAMETERS :
13109 *
13110 *
13111 * RETURN :
13112 *==========================================================================*/
13113
13114void QCamera3HardwareInterface::dump(
13115 const struct camera3_device *device, int fd)
13116{
13117 /* Log level property is read when "adb shell dumpsys media.camera" is
13118 called so that the log level can be controlled without restarting
13119 the media server */
13120 getLogLevel();
13121
13122 LOGD("E");
13123 QCamera3HardwareInterface *hw =
13124 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13125 if (!hw) {
13126 LOGE("NULL camera device");
13127 return;
13128 }
13129
13130 hw->dump(fd);
13131 LOGD("X");
13132 return;
13133}
13134
13135/*===========================================================================
13136 * FUNCTION : flush
13137 *
13138 * DESCRIPTION: Flush all in-flight requests and return their buffers; only
13139 *              performed when the device is in STARTED state
13139 *
13140 * PARAMETERS :
13141 *
13142 *
13143 * RETURN :
13144 *==========================================================================*/
13145
13146int QCamera3HardwareInterface::flush(
13147 const struct camera3_device *device)
13148{
13149 int rc;
13150 LOGD("E");
13151 QCamera3HardwareInterface *hw =
13152 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13153 if (!hw) {
13154 LOGE("NULL camera device");
13155 return -EINVAL;
13156 }
13157
13158 pthread_mutex_lock(&hw->mMutex);
13159 // Validate current state
13160 switch (hw->mState) {
13161 case STARTED:
13162 /* valid state */
13163 break;
13164
13165 case ERROR:
13166 pthread_mutex_unlock(&hw->mMutex);
13167 hw->handleCameraDeviceError();
13168 return -ENODEV;
13169
13170 default:
13171 LOGI("Flush returned during state %d", hw->mState);
13172 pthread_mutex_unlock(&hw->mMutex);
13173 return 0;
13174 }
13175 pthread_mutex_unlock(&hw->mMutex);
13176
13177 rc = hw->flush(true /* restart channels */ );
13178 LOGD("X");
13179 return rc;
13180}
13181
13182/*===========================================================================
13183 * FUNCTION : close_camera_device
13184 *
13185 * DESCRIPTION: Close the camera device and destroy the HAL instance
13186 *
13187 * PARAMETERS :
13188 *
13189 *
13190 * RETURN :
13191 *==========================================================================*/
13192int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13193{
13194 int ret = NO_ERROR;
13195 QCamera3HardwareInterface *hw =
13196 reinterpret_cast<QCamera3HardwareInterface *>(
13197 reinterpret_cast<camera3_device_t *>(device)->priv);
13198 if (!hw) {
13199 LOGE("NULL camera device");
13200 return BAD_VALUE;
13201 }
13202
13203 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13204 delete hw;
13205 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013206 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013207 return ret;
13208}
13209
13210/*===========================================================================
13211 * FUNCTION : getWaveletDenoiseProcessPlate
13212 *
13213 * DESCRIPTION: query wavelet denoise process plate
13214 *
13215 * PARAMETERS : None
13216 *
13217 * RETURN     : WNR process plate value
13218 *==========================================================================*/
13219cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13220{
13221 char prop[PROPERTY_VALUE_MAX];
13222 memset(prop, 0, sizeof(prop));
13223 property_get("persist.denoise.process.plates", prop, "0");
13224 int processPlate = atoi(prop);
13225 switch(processPlate) {
13226 case 0:
13227 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13228 case 1:
13229 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13230 case 2:
13231 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13232 case 3:
13233 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13234 default:
13235 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13236 }
13237}
13238
13239
13240/*===========================================================================
13241 * FUNCTION : getTemporalDenoiseProcessPlate
13242 *
13243 * DESCRIPTION: query temporal denoise process plate
13244 *
13245 * PARAMETERS : None
13246 *
13247 * RETURN     : TNR process plate value
13248 *==========================================================================*/
13249cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13250{
13251 char prop[PROPERTY_VALUE_MAX];
13252 memset(prop, 0, sizeof(prop));
13253 property_get("persist.tnr.process.plates", prop, "0");
13254 int processPlate = atoi(prop);
13255 switch(processPlate) {
13256 case 0:
13257 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13258 case 1:
13259 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13260 case 2:
13261 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13262 case 3:
13263 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13264 default:
13265 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13266 }
13267}
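
/* Note: both denoise plate selections above are driven by system properties
 * read at runtime, with the mapping 0=YCbCr plane, 1=CbCr only,
 * 2=streamlined YCbCr, 3=streamlined CbCr taken from the switch statements;
 * any other value falls back to the streamlined YCbCr plate. Illustrative
 * usage on a development build (assumes adb/setprop access):
 *   adb shell setprop persist.denoise.process.plates 2   # WNR plate
 *   adb shell setprop persist.tnr.process.plates 2       # TNR plate
 */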
13268
13269
13270/*===========================================================================
13271 * FUNCTION : extractSceneMode
13272 *
13273 * DESCRIPTION: Extract scene mode from frameworks set metadata
13274 *
13275 * PARAMETERS :
13276 * @frame_settings: CameraMetadata reference
13277 * @metaMode: ANDROID_CONTROL_MODE value set by the framework
13278 * @hal_metadata: hal metadata structure
13279 *
13280 * RETURN     : NO_ERROR on success, BAD_VALUE on failure
13281 *==========================================================================*/
13282int32_t QCamera3HardwareInterface::extractSceneMode(
13283 const CameraMetadata &frame_settings, uint8_t metaMode,
13284 metadata_buffer_t *hal_metadata)
13285{
13286 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013287 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13288
13289 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13290 LOGD("Ignoring control mode OFF_KEEP_STATE");
13291 return NO_ERROR;
13292 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013293
13294 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13295 camera_metadata_ro_entry entry =
13296 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13297 if (0 == entry.count)
13298 return rc;
13299
13300 uint8_t fwk_sceneMode = entry.data.u8[0];
13301
13302 int val = lookupHalName(SCENE_MODES_MAP,
13303 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13304 fwk_sceneMode);
13305 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013306 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013307 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013308 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013309 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013310
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013311 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13312 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13313 }
13314
13315 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13316 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013317 cam_hdr_param_t hdr_params;
13318 hdr_params.hdr_enable = 1;
13319 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13320 hdr_params.hdr_need_1x = false;
13321 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13322 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13323 rc = BAD_VALUE;
13324 }
13325 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013326
Thierry Strudel3d639192016-09-09 11:52:26 -070013327 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13328 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13329 rc = BAD_VALUE;
13330 }
13331 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013332
13333 if (mForceHdrSnapshot) {
13334 cam_hdr_param_t hdr_params;
13335 hdr_params.hdr_enable = 1;
13336 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13337 hdr_params.hdr_need_1x = false;
13338 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13339 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13340 rc = BAD_VALUE;
13341 }
13342 }
13343
Thierry Strudel3d639192016-09-09 11:52:26 -070013344 return rc;
13345}
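
/* Note: scene-mode handling flows framework setting -> SCENE_MODES_MAP lookup
 * -> CAM_INTF_PARM_BESTSHOT_MODE, with HDR scenes additionally enabling
 * sensor HDR or multi-frame bracketing above. A minimal sketch of how a
 * caller might drive this path (local variable names are illustrative only,
 * and hal_metadata is assumed to be a valid metadata_buffer_t):
 *   CameraMetadata settings;
 *   uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
 *   uint8_t scene = ANDROID_CONTROL_SCENE_MODE_ACTION;
 *   settings.update(ANDROID_CONTROL_MODE, &mode, 1);
 *   settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
 *   extractSceneMode(settings, mode, hal_metadata);
 */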
13346
13347/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013348 * FUNCTION : setVideoHdrMode
13349 *
13350 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13351 *
13352 * PARAMETERS :
13353 * @hal_metadata: hal metadata structure
13354 * @vhdr: requested QCAMERA3_VIDEO_HDR_MODE value
13355 *
13356 * RETURN     : NO_ERROR on success, BAD_VALUE for an invalid mode
13357 *==========================================================================*/
13358int32_t QCamera3HardwareInterface::setVideoHdrMode(
13359 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13360{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013361 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13362 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13363 }
13364
13365 LOGE("Invalid Video HDR mode %d!", vhdr);
13366 return BAD_VALUE;
13367}
13368
13369/*===========================================================================
13370 * FUNCTION : setSensorHDR
13371 *
13372 * DESCRIPTION: Enable/disable sensor HDR.
13373 *
13374 * PARAMETERS :
13375 * @hal_metadata: hal metadata structure
13376 * @enable: boolean whether to enable/disable sensor HDR
13377 *
13378 * RETURN     : NO_ERROR on success, BAD_VALUE on failure
13379 *==========================================================================*/
13380int32_t QCamera3HardwareInterface::setSensorHDR(
13381 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13382{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013383 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013384 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13385
13386 if (enable) {
13387 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13388 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13389 #ifdef _LE_CAMERA_
13390 //Default to staggered HDR for IOT
13391 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13392 #else
13393 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13394 #endif
13395 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13396 }
13397
13398 bool isSupported = false;
13399 switch (sensor_hdr) {
13400 case CAM_SENSOR_HDR_IN_SENSOR:
13401 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13402 CAM_QCOM_FEATURE_SENSOR_HDR) {
13403 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013404 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013405 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013406 break;
13407 case CAM_SENSOR_HDR_ZIGZAG:
13408 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13409 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13410 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013411 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013412 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013413 break;
13414 case CAM_SENSOR_HDR_STAGGERED:
13415 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13416 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13417 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013418 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013419 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013420 break;
13421 case CAM_SENSOR_HDR_OFF:
13422 isSupported = true;
13423 LOGD("Turning off sensor HDR");
13424 break;
13425 default:
13426 LOGE("HDR mode %d not supported", sensor_hdr);
13427 rc = BAD_VALUE;
13428 break;
13429 }
13430
13431 if(isSupported) {
13432 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13433 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13434 rc = BAD_VALUE;
13435 } else {
13436 if(!isVideoHdrEnable)
13437 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013438 }
13439 }
13440 return rc;
13441}
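
/* Note: when sensor HDR is being enabled, the HDR type is read from
 * persist.camera.sensor.hdr and validated against the sensor's
 * qcom_supported_feature_mask before CAM_INTF_PARM_SENSOR_HDR is programmed.
 * As an illustration (the numeric value assumes the cam_sensor_hdr_type_t
 * ordering implied by the "_LE_CAMERA_ defaults to 3 = staggered" comment):
 *   adb shell setprop persist.camera.sensor.hdr 3   # request staggered HDR
 * An unsupported type simply leaves the parameter unset; an out-of-range
 * type returns BAD_VALUE.
 */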
13442
13443/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013444 * FUNCTION : needRotationReprocess
13445 *
13446 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13447 *
13448 * PARAMETERS : none
13449 *
13450 * RETURN : true: needed
13451 * false: no need
13452 *==========================================================================*/
13453bool QCamera3HardwareInterface::needRotationReprocess()
13454{
13455 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13456        // pp has the capability to process rotation, so route rotation through reprocess
13457 LOGH("need do reprocess for rotation");
13458 return true;
13459 }
13460
13461 return false;
13462}
13463
13464/*===========================================================================
13465 * FUNCTION : needReprocess
13466 *
13467 * DESCRIPTION: if reprocess is needed
13468 *
13469 * PARAMETERS : none
13470 *
13471 * RETURN : true: needed
13472 * false: no need
13473 *==========================================================================*/
13474bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13475{
13476 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13477 // TODO: add for ZSL HDR later
13478 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13479 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13480 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13481 return true;
13482 } else {
13483 LOGH("already post processed frame");
13484 return false;
13485 }
13486 }
13487 return needRotationReprocess();
13488}
13489
13490/*===========================================================================
13491 * FUNCTION : needJpegExifRotation
13492 *
13493 * DESCRIPTION: if rotation from jpeg is needed
13494 *
13495 * PARAMETERS : none
13496 *
13497 * RETURN : true: needed
13498 * false: no need
13499 *==========================================================================*/
13500bool QCamera3HardwareInterface::needJpegExifRotation()
13501{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013502 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013503 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13504 LOGD("Need use Jpeg EXIF Rotation");
13505 return true;
13506 }
13507 return false;
13508}
13509
13510/*===========================================================================
13511 * FUNCTION : addOfflineReprocChannel
13512 *
13513 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13514 * coming from input channel
13515 *
13516 * PARAMETERS :
13517 * @config : reprocess configuration
13518 * @inputChHandle : pointer to the input (source) channel
13519 *
13520 *
13521 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13522 *==========================================================================*/
13523QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13524 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13525{
13526 int32_t rc = NO_ERROR;
13527 QCamera3ReprocessChannel *pChannel = NULL;
13528
13529 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013530 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13531 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013532 if (NULL == pChannel) {
13533 LOGE("no mem for reprocess channel");
13534 return NULL;
13535 }
13536
13537 rc = pChannel->initialize(IS_TYPE_NONE);
13538 if (rc != NO_ERROR) {
13539 LOGE("init reprocess channel failed, ret = %d", rc);
13540 delete pChannel;
13541 return NULL;
13542 }
13543
13544 // pp feature config
13545 cam_pp_feature_config_t pp_config;
13546 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13547
13548 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13549 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13550 & CAM_QCOM_FEATURE_DSDN) {
13551        //Use CPP CDS in case h/w supports it.
13552 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13553 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13554 }
13555 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13556 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13557 }
13558
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013559 if (config.hdr_param.hdr_enable) {
13560 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13561 pp_config.hdr_param = config.hdr_param;
13562 }
13563
13564 if (mForceHdrSnapshot) {
13565 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13566 pp_config.hdr_param.hdr_enable = 1;
13567 pp_config.hdr_param.hdr_need_1x = 0;
13568 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13569 }
13570
Thierry Strudel3d639192016-09-09 11:52:26 -070013571 rc = pChannel->addReprocStreamsFromSource(pp_config,
13572 config,
13573 IS_TYPE_NONE,
13574 mMetadataChannel);
13575
13576 if (rc != NO_ERROR) {
13577 delete pChannel;
13578 return NULL;
13579 }
13580 return pChannel;
13581}
13582
13583/*===========================================================================
13584 * FUNCTION : getMobicatMask
13585 *
13586 * DESCRIPTION: returns mobicat mask
13587 *
13588 * PARAMETERS : none
13589 *
13590 * RETURN : mobicat mask
13591 *
13592 *==========================================================================*/
13593uint8_t QCamera3HardwareInterface::getMobicatMask()
13594{
13595 return m_MobicatMask;
13596}
13597
13598/*===========================================================================
13599 * FUNCTION : setMobicat
13600 *
13601 * DESCRIPTION: set Mobicat on/off.
13602 *
13603 * PARAMETERS :
13604 * @params : none
13605 *
13606 * RETURN : int32_t type of status
13607 * NO_ERROR -- success
13608 *              non-zero failure code
13609 *==========================================================================*/
13610int32_t QCamera3HardwareInterface::setMobicat()
13611{
13612 char value [PROPERTY_VALUE_MAX];
13613 property_get("persist.camera.mobicat", value, "0");
13614 int32_t ret = NO_ERROR;
13615 uint8_t enableMobi = (uint8_t)atoi(value);
13616
13617 if (enableMobi) {
13618 tune_cmd_t tune_cmd;
13619 tune_cmd.type = SET_RELOAD_CHROMATIX;
13620 tune_cmd.module = MODULE_ALL;
13621 tune_cmd.value = TRUE;
13622 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13623 CAM_INTF_PARM_SET_VFE_COMMAND,
13624 tune_cmd);
13625
13626 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13627 CAM_INTF_PARM_SET_PP_COMMAND,
13628 tune_cmd);
13629 }
13630 m_MobicatMask = enableMobi;
13631
13632 return ret;
13633}
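
/* Note: Mobicat is a tuning/diagnostic path. A sketch of how it is typically
 * enabled on a development build (property name taken from the code above;
 * exact activation timing depends on when setMobicat() is next invoked):
 *   adb shell setprop persist.camera.mobicat 1
 * When enabled, SET_RELOAD_CHROMATIX tuning commands are queued for both the
 * VFE and post-processing modules, and the resulting mask is exposed through
 * getMobicatMask().
 */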
13634
13635/*===========================================================================
13636* FUNCTION : getLogLevel
13637*
13638* DESCRIPTION: Reads the log level property into a variable
13639*
13640* PARAMETERS :
13641* None
13642*
13643* RETURN :
13644* None
13645*==========================================================================*/
13646void QCamera3HardwareInterface::getLogLevel()
13647{
13648 char prop[PROPERTY_VALUE_MAX];
13649 uint32_t globalLogLevel = 0;
13650
13651 property_get("persist.camera.hal.debug", prop, "0");
13652 int val = atoi(prop);
13653 if (0 <= val) {
13654 gCamHal3LogLevel = (uint32_t)val;
13655 }
13656
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013657 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013658 gKpiDebugLevel = atoi(prop);
13659
13660 property_get("persist.camera.global.debug", prop, "0");
13661 val = atoi(prop);
13662 if (0 <= val) {
13663 globalLogLevel = (uint32_t)val;
13664 }
13665
13666 /* Highest log level among hal.logs and global.logs is selected */
13667 if (gCamHal3LogLevel < globalLogLevel)
13668 gCamHal3LogLevel = globalLogLevel;
13669
13670 return;
13671}
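
/* Note: log verbosity is property driven and re-read every time dump() runs
 * (e.g. via "adb shell dumpsys media.camera"), so it can be changed without
 * restarting the media server. Illustrative usage on a development build:
 *   adb shell setprop persist.camera.hal.debug 4
 *   adb shell setprop persist.camera.global.debug 3
 *   adb shell dumpsys media.camera          # triggers getLogLevel()
 * The effective HAL level is the higher of hal.debug and global.debug, while
 * persist.camera.kpi.debug independently sets gKpiDebugLevel.
 */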
13672
13673/*===========================================================================
13674 * FUNCTION : validateStreamRotations
13675 *
13676 * DESCRIPTION: Check if the rotations requested are supported
13677 *
13678 * PARAMETERS :
13679 * @stream_list : streams to be configured
13680 *
13681 * RETURN : NO_ERROR on success
13682 * -EINVAL on failure
13683 *
13684 *==========================================================================*/
13685int QCamera3HardwareInterface::validateStreamRotations(
13686 camera3_stream_configuration_t *streamList)
13687{
13688 int rc = NO_ERROR;
13689
13690 /*
13691 * Loop through all streams requested in configuration
13692 * Check if unsupported rotations have been requested on any of them
13693 */
13694 for (size_t j = 0; j < streamList->num_streams; j++){
13695 camera3_stream_t *newStream = streamList->streams[j];
13696
Emilian Peev35ceeed2017-06-29 11:58:56 -070013697 switch(newStream->rotation) {
13698 case CAMERA3_STREAM_ROTATION_0:
13699 case CAMERA3_STREAM_ROTATION_90:
13700 case CAMERA3_STREAM_ROTATION_180:
13701 case CAMERA3_STREAM_ROTATION_270:
13702 //Expected values
13703 break;
13704 default:
13705            ALOGE("%s: Error: Unsupported rotation of %d requested for stream "
13706 "type:%d and stream format:%d", __func__,
13707 newStream->rotation, newStream->stream_type,
13708 newStream->format);
13709 return -EINVAL;
13710 }
13711
Thierry Strudel3d639192016-09-09 11:52:26 -070013712 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13713 bool isImplDef = (newStream->format ==
13714 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13715 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13716 isImplDef);
13717
13718 if (isRotated && (!isImplDef || isZsl)) {
13719            LOGE("Error: Unsupported rotation of %d requested for stream "
13720 "type:%d and stream format:%d",
13721 newStream->rotation, newStream->stream_type,
13722 newStream->format);
13723 rc = -EINVAL;
13724 break;
13725 }
13726 }
13727
13728 return rc;
13729}
13730
13731/*===========================================================================
13732* FUNCTION : getFlashInfo
13733*
13734* DESCRIPTION: Retrieve information about whether the device has a flash.
13735*
13736* PARAMETERS :
13737* @cameraId : Camera id to query
13738* @hasFlash : Boolean indicating whether there is a flash device
13739* associated with given camera
13740* @flashNode : If a flash device exists, this will be its device node.
13741*
13742* RETURN :
13743* None
13744*==========================================================================*/
13745void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13746 bool& hasFlash,
13747 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13748{
13749 cam_capability_t* camCapability = gCamCapability[cameraId];
13750 if (NULL == camCapability) {
13751 hasFlash = false;
13752 flashNode[0] = '\0';
13753 } else {
13754 hasFlash = camCapability->flash_available;
13755 strlcpy(flashNode,
13756 (char*)camCapability->flash_dev_name,
13757 QCAMERA_MAX_FILEPATH_LENGTH);
13758 }
13759}
13760
13761/*===========================================================================
13762* FUNCTION : getEepromVersionInfo
13763*
13764* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13765*
13766* PARAMETERS : None
13767*
13768* RETURN : string describing EEPROM version
13769* "\0" if no such info available
13770*==========================================================================*/
13771const char *QCamera3HardwareInterface::getEepromVersionInfo()
13772{
13773 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13774}
13775
13776/*===========================================================================
13777* FUNCTION : getLdafCalib
13778*
13779* DESCRIPTION: Retrieve Laser AF calibration data
13780*
13781* PARAMETERS : None
13782*
13783* RETURN : Two uint32_t describing laser AF calibration data
13784* NULL if none is available.
13785*==========================================================================*/
13786const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13787{
13788 if (mLdafCalibExist) {
13789 return &mLdafCalib[0];
13790 } else {
13791 return NULL;
13792 }
13793}
13794
13795/*===========================================================================
13796 * FUNCTION : dynamicUpdateMetaStreamInfo
13797 *
13798 * DESCRIPTION: This function:
13799 * (1) stops all the channels
13800 * (2) returns error on pending requests and buffers
13801 * (3) sends metastream_info in setparams
13802 * (4) starts all channels
13803 * This is useful when sensor has to be restarted to apply any
13804 * settings such as frame rate from a different sensor mode
13805 *
13806 * PARAMETERS : None
13807 *
13808 * RETURN : NO_ERROR on success
13809 * Error codes on failure
13810 *
13811 *==========================================================================*/
13812int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13813{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013814 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013815 int rc = NO_ERROR;
13816
13817 LOGD("E");
13818
13819 rc = stopAllChannels();
13820 if (rc < 0) {
13821 LOGE("stopAllChannels failed");
13822 return rc;
13823 }
13824
13825 rc = notifyErrorForPendingRequests();
13826 if (rc < 0) {
13827 LOGE("notifyErrorForPendingRequests failed");
13828 return rc;
13829 }
13830
13831 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13832 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13833                ", Format:%d",
13834 mStreamConfigInfo.type[i],
13835 mStreamConfigInfo.stream_sizes[i].width,
13836 mStreamConfigInfo.stream_sizes[i].height,
13837 mStreamConfigInfo.postprocess_mask[i],
13838 mStreamConfigInfo.format[i]);
13839 }
13840
13841 /* Send meta stream info once again so that ISP can start */
13842 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13843 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13844 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13845 mParameters);
13846 if (rc < 0) {
13847 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13848 }
13849
13850 rc = startAllChannels();
13851 if (rc < 0) {
13852 LOGE("startAllChannels failed");
13853 return rc;
13854 }
13855
13856 LOGD("X");
13857 return rc;
13858}
13859
13860/*===========================================================================
13861 * FUNCTION : stopAllChannels
13862 *
13863 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13864 *
13865 * PARAMETERS : None
13866 *
13867 * RETURN : NO_ERROR on success
13868 * Error codes on failure
13869 *
13870 *==========================================================================*/
13871int32_t QCamera3HardwareInterface::stopAllChannels()
13872{
13873 int32_t rc = NO_ERROR;
13874
13875 LOGD("Stopping all channels");
13876 // Stop the Streams/Channels
13877 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13878 it != mStreamInfo.end(); it++) {
13879 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13880 if (channel) {
13881 channel->stop();
13882 }
13883 (*it)->status = INVALID;
13884 }
13885
13886 if (mSupportChannel) {
13887 mSupportChannel->stop();
13888 }
13889 if (mAnalysisChannel) {
13890 mAnalysisChannel->stop();
13891 }
13892 if (mRawDumpChannel) {
13893 mRawDumpChannel->stop();
13894 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013895 if (mHdrPlusRawSrcChannel) {
13896 mHdrPlusRawSrcChannel->stop();
13897 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013898 if (mMetadataChannel) {
13899 /* If content of mStreamInfo is not 0, there is metadata stream */
13900 mMetadataChannel->stop();
13901 }
13902
13903 LOGD("All channels stopped");
13904 return rc;
13905}
13906
13907/*===========================================================================
13908 * FUNCTION : startAllChannels
13909 *
13910 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13911 *
13912 * PARAMETERS : None
13913 *
13914 * RETURN : NO_ERROR on success
13915 * Error codes on failure
13916 *
13917 *==========================================================================*/
13918int32_t QCamera3HardwareInterface::startAllChannels()
13919{
13920 int32_t rc = NO_ERROR;
13921
13922 LOGD("Start all channels ");
13923 // Start the Streams/Channels
13924 if (mMetadataChannel) {
13925 /* If content of mStreamInfo is not 0, there is metadata stream */
13926 rc = mMetadataChannel->start();
13927 if (rc < 0) {
13928 LOGE("META channel start failed");
13929 return rc;
13930 }
13931 }
13932 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13933 it != mStreamInfo.end(); it++) {
13934 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13935 if (channel) {
13936 rc = channel->start();
13937 if (rc < 0) {
13938 LOGE("channel start failed");
13939 return rc;
13940 }
13941 }
13942 }
13943 if (mAnalysisChannel) {
13944 mAnalysisChannel->start();
13945 }
13946 if (mSupportChannel) {
13947 rc = mSupportChannel->start();
13948 if (rc < 0) {
13949 LOGE("Support channel start failed");
13950 return rc;
13951 }
13952 }
13953 if (mRawDumpChannel) {
13954 rc = mRawDumpChannel->start();
13955 if (rc < 0) {
13956 LOGE("RAW dump channel start failed");
13957 return rc;
13958 }
13959 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013960 if (mHdrPlusRawSrcChannel) {
13961 rc = mHdrPlusRawSrcChannel->start();
13962 if (rc < 0) {
13963 LOGE("HDR+ RAW channel start failed");
13964 return rc;
13965 }
13966 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013967
13968 LOGD("All channels started");
13969 return rc;
13970}
13971
13972/*===========================================================================
13973 * FUNCTION : notifyErrorForPendingRequests
13974 *
13975 * DESCRIPTION: This function sends error for all the pending requests/buffers
13976 *
13977 * PARAMETERS : None
13978 *
13979 * RETURN : Error codes
13980 * NO_ERROR on success
13981 *
13982 *==========================================================================*/
13983int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13984{
Emilian Peev7650c122017-01-19 08:24:33 -080013985 notifyErrorFoPendingDepthData(mDepthChannel);
13986
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013987 auto pendingRequest = mPendingRequestsList.begin();
13988 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070013989
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013990 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
13991 // buffers (for which buffers aren't sent yet).
13992 while (pendingRequest != mPendingRequestsList.end() ||
13993 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
13994 if (pendingRequest == mPendingRequestsList.end() ||
13995 pendingBuffer->frame_number < pendingRequest->frame_number) {
13996            // If metadata for this frame was sent, notify about a buffer error and return buffers
13997 // with error.
13998 for (auto &info : pendingBuffer->mPendingBufferList) {
13999 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014000 camera3_notify_msg_t notify_msg;
14001 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14002 notify_msg.type = CAMERA3_MSG_ERROR;
14003 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014004 notify_msg.message.error.error_stream = info.stream;
14005 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014006 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014007
14008 camera3_stream_buffer_t buffer = {};
14009 buffer.acquire_fence = -1;
14010 buffer.release_fence = -1;
14011 buffer.buffer = info.buffer;
14012 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14013 buffer.stream = info.stream;
14014 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014015 }
14016
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014017 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14018 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14019 pendingBuffer->frame_number > pendingRequest->frame_number) {
14020 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014021 camera3_notify_msg_t notify_msg;
14022 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14023 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014024 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14025 notify_msg.message.error.error_stream = nullptr;
14026 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014027 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014028
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014029 if (pendingRequest->input_buffer != nullptr) {
14030 camera3_capture_result result = {};
14031 result.frame_number = pendingRequest->frame_number;
14032 result.result = nullptr;
14033 result.input_buffer = pendingRequest->input_buffer;
14034 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014035 }
14036
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014037 mShutterDispatcher.clear(pendingRequest->frame_number);
14038 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14039 } else {
14040 // If both buffers and result metadata weren't sent yet, notify about a request error
14041 // and return buffers with error.
14042 for (auto &info : pendingBuffer->mPendingBufferList) {
14043 camera3_notify_msg_t notify_msg;
14044 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14045 notify_msg.type = CAMERA3_MSG_ERROR;
14046 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14047 notify_msg.message.error.error_stream = info.stream;
14048 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14049 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014050
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014051 camera3_stream_buffer_t buffer = {};
14052 buffer.acquire_fence = -1;
14053 buffer.release_fence = -1;
14054 buffer.buffer = info.buffer;
14055 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14056 buffer.stream = info.stream;
14057 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14058 }
14059
14060 if (pendingRequest->input_buffer != nullptr) {
14061 camera3_capture_result result = {};
14062 result.frame_number = pendingRequest->frame_number;
14063 result.result = nullptr;
14064 result.input_buffer = pendingRequest->input_buffer;
14065 orchestrateResult(&result);
14066 }
14067
14068 mShutterDispatcher.clear(pendingRequest->frame_number);
14069 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14070 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014071 }
14072 }
14073
14074 /* Reset pending frame Drop list and requests list */
14075 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014076 mShutterDispatcher.clear();
14077 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014078 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014079 LOGH("Cleared all the pending buffers ");
14080
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014081 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014082}
14083
14084bool QCamera3HardwareInterface::isOnEncoder(
14085 const cam_dimension_t max_viewfinder_size,
14086 uint32_t width, uint32_t height)
14087{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014088 return ((width > (uint32_t)max_viewfinder_size.width) ||
14089 (height > (uint32_t)max_viewfinder_size.height) ||
14090 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14091 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014092}
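
/* Worked example (the viewfinder limit below is hypothetical): with
 * max_viewfinder_size = 1920x1080, a 3840x2160 stream exceeds the limit and
 * takes the encoder path (returns true), while a 1280x720 stream does not
 * (returns false). Any stream wider than 3840 or taller than 2160 returns
 * true regardless of the viewfinder limit.
 */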
14093
14094/*===========================================================================
14095 * FUNCTION : setBundleInfo
14096 *
14097 * DESCRIPTION: Set bundle info for all streams that are bundle.
14098 *
14099 * PARAMETERS : None
14100 *
14101 * RETURN : NO_ERROR on success
14102 * Error codes on failure
14103 *==========================================================================*/
14104int32_t QCamera3HardwareInterface::setBundleInfo()
14105{
14106 int32_t rc = NO_ERROR;
14107
14108 if (mChannelHandle) {
14109 cam_bundle_config_t bundleInfo;
14110 memset(&bundleInfo, 0, sizeof(bundleInfo));
14111 rc = mCameraHandle->ops->get_bundle_info(
14112 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14113 if (rc != NO_ERROR) {
14114 LOGE("get_bundle_info failed");
14115 return rc;
14116 }
14117 if (mAnalysisChannel) {
14118 mAnalysisChannel->setBundleInfo(bundleInfo);
14119 }
14120 if (mSupportChannel) {
14121 mSupportChannel->setBundleInfo(bundleInfo);
14122 }
14123 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14124 it != mStreamInfo.end(); it++) {
14125 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14126 channel->setBundleInfo(bundleInfo);
14127 }
14128 if (mRawDumpChannel) {
14129 mRawDumpChannel->setBundleInfo(bundleInfo);
14130 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014131 if (mHdrPlusRawSrcChannel) {
14132 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14133 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014134 }
14135
14136 return rc;
14137}
14138
14139/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014140 * FUNCTION : setInstantAEC
14141 *
14142 * DESCRIPTION: Set Instant AEC related params.
14143 *
14144 * PARAMETERS :
14145 * @meta: CameraMetadata reference
14146 *
14147 * RETURN : NO_ERROR on success
14148 * Error codes on failure
14149 *==========================================================================*/
14150int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14151{
14152 int32_t rc = NO_ERROR;
14153 uint8_t val = 0;
14154 char prop[PROPERTY_VALUE_MAX];
14155
14156 // First try to configure instant AEC from framework metadata
14157 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14158 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14159 }
14160
14161 // If framework did not set this value, try to read from set prop.
14162 if (val == 0) {
14163 memset(prop, 0, sizeof(prop));
14164 property_get("persist.camera.instant.aec", prop, "0");
14165 val = (uint8_t)atoi(prop);
14166 }
14167
14168 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14169 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14170 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14171 mInstantAEC = val;
14172 mInstantAECSettledFrameNumber = 0;
14173 mInstantAecFrameIdxCount = 0;
14174 LOGH("instantAEC value set %d",val);
14175 if (mInstantAEC) {
14176 memset(prop, 0, sizeof(prop));
14177 property_get("persist.camera.ae.instant.bound", prop, "10");
14178 int32_t aec_frame_skip_cnt = atoi(prop);
14179 if (aec_frame_skip_cnt >= 0) {
14180 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14181 } else {
14182 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14183 rc = BAD_VALUE;
14184 }
14185 }
14186 } else {
14187 LOGE("Bad instant aec value set %d", val);
14188 rc = BAD_VALUE;
14189 }
14190 return rc;
14191}
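
/* Note: instant AEC can be requested through the QCAMERA3_INSTANT_AEC_MODE
 * vendor tag in the capture settings or, as a fallback, through system
 * properties (names taken from the code above):
 *   adb shell setprop persist.camera.instant.aec 1        # non-default convergence mode
 *   adb shell setprop persist.camera.ae.instant.bound 10  # max frames skipped for display
 * Values outside [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX) are
 * rejected with BAD_VALUE.
 */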
14192
14193/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014194 * FUNCTION : get_num_overall_buffers
14195 *
14196 * DESCRIPTION: Count the number of pending buffers across all requests.
14197 *
14198 * PARAMETERS : None
14199 *
14200 * RETURN : Number of overall pending buffers
14201 *
14202 *==========================================================================*/
14203uint32_t PendingBuffersMap::get_num_overall_buffers()
14204{
14205 uint32_t sum_buffers = 0;
14206 for (auto &req : mPendingBuffersInRequest) {
14207 sum_buffers += req.mPendingBufferList.size();
14208 }
14209 return sum_buffers;
14210}
14211
14212/*===========================================================================
14213 * FUNCTION : removeBuf
14214 *
14215 * DESCRIPTION: Remove a matching buffer from tracker.
14216 *
14217 * PARAMETERS : @buffer: image buffer for the callback
14218 *
14219 * RETURN : None
14220 *
14221 *==========================================================================*/
14222void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14223{
14224 bool buffer_found = false;
14225 for (auto req = mPendingBuffersInRequest.begin();
14226 req != mPendingBuffersInRequest.end(); req++) {
14227 for (auto k = req->mPendingBufferList.begin();
14228 k != req->mPendingBufferList.end(); k++ ) {
14229 if (k->buffer == buffer) {
14230 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14231 req->frame_number, buffer);
14232 k = req->mPendingBufferList.erase(k);
14233 if (req->mPendingBufferList.empty()) {
14234 // Remove this request from Map
14235 req = mPendingBuffersInRequest.erase(req);
14236 }
14237 buffer_found = true;
14238 break;
14239 }
14240 }
14241 if (buffer_found) {
14242 break;
14243 }
14244 }
14245 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14246 get_num_overall_buffers());
14247}
14248
14249/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014250 * FUNCTION : getBufErrStatus
14251 *
14252 * DESCRIPTION: get buffer error status
14253 *
14254 * PARAMETERS : @buffer: buffer handle
14255 *
14256 * RETURN : Error status
14257 *
14258 *==========================================================================*/
14259int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14260{
14261 for (auto& req : mPendingBuffersInRequest) {
14262 for (auto& k : req.mPendingBufferList) {
14263 if (k.buffer == buffer)
14264 return k.bufStatus;
14265 }
14266 }
14267 return CAMERA3_BUFFER_STATUS_OK;
14268}
14269
14270/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014271 * FUNCTION : setPAAFSupport
14272 *
14273 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14274 * feature mask according to stream type and filter
14275 * arrangement
14276 *
14277 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14278 * @stream_type: stream type
14279 * @filter_arrangement: filter arrangement
14280 *
14281 * RETURN : None
14282 *==========================================================================*/
14283void QCamera3HardwareInterface::setPAAFSupport(
14284 cam_feature_mask_t& feature_mask,
14285 cam_stream_type_t stream_type,
14286 cam_color_filter_arrangement_t filter_arrangement)
14287{
Thierry Strudel3d639192016-09-09 11:52:26 -070014288 switch (filter_arrangement) {
14289 case CAM_FILTER_ARRANGEMENT_RGGB:
14290 case CAM_FILTER_ARRANGEMENT_GRBG:
14291 case CAM_FILTER_ARRANGEMENT_GBRG:
14292 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014293 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14294 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014295 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014296 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14297 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014298 }
14299 break;
14300 case CAM_FILTER_ARRANGEMENT_Y:
14301 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14302 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14303 }
14304 break;
14305 default:
14306 break;
14307 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014308 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14309 feature_mask, stream_type, filter_arrangement);
14310
14311
Thierry Strudel3d639192016-09-09 11:52:26 -070014312}
14313
14314/*===========================================================================
14315* FUNCTION : getSensorMountAngle
14316*
14317* DESCRIPTION: Retrieve sensor mount angle
14318*
14319* PARAMETERS : None
14320*
14321* RETURN : sensor mount angle in uint32_t
14322*==========================================================================*/
14323uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14324{
14325 return gCamCapability[mCameraId]->sensor_mount_angle;
14326}
14327
14328/*===========================================================================
14329* FUNCTION : getRelatedCalibrationData
14330*
14331* DESCRIPTION: Retrieve related system calibration data
14332*
14333* PARAMETERS : None
14334*
14335* RETURN : Pointer of related system calibration data
14336*==========================================================================*/
14337const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14338{
14339 return (const cam_related_system_calibration_data_t *)
14340 &(gCamCapability[mCameraId]->related_cam_calibration);
14341}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014342
14343/*===========================================================================
14344 * FUNCTION : is60HzZone
14345 *
14346 * DESCRIPTION: Whether the device is in a region with 60Hz mains electricity frequency
14347 *
14348 * PARAMETERS : None
14349 *
14350 * RETURN : True if in 60Hz zone, False otherwise
14351 *==========================================================================*/
14352bool QCamera3HardwareInterface::is60HzZone()
14353{
14354 time_t t = time(NULL);
14355 struct tm lt;
14356
14357 struct tm* r = localtime_r(&t, &lt);
14358
14359 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14360 return true;
14361 else
14362 return false;
14363}
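
/* Worked example: tm_gmtoff is the local UTC offset in seconds, so offsets
 * strictly between -2h (-7200) and +8h (+28800) are treated as 50Hz regions,
 * while anything at or beyond those bounds, or a failed localtime_r(), is
 * treated as 60Hz. E.g. UTC-5 (-18000s) returns true (60Hz) and UTC+1
 * (+3600s) returns false (50Hz). This is a coarse timezone heuristic, not a
 * precise per-country lookup.
 */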
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014364
14365/*===========================================================================
14366 * FUNCTION : adjustBlackLevelForCFA
14367 *
14368 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14369 * of bayer CFA (Color Filter Array).
14370 *
14371 * PARAMETERS : @input: black level pattern in the order of RGGB
14372 * @output: black level pattern in the order of CFA
14373 * @color_arrangement: CFA color arrangement
14374 *
14375 * RETURN : None
14376 *==========================================================================*/
14377template<typename T>
14378void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14379 T input[BLACK_LEVEL_PATTERN_CNT],
14380 T output[BLACK_LEVEL_PATTERN_CNT],
14381 cam_color_filter_arrangement_t color_arrangement)
14382{
14383 switch (color_arrangement) {
14384 case CAM_FILTER_ARRANGEMENT_GRBG:
14385 output[0] = input[1];
14386 output[1] = input[0];
14387 output[2] = input[3];
14388 output[3] = input[2];
14389 break;
14390 case CAM_FILTER_ARRANGEMENT_GBRG:
14391 output[0] = input[2];
14392 output[1] = input[3];
14393 output[2] = input[0];
14394 output[3] = input[1];
14395 break;
14396 case CAM_FILTER_ARRANGEMENT_BGGR:
14397 output[0] = input[3];
14398 output[1] = input[2];
14399 output[2] = input[1];
14400 output[3] = input[0];
14401 break;
14402 case CAM_FILTER_ARRANGEMENT_RGGB:
14403 output[0] = input[0];
14404 output[1] = input[1];
14405 output[2] = input[2];
14406 output[3] = input[3];
14407 break;
14408 default:
14409 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14410 break;
14411 }
14412}
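
/* Worked example: with an RGGB-ordered input {R, Gr, Gb, B} = {64, 63, 62, 61}
 * and color_arrangement = CAM_FILTER_ARRANGEMENT_GRBG, the output becomes
 * {Gr, R, B, Gb} = {63, 64, 61, 62}, i.e. the same four values re-ordered to
 * the sensor's CFA readout order; an RGGB arrangement passes the pattern
 * through unchanged.
 */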
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014413
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014414void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14415 CameraMetadata &resultMetadata,
14416 std::shared_ptr<metadata_buffer_t> settings)
14417{
14418 if (settings == nullptr) {
14419 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14420 return;
14421 }
14422
14423 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14424 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14425 }
14426
14427 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14428 String8 str((const char *)gps_methods);
14429 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14430 }
14431
14432 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14433 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14434 }
14435
14436 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14437 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14438 }
14439
14440 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14441 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14442 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14443 }
14444
14445 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14446 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14447 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14448 }
14449
14450 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14451 int32_t fwk_thumb_size[2];
14452 fwk_thumb_size[0] = thumb_size->width;
14453 fwk_thumb_size[1] = thumb_size->height;
14454 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14455 }
14456
14457 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14458 uint8_t fwk_intent = intent[0];
14459 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14460 }
14461}
14462
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014463bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14464 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14465 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014466{
14467 if (hdrPlusRequest == nullptr) return false;
14468
14469 // Check noise reduction mode is high quality.
14470 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14471 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14472 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014473 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14474 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014475 return false;
14476 }
14477
14478 // Check edge mode is high quality.
14479 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14480 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14481 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14482 return false;
14483 }
14484
14485 if (request.num_output_buffers != 1 ||
14486 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14487 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014488 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14489 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
                    request.output_buffers[i].stream->width,
                    request.output_buffers[i].stream->height,
                    request.output_buffers[i].stream->format);
14493 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014494 return false;
14495 }
14496
14497 // Get a YUV buffer from pic channel.
14498 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14499 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14500 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14501 if (res != OK) {
14502 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14503 __FUNCTION__, strerror(-res), res);
14504 return false;
14505 }
14506
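    // Package the YUV buffer as the HDR+ YUV output stream buffer and build the capture request.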
14507 pbcamera::StreamBuffer buffer;
14508 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014509 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014510 buffer.data = yuvBuffer->buffer;
14511 buffer.dataSize = yuvBuffer->frame_len;
14512
14513 pbcamera::CaptureRequest pbRequest;
14514 pbRequest.id = request.frame_number;
14515 pbRequest.outputBuffers.push_back(buffer);
14516
14517 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014518 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014519 if (res != OK) {
14520 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14521 strerror(-res), res);
14522 return false;
14523 }
14524
14525 hdrPlusRequest->yuvBuffer = yuvBuffer;
14526 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14527
14528 return true;
14529}
14530
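// Kick off an asynchronous open of the HDR+ client through the Easel manager client. The
// onOpened() or onOpenFailed() callback is invoked once the open attempt completes.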
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014531status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14532{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014533 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14534 return OK;
14535 }
14536
Chien-Yu Chen44abb642017-06-02 18:00:38 -070014537 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014538 if (res != OK) {
14539 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14540 strerror(-res), res);
14541 return res;
14542 }
14543 gHdrPlusClientOpening = true;
14544
14545 return OK;
14546}
14547
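// Enable HDR+ (ZSL) mode: make sure an HDR+ client is open (or being opened), configure the
// HDR+ streams, and ask Easel to start capturing ZSL RAW buffers.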
Chien-Yu Chenee335912017-02-09 17:53:20 -080014548status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14549{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014550 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014551
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014552 if (mHdrPlusModeEnabled) {
14553 return OK;
14554 }
14555
    // Check if gHdrPlusClient is already open or being opened.
14557 if (gHdrPlusClient == nullptr) {
14558 if (gHdrPlusClientOpening) {
14559 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14560 return OK;
14561 }
14562
14563 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014564 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014565 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14566 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014567 return res;
14568 }
14569
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014570 // When opening HDR+ client completes, HDR+ mode will be enabled.
14571 return OK;
14572
Chien-Yu Chenee335912017-02-09 17:53:20 -080014573 }
14574
14575 // Configure stream for HDR+.
14576 res = configureHdrPlusStreamsLocked();
14577 if (res != OK) {
14578 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014579 return res;
14580 }
14581
14582 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14583 res = gHdrPlusClient->setZslHdrPlusMode(true);
14584 if (res != OK) {
14585 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014586 return res;
14587 }
14588
14589 mHdrPlusModeEnabled = true;
14590 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14591
14592 return OK;
14593}
14594
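// Disable HDR+ (ZSL) mode and close the HDR+ client so Easel can enter low power mode.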
14595void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14596{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014597 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014598 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014599 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14600 if (res != OK) {
14601 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14602 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014603
14604 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chen44abb642017-06-02 18:00:38 -070014605 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014606 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014607 }
14608
14609 mHdrPlusModeEnabled = false;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014610 gHdrPlusClientOpening = false;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014611 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14612}
14613
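// Build the HDR+ client stream configuration. The input is either the HAL's RAW10 source
// channel or the sensor MIPI output sent directly to Easel; the output is currently the YUV
// stream backing the pic channel.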
14614status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014615{
14616 pbcamera::InputConfiguration inputConfig;
14617 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14618 status_t res = OK;
14619
14620 // Configure HDR+ client streams.
14621 // Get input config.
14622 if (mHdrPlusRawSrcChannel) {
14623 // HDR+ input buffers will be provided by HAL.
14624 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14625 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14626 if (res != OK) {
            LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
14628 __FUNCTION__, strerror(-res), res);
14629 return res;
14630 }
14631
14632 inputConfig.isSensorInput = false;
14633 } else {
14634 // Sensor MIPI will send data to Easel.
14635 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014636 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014637 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14638 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14639 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14640 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14641 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
Yin-Chia Yeheeb10422017-05-23 11:37:46 -070014642 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014643 if (mSensorModeInfo.num_raw_bits != 10) {
14644 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14645 mSensorModeInfo.num_raw_bits);
14646 return BAD_VALUE;
14647 }
14648
14649 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014650 }
14651
14652 // Get output configurations.
14653 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014654 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014655
14656 // Easel may need to output YUV output buffers if mPictureChannel was created.
14657 pbcamera::StreamConfiguration yuvOutputConfig;
14658 if (mPictureChannel != nullptr) {
14659 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14660 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14661 if (res != OK) {
            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14663 __FUNCTION__, strerror(-res), res);
14664
14665 return res;
14666 }
14667
14668 outputStreamConfigs.push_back(yuvOutputConfig);
14669 }
14670
14671 // TODO: consider other channels for YUV output buffers.
14672
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014673 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014674 if (res != OK) {
        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14676 strerror(-res), res);
14677 return res;
14678 }
14679
14680 return OK;
14681}
14682
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014683void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
14684{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014685 if (client == nullptr) {
14686 ALOGE("%s: Opened client is null.", __FUNCTION__);
14687 return;
14688 }
14689
Chien-Yu Chene96475e2017-04-11 11:53:26 -070014690 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014691 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14692
14693 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014694 if (!gHdrPlusClientOpening) {
        ALOGW("%s: HDR+ was disabled while the HDR+ client was being opened.", __FUNCTION__);
14696 return;
14697 }
14698
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014699 gHdrPlusClient = std::move(client);
14700 gHdrPlusClientOpening = false;
14701
14702 // Set static metadata.
14703 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14704 if (res != OK) {
14705 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14706 __FUNCTION__, strerror(-res), res);
Chien-Yu Chen44abb642017-06-02 18:00:38 -070014707 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014708 gHdrPlusClient = nullptr;
14709 return;
14710 }
14711
14712 // Enable HDR+ mode.
14713 res = enableHdrPlusModeLocked();
14714 if (res != OK) {
        LOGE("%s: Failed to enable HDR+ mode: %s (%d).", __FUNCTION__, strerror(-res), res);
14716 }
14717}
14718
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014719void QCamera3HardwareInterface::onOpenFailed(status_t err)
14720{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014721 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14722 Mutex::Autolock l(gHdrPlusClientLock);
14723 gHdrPlusClientOpening = false;
14724}
14725
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014726void QCamera3HardwareInterface::onFatalError()
14727{
14728 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
14729
14730 // Set HAL state to error.
14731 pthread_mutex_lock(&mMutex);
14732 mState = ERROR;
14733 pthread_mutex_unlock(&mMutex);
14734
14735 handleCameraDeviceError();
14736}
14737
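// Handle an HDR+ capture result from the HDR+ service: update the result metadata with the
// original request's settings, hand the YUV buffer back to the pic channel for JPEG encoding,
// dispatch the shutter, and send the result metadata to the framework.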
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014738void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014739 const camera_metadata_t &resultMetadata)
14740{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014741 if (result != nullptr) {
14742 if (result->outputBuffers.size() != 1) {
            ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
14744 result->outputBuffers.size());
14745 return;
14746 }
14747
14748 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14749 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14750 result->outputBuffers[0].streamId);
14751 return;
14752 }
14753
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014754 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014755 HdrPlusPendingRequest pendingRequest;
14756 {
14757 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14758 auto req = mHdrPlusPendingRequests.find(result->requestId);
14759 pendingRequest = req->second;
14760 }
14761
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014762 // Update the result metadata with the settings of the HDR+ still capture request because
14763 // the result metadata belongs to a ZSL buffer.
14764 CameraMetadata metadata;
14765 metadata = &resultMetadata;
14766 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14767 camera_metadata_t* updatedResultMetadata = metadata.release();
14768
14769 QCamera3PicChannel *picChannel =
14770 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14771
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014772 // Check if dumping HDR+ YUV output is enabled.
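        // The dump can be toggled at runtime, e.g. "adb shell setprop persist.camera.hdrplus.dump_yuv 1".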
14773 char prop[PROPERTY_VALUE_MAX];
14774 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14775 bool dumpYuvOutput = atoi(prop);
14776
14777 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014778 // Dump yuv buffer to a ppm file.
14779 pbcamera::StreamConfiguration outputConfig;
14780 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14781 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14782 if (rc == OK) {
14783 char buf[FILENAME_MAX] = {};
14784 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14785 result->requestId, result->outputBuffers[0].streamId,
14786 outputConfig.image.width, outputConfig.image.height);
14787
14788 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14789 } else {
14790 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14791 __FUNCTION__, strerror(-rc), rc);
14792 }
14793 }
14794
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014795 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14796 auto halMetadata = std::make_shared<metadata_buffer_t>();
14797 clear_metadata_buffer(halMetadata.get());
14798
14799 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14800 // encoding.
14801 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14802 halStreamId, /*minFrameDuration*/0);
14803 if (res == OK) {
14804 // Return the buffer to pic channel for encoding.
14805 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14806 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14807 halMetadata);
14808 } else {
14809 // Return the buffer without encoding.
14810 // TODO: This should not happen but we may want to report an error buffer to camera
14811 // service.
14812 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14813 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14814 strerror(-res), res);
14815 }
14816
        // Find the sensor timestamp in the result metadata to dispatch the shutter notification.
14818 camera_metadata_ro_entry_t entry;
14819 res = find_camera_metadata_ro_entry(updatedResultMetadata,
14820 ANDROID_SENSOR_TIMESTAMP, &entry);
14821 if (res != OK) {
14822 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
14823 __FUNCTION__, result->requestId, strerror(-res), res);
14824 } else {
14825 mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
14826 }
14827
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014828 // Send HDR+ metadata to framework.
14829 {
14830 pthread_mutex_lock(&mMutex);
14831
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014832 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
14833 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014834 pthread_mutex_unlock(&mMutex);
14835 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014836
14837 // Remove the HDR+ pending request.
14838 {
14839 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14840 auto req = mHdrPlusPendingRequests.find(result->requestId);
14841 mHdrPlusPendingRequests.erase(req);
14842 }
14843 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014844}
14845
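// Handle a failed HDR+ capture result: return the YUV buffer to the pic channel, report buffer
// errors for all pending buffers of the failed frame, and drop the pending request.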
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014846void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
14847{
14848 if (failedResult == nullptr) {
14849 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
14850 return;
14851 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014852
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014853 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014854
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014855 // Remove the pending HDR+ request.
14856 {
14857 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14858 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14859
14860 // Return the buffer to pic channel.
14861 QCamera3PicChannel *picChannel =
14862 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14863 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14864
14865 mHdrPlusPendingRequests.erase(pendingRequest);
14866 }
14867
14868 pthread_mutex_lock(&mMutex);
14869
14870 // Find the pending buffers.
14871 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
14872 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14873 if (pendingBuffers->frame_number == failedResult->requestId) {
14874 break;
14875 }
14876 pendingBuffers++;
14877 }
14878
14879 // Send out buffer errors for the pending buffers.
14880 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14881 std::vector<camera3_stream_buffer_t> streamBuffers;
14882 for (auto &buffer : pendingBuffers->mPendingBufferList) {
14883 // Prepare a stream buffer.
14884 camera3_stream_buffer_t streamBuffer = {};
14885 streamBuffer.stream = buffer.stream;
14886 streamBuffer.buffer = buffer.buffer;
14887 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14888 streamBuffer.acquire_fence = -1;
14889 streamBuffer.release_fence = -1;
14890
14891 streamBuffers.push_back(streamBuffer);
14892
14893 // Send out error buffer event.
14894 camera3_notify_msg_t notify_msg = {};
14895 notify_msg.type = CAMERA3_MSG_ERROR;
14896 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
14897 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
14898 notify_msg.message.error.error_stream = buffer.stream;
14899
14900 orchestrateNotify(&notify_msg);
14901 }
14902
14903 camera3_capture_result_t result = {};
14904 result.frame_number = pendingBuffers->frame_number;
14905 result.num_output_buffers = streamBuffers.size();
14906 result.output_buffers = &streamBuffers[0];
14907
14908 // Send out result with buffer errors.
14909 orchestrateResult(&result);
14910
14911 // Remove pending buffers.
14912 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
14913 }
14914
14915 // Remove pending request.
14916 auto halRequest = mPendingRequestsList.begin();
14917 while (halRequest != mPendingRequestsList.end()) {
14918 if (halRequest->frame_number == failedResult->requestId) {
14919 mPendingRequestsList.erase(halRequest);
14920 break;
14921 }
14922 halRequest++;
14923 }
14924
14925 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014926}
14927
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014928
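// ShutterDispatcher delivers shutter notifications to the framework in frame-number order.
// A shutter is registered with expectShutter() and is sent out by markShutterReady() only
// after all earlier expected shutters have been sent; regular and reprocess shutters are
// tracked in separate queues.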
14929ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
14930 mParent(parent) {}
14931
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014932void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014933{
14934 std::lock_guard<std::mutex> lock(mLock);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014935
14936 if (isReprocess) {
14937 mReprocessShutters.emplace(frameNumber, Shutter());
14938 } else {
14939 mShutters.emplace(frameNumber, Shutter());
14940 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014941}
14942
14943void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
14944{
14945 std::lock_guard<std::mutex> lock(mLock);
14946
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014947 std::map<uint32_t, Shutter> *shutters = nullptr;
14948
14949 // Find the shutter entry.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014950 auto shutter = mShutters.find(frameNumber);
14951 if (shutter == mShutters.end()) {
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014952 shutter = mReprocessShutters.find(frameNumber);
14953 if (shutter == mReprocessShutters.end()) {
14954 // Shutter was already sent.
14955 return;
14956 }
14957 shutters = &mReprocessShutters;
14958 } else {
14959 shutters = &mShutters;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014960 }
14961
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014962 // Make this frame's shutter ready.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014963 shutter->second.ready = true;
14964 shutter->second.timestamp = timestamp;
14965
    // Iterate through the shutters in frame-number order and send them out until reaching one that is not ready yet.
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014967 shutter = shutters->begin();
14968 while (shutter != shutters->end()) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014969 if (!shutter->second.ready) {
14970 // If this shutter is not ready, the following shutters can't be sent.
14971 break;
14972 }
14973
14974 camera3_notify_msg_t msg = {};
14975 msg.type = CAMERA3_MSG_SHUTTER;
14976 msg.message.shutter.frame_number = shutter->first;
14977 msg.message.shutter.timestamp = shutter->second.timestamp;
14978 mParent->orchestrateNotify(&msg);
14979
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014980 shutter = shutters->erase(shutter);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014981 }
14982}
14983
14984void ShutterDispatcher::clear(uint32_t frameNumber)
14985{
14986 std::lock_guard<std::mutex> lock(mLock);
14987 mShutters.erase(frameNumber);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014988 mReprocessShutters.erase(frameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014989}
14990
14991void ShutterDispatcher::clear()
14992{
14993 std::lock_guard<std::mutex> lock(mLock);
14994
14995 // Log errors for stale shutters.
14996 for (auto &shutter : mShutters) {
14997 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
14998 __FUNCTION__, shutter.first, shutter.second.ready,
14999 shutter.second.timestamp);
15000 }
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015001
15002 // Log errors for stale reprocess shutters.
15003 for (auto &shutter : mReprocessShutters) {
15004 ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15005 __FUNCTION__, shutter.first, shutter.second.ready,
15006 shutter.second.timestamp);
15007 }
15008
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015009 mShutters.clear();
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015010 mReprocessShutters.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015011}
15012
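// OutputBufferDispatcher returns output buffers to the framework in frame-number order on a
// per-stream basis. A buffer is registered with expectBuffer() and is sent out by
// markBufferReady() only after all earlier buffers of the same stream are ready.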
15013OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
15014 mParent(parent) {}
15015
15016status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
15017{
15018 std::lock_guard<std::mutex> lock(mLock);
15019 mStreamBuffers.clear();
15020 if (!streamList) {
15021 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15022 return -EINVAL;
15023 }
15024
15025 // Create a "frame-number -> buffer" map for each stream.
15026 for (uint32_t i = 0; i < streamList->num_streams; i++) {
15027 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15028 }
15029
15030 return OK;
15031}
15032
15033status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15034{
15035 std::lock_guard<std::mutex> lock(mLock);
15036
15037 // Find the "frame-number -> buffer" map for the stream.
15038 auto buffers = mStreamBuffers.find(stream);
15039 if (buffers == mStreamBuffers.end()) {
15040 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15041 return -EINVAL;
15042 }
15043
15044 // Create an unready buffer for this frame number.
15045 buffers->second.emplace(frameNumber, Buffer());
15046 return OK;
15047}
15048
15049void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15050 const camera3_stream_buffer_t &buffer)
15051{
15052 std::lock_guard<std::mutex> lock(mLock);
15053
15054 // Find the frame number -> buffer map for the stream.
15055 auto buffers = mStreamBuffers.find(buffer.stream);
15056 if (buffers == mStreamBuffers.end()) {
15057 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15058 return;
15059 }
15060
    // Find the unready buffer for this frame number and mark it ready.
15062 auto pendingBuffer = buffers->second.find(frameNumber);
15063 if (pendingBuffer == buffers->second.end()) {
15064 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15065 return;
15066 }
15067
15068 pendingBuffer->second.ready = true;
15069 pendingBuffer->second.buffer = buffer;
15070
    // Iterate through the buffers in frame-number order and send them out until reaching one that is not ready yet.
15072 pendingBuffer = buffers->second.begin();
15073 while (pendingBuffer != buffers->second.end()) {
15074 if (!pendingBuffer->second.ready) {
15075 // If this buffer is not ready, the following buffers can't be sent.
15076 break;
15077 }
15078
15079 camera3_capture_result_t result = {};
15080 result.frame_number = pendingBuffer->first;
15081 result.num_output_buffers = 1;
15082 result.output_buffers = &pendingBuffer->second.buffer;
15083
        // Send out the result containing this ready buffer.
15085 mParent->orchestrateResult(&result);
15086
15087 pendingBuffer = buffers->second.erase(pendingBuffer);
15088 }
15089}
15090
15091void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15092{
15093 std::lock_guard<std::mutex> lock(mLock);
15094
15095 // Log errors for stale buffers.
15096 for (auto &buffers : mStreamBuffers) {
15097 for (auto &buffer : buffers.second) {
15098 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15099 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15100 }
15101 buffers.second.clear();
15102 }
15103
15104 if (clearConfiguredStreams) {
15105 mStreamBuffers.clear();
15106 }
15107}
15108
Thierry Strudel3d639192016-09-09 11:52:26 -070015109}; //end namespace qcamera