/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold (in seconds) for detection of missing buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
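// Illustrative note (not part of the original source): METADATA_MAP_SIZE
// evaluates to the element count of a statically sized array, which is how
// the QCameraMap tables defined below are typically walked, e.g.:
//
//     for (size_t i = 0; i < METADATA_MAP_SIZE(EFFECT_MODES_MAP); i++) {
//         // scan EFFECT_MODES_MAP[i] for a matching enum value
//     }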

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6
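// Illustrative sketch (assumption, not in the original source): the face rect
// and landmark indices above address flattened int32 arrays, one fixed-size
// block per detected face. The array names and stride below are hypothetical:
//
//     // one (left, top, right, bottom, weight) tuple per face i
//     int32_t left   = faceRects[i * (FACE_WEIGHT + 1) + FACE_LEFT];
//     // one (eyes, mouth) coordinate set per face i
//     int32_t mouthX = faceLandmarks[i * TOTAL_LANDMARK_INDICES + MOUTH_X];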

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// TODO: Enable HDR+ for front camera after it's supported. b/37100623.
#define ENABLE_HDRPLUS_FOR_FRONT_CAMERA 0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index, which means that for HAL values that map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
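
// Illustrative sketch (not part of the original HAL source): the QCameraMap
// tables above are searched linearly, so when several entries share a value
// the entry with the lowest index wins -- which is why the ordering note
// above REFERENCE_ILLUMINANT_MAP matters. A minimal first-match lookup could
// look like the following; the helper name is hypothetical and the
// fwk_name/hal_name member names are assumed from the QCameraMap template:
//
//     template <typename FwkType, typename HalType, size_t N>
//     bool exampleLookupFwkName(const QCameraMap<FwkType, HalType> (&table)[N],
//             HalType halValue, FwkType *fwkValue) {
//         for (size_t i = 0; i < N; i++) {
//             if (table[i].hal_name == halValue) { // first match wins
//                 *fwkValue = table[i].fwk_name;
//                 return true;
//             }
//         }
//         return false;
//     }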

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize = QCamera3HardwareInterface::initialize,
    .configure_streams = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops = NULL,
    .dump = QCamera3HardwareInterface::dump,
    .flush = QCamera3HardwareInterface::flush,
    .reserved = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
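
// Illustrative usage (grounded in calls made later in this file): callers tag
// a named event so Easel startup latency can be reconstructed from logcat,
// e.g. logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open"). When
// gEaselProfilingEnabled is set, the event is logged with a CLOCK_BOOTTIME
// timestamp in milliseconds; otherwise the call is a no-op.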

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            Mutex::Autolock l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format; check against
         * the appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec, depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from frameworks is always full active array size,
                 * but it is not clear from the spec if the framework will always
                 * follow that; also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001324/*===========================================================================
1325 * FUNCTION : validateUsageFlags
1326 *
1327 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1328 *
1329 * PARAMETERS :
1330 * @stream_list : streams to be configured
1331 *
1332 * RETURN :
1333 * NO_ERROR if the usage flags are supported
1334 * error code if usage flags are not supported
1335 *
1336 *==========================================================================*/
1337int QCamera3HardwareInterface::validateUsageFlags(
1338 const camera3_stream_configuration_t* streamList)
1339{
1340 for (size_t j = 0; j < streamList->num_streams; j++) {
1341 const camera3_stream_t *newStream = streamList->streams[j];
1342
1343 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1344 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1345 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1346 continue;
1347 }
1348
Jason Leec4cf5032017-05-24 18:31:41 -07001349 // Here we only care whether it's EIS3 or not
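// In this check the IS type only influences which default stream formats are
// queried below; front camera and constrained high-speed configurations are
// treated as IS_TYPE_NONE.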
1350 char is_type_value[PROPERTY_VALUE_MAX];
1351 property_get("persist.camera.is_type", is_type_value, "4");
1352 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1353 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1354 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1355 isType = IS_TYPE_NONE;
1356
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001357 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1358 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1359 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1360 bool forcePreviewUBWC = true;
1361 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1362 forcePreviewUBWC = false;
1363 }
1364 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001365 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001366 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001367 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001368 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001369 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001370
1371 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1372 // So color spaces will always match.
1373
1374 // Check whether underlying formats of shared streams match.
1375 if (isVideo && isPreview && videoFormat != previewFormat) {
1376 LOGE("Combined video and preview usage flag is not supported");
1377 return -EINVAL;
1378 }
1379 if (isPreview && isZSL && previewFormat != zslFormat) {
1380 LOGE("Combined preview and zsl usage flag is not supported");
1381 return -EINVAL;
1382 }
1383 if (isVideo && isZSL && videoFormat != zslFormat) {
1384 LOGE("Combined video and zsl usage flag is not supported");
1385 return -EINVAL;
1386 }
1387 }
1388 return NO_ERROR;
1389}
1390
1391/*===========================================================================
1392 * FUNCTION : validateUsageFlagsForEis
1393 *
1394 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1395 *
1396 * PARAMETERS :
1397 * @stream_list : streams to be configured
1398 *
1399 * RETURN :
1400 * NO_ERROR if the usage flags are supported
1401 * error code if usage flags are not supported
1402 *
1403 *==========================================================================*/
1404int QCamera3HardwareInterface::validateUsageFlagsForEis(
1405 const camera3_stream_configuration_t* streamList)
1406{
1407 for (size_t j = 0; j < streamList->num_streams; j++) {
1408 const camera3_stream_t *newStream = streamList->streams[j];
1409
1410 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1411 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1412
1413 // Because EIS is "hard-coded" for certain use cases, and the current
1414 // implementation doesn't support a stream shared between preview and
1415 // video, return failure if EIS is forced on.
1416 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1417 LOGE("Combined video and preview usage flag is not supported due to EIS");
1418 return -EINVAL;
1419 }
1420 }
1421 return NO_ERROR;
1422}
1423
Thierry Strudel3d639192016-09-09 11:52:26 -07001424/*==============================================================================
1425 * FUNCTION : isSupportChannelNeeded
1426 *
1427 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1428 *
1429 * PARAMETERS :
1430 * @stream_list : streams to be configured
1431 * @stream_config_info : the config info for streams to be configured
1432 *
1433 * RETURN : Boolean true/false decision
1434 *
1435 *==========================================================================*/
1436bool QCamera3HardwareInterface::isSupportChannelNeeded(
1437 camera3_stream_configuration_t *streamList,
1438 cam_stream_size_info_t stream_config_info)
1439{
1440 uint32_t i;
1441 bool pprocRequested = false;
1442 /* Check for conditions where PProc pipeline does not have any streams */
1443 for (i = 0; i < stream_config_info.num_streams; i++) {
1444 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1445 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1446 pprocRequested = true;
1447 break;
1448 }
1449 }
1450
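/* No configured stream requires post-processing, so a support channel is
* needed to keep the PProc pipeline populated */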
1451 if (pprocRequested == false )
1452 return true;
1453
1454 /* Dummy stream needed if only raw or jpeg streams present */
1455 for (i = 0; i < streamList->num_streams; i++) {
1456 switch(streamList->streams[i]->format) {
1457 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1458 case HAL_PIXEL_FORMAT_RAW10:
1459 case HAL_PIXEL_FORMAT_RAW16:
1460 case HAL_PIXEL_FORMAT_BLOB:
1461 break;
1462 default:
1463 return false;
1464 }
1465 }
1466 return true;
1467}
1468
1469/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001470 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001471 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001472 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001473 *
1474 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001475 * @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001476 *
1477 * RETURN : int32_t type of status
1478 * NO_ERROR -- success
1479 * non-zero failure code
1480 *
1481 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001482int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001483{
1484 int32_t rc = NO_ERROR;
1485
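// Find the largest width/height across all configured streams; this is sent
// down as CAM_INTF_MAX_DIMENSION below so the backend can select a sensor mode
// large enough for every stream before the mode info query.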
1486 cam_dimension_t max_dim = {0, 0};
1487 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1488 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1489 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1490 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1491 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1492 }
1493
1494 clear_metadata_buffer(mParameters);
1495
1496 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1497 max_dim);
1498 if (rc != NO_ERROR) {
1499 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1500 return rc;
1501 }
1502
1503 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1504 if (rc != NO_ERROR) {
1505 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1506 return rc;
1507 }
1508
1509 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001510 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001511
1512 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1513 mParameters);
1514 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001515 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001516 return rc;
1517 }
1518
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001519 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001520 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1521 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1522 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1523 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1524 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001525
1526 return rc;
1527}
1528
1529/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001530 * FUNCTION : getCurrentSensorModeInfo
1531 *
1532 * DESCRIPTION: Get sensor mode information that is currently selected.
1533 *
1534 * PARAMETERS :
1535 * @sensorModeInfo : sensor mode information (output)
1536 *
1537 * RETURN : int32_t type of status
1538 * NO_ERROR -- success
1539 * non-zero failure code
1540 *
1541 *==========================================================================*/
1542int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1543{
1544 int32_t rc = NO_ERROR;
1545
1546 clear_metadata_buffer(mParameters);
1547 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1548
1549 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1550 mParameters);
1551 if (rc != NO_ERROR) {
1552 LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1553 return rc;
1554 }
1555
1556 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1557 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1558 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1559 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1560 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1561 sensorModeInfo.num_raw_bits);
1562
1563 return rc;
1564}
1565
1566/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001567 * FUNCTION : addToPPFeatureMask
1568 *
1569 * DESCRIPTION: add additional features to pp feature mask based on
1570 * stream type and usecase
1571 *
1572 * PARAMETERS :
1573 * @stream_format : stream type for feature mask
1574 * @stream_idx : stream idx within postprocess_mask list to change
1575 *
1576 * RETURN : None
1577 *
1578 *==========================================================================*/
1579void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1580 uint32_t stream_idx)
1581{
1582 char feature_mask_value[PROPERTY_VALUE_MAX];
1583 cam_feature_mask_t feature_mask;
1584 int args_converted;
1585 int property_len;
1586
1587 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001588#ifdef _LE_CAMERA_
1589 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1590 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1591 property_len = property_get("persist.camera.hal3.feature",
1592 feature_mask_value, swtnr_feature_mask_value);
1593#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001594 property_len = property_get("persist.camera.hal3.feature",
1595 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001596#endif
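/* The feature mask property may be specified either in hex (0x-prefixed) or decimal */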
Thierry Strudel3d639192016-09-09 11:52:26 -07001597 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1598 (feature_mask_value[1] == 'x')) {
1599 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1600 } else {
1601 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1602 }
1603 if (1 != args_converted) {
1604 feature_mask = 0;
1605 LOGE("Wrong feature mask %s", feature_mask_value);
1606 return;
1607 }
1608
1609 switch (stream_format) {
1610 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1611 /* Add LLVD to pp feature mask only if video hint is enabled */
1612 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1613 mStreamConfigInfo.postprocess_mask[stream_idx]
1614 |= CAM_QTI_FEATURE_SW_TNR;
1615 LOGH("Added SW TNR to pp feature mask");
1616 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1617 mStreamConfigInfo.postprocess_mask[stream_idx]
1618 |= CAM_QCOM_FEATURE_LLVD;
1619 LOGH("Added LLVD SeeMore to pp feature mask");
1620 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001621 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1622 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1623 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1624 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001625 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1626 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1627 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1628 CAM_QTI_FEATURE_BINNING_CORRECTION;
1629 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001630 break;
1631 }
1632 default:
1633 break;
1634 }
1635 LOGD("PP feature mask %llx",
1636 mStreamConfigInfo.postprocess_mask[stream_idx]);
1637}
1638
1639/*==============================================================================
1640 * FUNCTION : updateFpsInPreviewBuffer
1641 *
1642 * DESCRIPTION: update FPS information in preview buffer.
1643 *
1644 * PARAMETERS :
1645 * @metadata : pointer to metadata buffer
1646 * @frame_number: frame_number to look for in pending buffer list
1647 *
1648 * RETURN : None
1649 *
1650 *==========================================================================*/
1651void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1652 uint32_t frame_number)
1653{
1654 // Mark all pending buffers for this particular request
1655 // with corresponding framerate information
1656 for (List<PendingBuffersInRequest>::iterator req =
1657 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1658 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1659 for(List<PendingBufferInfo>::iterator j =
1660 req->mPendingBufferList.begin();
1661 j != req->mPendingBufferList.end(); j++) {
1662 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1663 if ((req->frame_number == frame_number) &&
1664 (channel->getStreamTypeMask() &
1665 (1U << CAM_STREAM_TYPE_PREVIEW))) {
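// Write the request's max FPS into the gralloc private handle so the display
// side can pick up the preview refresh rate.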
1666 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1667 CAM_INTF_PARM_FPS_RANGE, metadata) {
1668 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1669 struct private_handle_t *priv_handle =
1670 (struct private_handle_t *)(*(j->buffer));
1671 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1672 }
1673 }
1674 }
1675 }
1676}
1677
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001678/*==============================================================================
1679 * FUNCTION : updateTimeStampInPendingBuffers
1680 *
1681 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1682 * of a frame number
1683 *
1684 * PARAMETERS :
1685 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1686 * @timestamp : timestamp to be set
1687 *
1688 * RETURN : None
1689 *
1690 *==========================================================================*/
1691void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1692 uint32_t frameNumber, nsecs_t timestamp)
1693{
1694 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1695 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1696 if (req->frame_number != frameNumber)
1697 continue;
1698
1699 for (auto k = req->mPendingBufferList.begin();
1700 k != req->mPendingBufferList.end(); k++ ) {
1701 struct private_handle_t *priv_handle =
1702 (struct private_handle_t *) (*(k->buffer));
1703 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1704 }
1705 }
1706 return;
1707}
1708
Thierry Strudel3d639192016-09-09 11:52:26 -07001709/*===========================================================================
1710 * FUNCTION : configureStreams
1711 *
1712 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1713 * and output streams.
1714 *
1715 * PARAMETERS :
1716 * @stream_list : streams to be configured
1717 *
1718 * RETURN :
1719 *
1720 *==========================================================================*/
1721int QCamera3HardwareInterface::configureStreams(
1722 camera3_stream_configuration_t *streamList)
1723{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001724 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001725 int rc = 0;
1726
1727 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001728 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001729 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001730 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001731
1732 return rc;
1733}
1734
1735/*===========================================================================
1736 * FUNCTION : configureStreamsPerfLocked
1737 *
1738 * DESCRIPTION: configureStreams while perfLock is held.
1739 *
1740 * PARAMETERS :
1741 * @stream_list : streams to be configured
1742 *
1743 * RETURN : int32_t type of status
1744 * NO_ERROR -- success
1745 * non-zero failure code
1746 *==========================================================================*/
1747int QCamera3HardwareInterface::configureStreamsPerfLocked(
1748 camera3_stream_configuration_t *streamList)
1749{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001750 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001751 int rc = 0;
1752
1753 // Sanity check stream_list
1754 if (streamList == NULL) {
1755 LOGE("NULL stream configuration");
1756 return BAD_VALUE;
1757 }
1758 if (streamList->streams == NULL) {
1759 LOGE("NULL stream list");
1760 return BAD_VALUE;
1761 }
1762
1763 if (streamList->num_streams < 1) {
1764 LOGE("Bad number of streams requested: %d",
1765 streamList->num_streams);
1766 return BAD_VALUE;
1767 }
1768
1769 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1770 LOGE("Maximum number of streams %d exceeded: %d",
1771 MAX_NUM_STREAMS, streamList->num_streams);
1772 return BAD_VALUE;
1773 }
1774
Jason Leec4cf5032017-05-24 18:31:41 -07001775 mOpMode = streamList->operation_mode;
1776 LOGD("mOpMode: %d", mOpMode);
1777
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001778 rc = validateUsageFlags(streamList);
1779 if (rc != NO_ERROR) {
1780 return rc;
1781 }
1782
Thierry Strudel3d639192016-09-09 11:52:26 -07001783 /* first invalidate all the steams in the mStreamList
1784 * if they appear again, they will be validated */
1785 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1786 it != mStreamInfo.end(); it++) {
1787 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1788 if (channel) {
1789 channel->stop();
1790 }
1791 (*it)->status = INVALID;
1792 }
1793
1794 if (mRawDumpChannel) {
1795 mRawDumpChannel->stop();
1796 delete mRawDumpChannel;
1797 mRawDumpChannel = NULL;
1798 }
1799
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001800 if (mHdrPlusRawSrcChannel) {
1801 mHdrPlusRawSrcChannel->stop();
1802 delete mHdrPlusRawSrcChannel;
1803 mHdrPlusRawSrcChannel = NULL;
1804 }
1805
Thierry Strudel3d639192016-09-09 11:52:26 -07001806 if (mSupportChannel)
1807 mSupportChannel->stop();
1808
1809 if (mAnalysisChannel) {
1810 mAnalysisChannel->stop();
1811 }
1812 if (mMetadataChannel) {
1813 /* If content of mStreamInfo is not 0, there is metadata stream */
1814 mMetadataChannel->stop();
1815 }
1816 if (mChannelHandle) {
1817 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1818 mChannelHandle);
1819 LOGD("stopping channel %d", mChannelHandle);
1820 }
1821
1822 pthread_mutex_lock(&mMutex);
1823
1824 // Check state
1825 switch (mState) {
1826 case INITIALIZED:
1827 case CONFIGURED:
1828 case STARTED:
1829 /* valid state */
1830 break;
1831 default:
1832 LOGE("Invalid state %d", mState);
1833 pthread_mutex_unlock(&mMutex);
1834 return -ENODEV;
1835 }
1836
1837 /* Check whether we have video stream */
1838 m_bIs4KVideo = false;
1839 m_bIsVideo = false;
1840 m_bEisSupportedSize = false;
1841 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001842 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001843 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001844 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001845 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001846 uint32_t videoWidth = 0U;
1847 uint32_t videoHeight = 0U;
1848 size_t rawStreamCnt = 0;
1849 size_t stallStreamCnt = 0;
1850 size_t processedStreamCnt = 0;
1851 // Number of streams on ISP encoder path
1852 size_t numStreamsOnEncoder = 0;
1853 size_t numYuv888OnEncoder = 0;
1854 bool bYuv888OverrideJpeg = false;
1855 cam_dimension_t largeYuv888Size = {0, 0};
1856 cam_dimension_t maxViewfinderSize = {0, 0};
1857 bool bJpegExceeds4K = false;
1858 bool bJpegOnEncoder = false;
1859 bool bUseCommonFeatureMask = false;
1860 cam_feature_mask_t commonFeatureMask = 0;
1861 bool bSmallJpegSize = false;
1862 uint32_t width_ratio;
1863 uint32_t height_ratio;
1864 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1865 camera3_stream_t *inputStream = NULL;
1866 bool isJpeg = false;
1867 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001868 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001869 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001870
1871 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1872
1873 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001874 uint8_t eis_prop_set;
1875 uint32_t maxEisWidth = 0;
1876 uint32_t maxEisHeight = 0;
1877
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001878 // Initialize all instant AEC related variables
1879 mInstantAEC = false;
1880 mResetInstantAEC = false;
1881 mInstantAECSettledFrameNumber = 0;
1882 mAecSkipDisplayFrameBound = 0;
1883 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001884 mCurrFeatureState = 0;
1885 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001886
Thierry Strudel3d639192016-09-09 11:52:26 -07001887 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1888
1889 size_t count = IS_TYPE_MAX;
1890 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1891 for (size_t i = 0; i < count; i++) {
1892 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001893 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1894 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001895 break;
1896 }
1897 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001898
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001899 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001900 maxEisWidth = MAX_EIS_WIDTH;
1901 maxEisHeight = MAX_EIS_HEIGHT;
1902 }
1903
1904 /* EIS setprop control */
1905 char eis_prop[PROPERTY_VALUE_MAX];
1906 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001907 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001908 eis_prop_set = (uint8_t)atoi(eis_prop);
1909
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001910 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001911 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1912
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001913 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1914 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001915
Thierry Strudel3d639192016-09-09 11:52:26 -07001916 /* stream configurations */
1917 for (size_t i = 0; i < streamList->num_streams; i++) {
1918 camera3_stream_t *newStream = streamList->streams[i];
1919 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1920 "height = %d, rotation = %d, usage = 0x%x",
1921 i, newStream->stream_type, newStream->format,
1922 newStream->width, newStream->height, newStream->rotation,
1923 newStream->usage);
1924 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1925 newStream->stream_type == CAMERA3_STREAM_INPUT){
1926 isZsl = true;
1927 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001928 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1929 IS_USAGE_PREVIEW(newStream->usage)) {
1930 isPreview = true;
1931 }
1932
Thierry Strudel3d639192016-09-09 11:52:26 -07001933 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1934 inputStream = newStream;
1935 }
1936
Emilian Peev7650c122017-01-19 08:24:33 -08001937 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1938 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001939 isJpeg = true;
1940 jpegSize.width = newStream->width;
1941 jpegSize.height = newStream->height;
1942 if (newStream->width > VIDEO_4K_WIDTH ||
1943 newStream->height > VIDEO_4K_HEIGHT)
1944 bJpegExceeds4K = true;
1945 }
1946
1947 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1948 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1949 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001950 // In HAL3 we can have multiple different video streams.
1951 // The variables video width and height are used below as
1952 // dimensions of the biggest of them
1953 if (videoWidth < newStream->width ||
1954 videoHeight < newStream->height) {
1955 videoWidth = newStream->width;
1956 videoHeight = newStream->height;
1957 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001958 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1959 (VIDEO_4K_HEIGHT <= newStream->height)) {
1960 m_bIs4KVideo = true;
1961 }
1962 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1963 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001964
Thierry Strudel3d639192016-09-09 11:52:26 -07001965 }
1966 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1967 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1968 switch (newStream->format) {
1969 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001970 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1971 depthPresent = true;
1972 break;
1973 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001974 stallStreamCnt++;
1975 if (isOnEncoder(maxViewfinderSize, newStream->width,
1976 newStream->height)) {
1977 numStreamsOnEncoder++;
1978 bJpegOnEncoder = true;
1979 }
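// A JPEG smaller than (active array / max_downscale_factor) cannot be produced
// by ISP downscaling alone; flag it so the snapshot stream gets the superset
// post-processing mask later.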
1980 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1981 newStream->width);
1982 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1983 newStream->height);
1984 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1985 "FATAL: max_downscale_factor cannot be zero and so assert");
1986 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1987 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1988 LOGH("Setting small jpeg size flag to true");
1989 bSmallJpegSize = true;
1990 }
1991 break;
1992 case HAL_PIXEL_FORMAT_RAW10:
1993 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1994 case HAL_PIXEL_FORMAT_RAW16:
1995 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001996 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1997 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1998 pdStatCount++;
1999 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002000 break;
2001 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2002 processedStreamCnt++;
2003 if (isOnEncoder(maxViewfinderSize, newStream->width,
2004 newStream->height)) {
2005 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2006 !IS_USAGE_ZSL(newStream->usage)) {
2007 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2008 }
2009 numStreamsOnEncoder++;
2010 }
2011 break;
2012 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2013 processedStreamCnt++;
2014 if (isOnEncoder(maxViewfinderSize, newStream->width,
2015 newStream->height)) {
2016 // If Yuv888 size is not greater than 4K, set feature mask
2017 // to SUPERSET so that it support concurrent request on
2018 // YUV and JPEG.
2019 if (newStream->width <= VIDEO_4K_WIDTH &&
2020 newStream->height <= VIDEO_4K_HEIGHT) {
2021 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2022 }
2023 numStreamsOnEncoder++;
2024 numYuv888OnEncoder++;
2025 largeYuv888Size.width = newStream->width;
2026 largeYuv888Size.height = newStream->height;
2027 }
2028 break;
2029 default:
2030 processedStreamCnt++;
2031 if (isOnEncoder(maxViewfinderSize, newStream->width,
2032 newStream->height)) {
2033 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2034 numStreamsOnEncoder++;
2035 }
2036 break;
2037 }
2038
2039 }
2040 }
2041
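// Disable EIS for front cameras and for configurations without a video stream.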
2042 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2043 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2044 !m_bIsVideo) {
2045 m_bEisEnable = false;
2046 }
2047
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002048 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2049 pthread_mutex_unlock(&mMutex);
2050 return -EINVAL;
2051 }
2052
Thierry Strudel54dc9782017-02-15 12:12:10 -08002053 uint8_t forceEnableTnr = 0;
2054 char tnr_prop[PROPERTY_VALUE_MAX];
2055 memset(tnr_prop, 0, sizeof(tnr_prop));
2056 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2057 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2058
Thierry Strudel3d639192016-09-09 11:52:26 -07002059 /* Logic to enable/disable TNR based on specific config size/etc.*/
2060 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002061 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2062 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002063 else if (forceEnableTnr)
2064 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002065
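// Video HDR is controlled by the persist.camera.hdr.video property and only
// applies to video use cases outside constrained high-speed mode.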
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002066 char videoHdrProp[PROPERTY_VALUE_MAX];
2067 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2068 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2069 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2070
2071 if (hdr_mode_prop == 1 && m_bIsVideo &&
2072 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2073 m_bVideoHdrEnabled = true;
2074 else
2075 m_bVideoHdrEnabled = false;
2076
2077
Thierry Strudel3d639192016-09-09 11:52:26 -07002078 /* Check if num_streams is sane */
2079 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2080 rawStreamCnt > MAX_RAW_STREAMS ||
2081 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2082 LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2083 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2084 pthread_mutex_unlock(&mMutex);
2085 return -EINVAL;
2086 }
2087 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002088 if (isZsl && m_bIs4KVideo) {
2089 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002090 pthread_mutex_unlock(&mMutex);
2091 return -EINVAL;
2092 }
2093 /* Check if stream sizes are sane */
2094 if (numStreamsOnEncoder > 2) {
2095 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2096 pthread_mutex_unlock(&mMutex);
2097 return -EINVAL;
2098 } else if (1 < numStreamsOnEncoder){
2099 bUseCommonFeatureMask = true;
2100 LOGH("Multiple streams above max viewfinder size, common mask needed");
2101 }
2102
2103 /* Check if BLOB size is greater than 4k in 4k recording case */
2104 if (m_bIs4KVideo && bJpegExceeds4K) {
2105 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2106 pthread_mutex_unlock(&mMutex);
2107 return -EINVAL;
2108 }
2109
Emilian Peev7650c122017-01-19 08:24:33 -08002110 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2111 depthPresent) {
2112 LOGE("HAL doesn't support depth streams in HFR mode!");
2113 pthread_mutex_unlock(&mMutex);
2114 return -EINVAL;
2115 }
2116
Thierry Strudel3d639192016-09-09 11:52:26 -07002117 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2118 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2119 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2120 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2121 // configurations:
2122 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2123 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2124 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2125 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2126 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2127 __func__);
2128 pthread_mutex_unlock(&mMutex);
2129 return -EINVAL;
2130 }
2131
2132 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2133 // the YUV stream's size is greater or equal to the JPEG size, set common
2134 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2135 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2136 jpegSize.width, jpegSize.height) &&
2137 largeYuv888Size.width > jpegSize.width &&
2138 largeYuv888Size.height > jpegSize.height) {
2139 bYuv888OverrideJpeg = true;
2140 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2141 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2142 }
2143
2144 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2145 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2146 commonFeatureMask);
2147 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2148 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2149
2150 rc = validateStreamDimensions(streamList);
2151 if (rc == NO_ERROR) {
2152 rc = validateStreamRotations(streamList);
2153 }
2154 if (rc != NO_ERROR) {
2155 LOGE("Invalid stream configuration requested!");
2156 pthread_mutex_unlock(&mMutex);
2157 return rc;
2158 }
2159
Emilian Peev0f3c3162017-03-15 12:57:46 +00002160 if (1 < pdStatCount) {
2161 LOGE("HAL doesn't support multiple PD streams");
2162 pthread_mutex_unlock(&mMutex);
2163 return -EINVAL;
2164 }
2165
2166 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2167 (1 == pdStatCount)) {
2168 LOGE("HAL doesn't support PD streams in HFR mode!");
2169 pthread_mutex_unlock(&mMutex);
2170 return -EINVAL;
2171 }
2172
Thierry Strudel3d639192016-09-09 11:52:26 -07002173 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2174 for (size_t i = 0; i < streamList->num_streams; i++) {
2175 camera3_stream_t *newStream = streamList->streams[i];
2176 LOGH("newStream type = %d, stream format = %d "
2177 "stream size : %d x %d, stream rotation = %d",
2178 newStream->stream_type, newStream->format,
2179 newStream->width, newStream->height, newStream->rotation);
2180 //if the stream is in the mStreamList validate it
2181 bool stream_exists = false;
2182 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2183 it != mStreamInfo.end(); it++) {
2184 if ((*it)->stream == newStream) {
2185 QCamera3ProcessingChannel *channel =
2186 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2187 stream_exists = true;
2188 if (channel)
2189 delete channel;
2190 (*it)->status = VALID;
2191 (*it)->stream->priv = NULL;
2192 (*it)->channel = NULL;
2193 }
2194 }
2195 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2196 //new stream
2197 stream_info_t* stream_info;
2198 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2199 if (!stream_info) {
2200 LOGE("Could not allocate stream info");
2201 rc = -ENOMEM;
2202 pthread_mutex_unlock(&mMutex);
2203 return rc;
2204 }
2205 stream_info->stream = newStream;
2206 stream_info->status = VALID;
2207 stream_info->channel = NULL;
2208 mStreamInfo.push_back(stream_info);
2209 }
2210 /* Covers Opaque ZSL and API1 F/W ZSL */
2211 if (IS_USAGE_ZSL(newStream->usage)
2212 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2213 if (zslStream != NULL) {
2214 LOGE("Multiple input/reprocess streams requested!");
2215 pthread_mutex_unlock(&mMutex);
2216 return BAD_VALUE;
2217 }
2218 zslStream = newStream;
2219 }
2220 /* Covers YUV reprocess */
2221 if (inputStream != NULL) {
2222 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2223 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2224 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2225 && inputStream->width == newStream->width
2226 && inputStream->height == newStream->height) {
2227 if (zslStream != NULL) {
2228 /* This scenario indicates that multiple YUV streams with the same size
2229 * as the input stream have been requested. Since the zsl stream handle
2230 * is solely used to override the size of streams which share h/w
2231 * streams, we just make a guess here as to which stream is the ZSL
2232 * stream; this will be refactored once we have generic logic for
2233 * streams sharing encoder output
2234 */
2235 LOGH("Warning, Multiple ip/reprocess streams requested!");
2236 }
2237 zslStream = newStream;
2238 }
2239 }
2240 }
2241
2242 /* If a zsl stream is set, we know that we have configured at least one input or
2243 bidirectional stream */
2244 if (NULL != zslStream) {
2245 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2246 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2247 mInputStreamInfo.format = zslStream->format;
2248 mInputStreamInfo.usage = zslStream->usage;
2249 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2250 mInputStreamInfo.dim.width,
2251 mInputStreamInfo.dim.height,
2252 mInputStreamInfo.format, mInputStreamInfo.usage);
2253 }
2254
2255 cleanAndSortStreamInfo();
2256 if (mMetadataChannel) {
2257 delete mMetadataChannel;
2258 mMetadataChannel = NULL;
2259 }
2260 if (mSupportChannel) {
2261 delete mSupportChannel;
2262 mSupportChannel = NULL;
2263 }
2264
2265 if (mAnalysisChannel) {
2266 delete mAnalysisChannel;
2267 mAnalysisChannel = NULL;
2268 }
2269
2270 if (mDummyBatchChannel) {
2271 delete mDummyBatchChannel;
2272 mDummyBatchChannel = NULL;
2273 }
2274
Emilian Peev7650c122017-01-19 08:24:33 -08002275 if (mDepthChannel) {
2276 mDepthChannel = NULL;
2277 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002278 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002279
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002280 mShutterDispatcher.clear();
2281 mOutputBufferDispatcher.clear();
2282
Thierry Strudel2896d122017-02-23 19:18:03 -08002283 char is_type_value[PROPERTY_VALUE_MAX];
2284 property_get("persist.camera.is_type", is_type_value, "4");
2285 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2286
Binhao Line406f062017-05-03 14:39:44 -07002287 char property_value[PROPERTY_VALUE_MAX];
2288 property_get("persist.camera.gzoom.at", property_value, "0");
2289 int goog_zoom_at = atoi(property_value);
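// Bit 0 of persist.camera.gzoom.at enables Google zoom on the video stream and
// bit 1 enables it on preview; both are restricted to the back camera.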
Jason Leec4cf5032017-05-24 18:31:41 -07002290 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2291 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2292 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2293 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002294
2295 property_get("persist.camera.gzoom.4k", property_value, "0");
2296 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2297
Thierry Strudel3d639192016-09-09 11:52:26 -07002298 //Create metadata channel and initialize it
2299 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2300 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2301 gCamCapability[mCameraId]->color_arrangement);
2302 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2303 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002304 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002305 if (mMetadataChannel == NULL) {
2306 LOGE("failed to allocate metadata channel");
2307 rc = -ENOMEM;
2308 pthread_mutex_unlock(&mMutex);
2309 return rc;
2310 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002311 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002312 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2313 if (rc < 0) {
2314 LOGE("metadata channel initialization failed");
2315 delete mMetadataChannel;
2316 mMetadataChannel = NULL;
2317 pthread_mutex_unlock(&mMutex);
2318 return rc;
2319 }
2320
Thierry Strudel2896d122017-02-23 19:18:03 -08002321 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002322 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002323 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002324 // Keep track of preview/video streams indices.
2325 // There could be more than one preview streams, but only one video stream.
2326 int32_t video_stream_idx = -1;
2327 int32_t preview_stream_idx[streamList->num_streams];
2328 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002329 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2330 /* Allocate channel objects for the requested streams */
2331 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002332
Thierry Strudel3d639192016-09-09 11:52:26 -07002333 camera3_stream_t *newStream = streamList->streams[i];
2334 uint32_t stream_usage = newStream->usage;
2335 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2336 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2337 struct camera_info *p_info = NULL;
2338 pthread_mutex_lock(&gCamLock);
2339 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2340 pthread_mutex_unlock(&gCamLock);
2341 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2342 || IS_USAGE_ZSL(newStream->usage)) &&
2343 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002344 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002345 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002346 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2347 if (bUseCommonFeatureMask)
2348 zsl_ppmask = commonFeatureMask;
2349 else
2350 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002351 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002352 if (numStreamsOnEncoder > 0)
2353 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2354 else
2355 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002356 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002357 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002358 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002359 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002360 LOGH("Input stream configured, reprocess config");
2361 } else {
2362 //for non zsl streams find out the format
2363 switch (newStream->format) {
2364 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2365 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002366 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002367 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2368 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2369 /* add additional features to pp feature mask */
2370 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2371 mStreamConfigInfo.num_streams);
2372
2373 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2374 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2375 CAM_STREAM_TYPE_VIDEO;
2376 if (m_bTnrEnabled && m_bTnrVideo) {
2377 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2378 CAM_QCOM_FEATURE_CPP_TNR;
2379 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2380 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2381 ~CAM_QCOM_FEATURE_CDS;
2382 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002383 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2384 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2385 CAM_QTI_FEATURE_PPEISCORE;
2386 }
Binhao Line406f062017-05-03 14:39:44 -07002387 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2388 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2389 CAM_QCOM_FEATURE_GOOG_ZOOM;
2390 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002391 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002392 } else {
2393 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2394 CAM_STREAM_TYPE_PREVIEW;
2395 if (m_bTnrEnabled && m_bTnrPreview) {
2396 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2397 CAM_QCOM_FEATURE_CPP_TNR;
2398 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2399 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2400 ~CAM_QCOM_FEATURE_CDS;
2401 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002402 if(!m_bSwTnrPreview) {
2403 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2404 ~CAM_QTI_FEATURE_SW_TNR;
2405 }
Binhao Line406f062017-05-03 14:39:44 -07002406 if (is_goog_zoom_preview_enabled) {
2407 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2408 CAM_QCOM_FEATURE_GOOG_ZOOM;
2409 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002410 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002411 padding_info.width_padding = mSurfaceStridePadding;
2412 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002413 previewSize.width = (int32_t)newStream->width;
2414 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002415 }
2416 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2417 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2418 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2419 newStream->height;
2420 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2421 newStream->width;
2422 }
2423 }
2424 break;
2425 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002426 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002427 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2428 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2429 if (bUseCommonFeatureMask)
2430 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2431 commonFeatureMask;
2432 else
2433 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2434 CAM_QCOM_FEATURE_NONE;
2435 } else {
2436 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2437 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2438 }
2439 break;
2440 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002441 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002442 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2443 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2444 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2445 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2446 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002447 /* Remove rotation if it is not supported
2448 for 4K LiveVideo snapshot case (online processing) */
2449 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2450 CAM_QCOM_FEATURE_ROTATION)) {
2451 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2452 &= ~CAM_QCOM_FEATURE_ROTATION;
2453 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002454 } else {
2455 if (bUseCommonFeatureMask &&
2456 isOnEncoder(maxViewfinderSize, newStream->width,
2457 newStream->height)) {
2458 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2459 } else {
2460 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2461 }
2462 }
2463 if (isZsl) {
2464 if (zslStream) {
2465 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2466 (int32_t)zslStream->width;
2467 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2468 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002469 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2470 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002471 } else {
2472 LOGE("Error, No ZSL stream identified");
2473 pthread_mutex_unlock(&mMutex);
2474 return -EINVAL;
2475 }
2476 } else if (m_bIs4KVideo) {
2477 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2478 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2479 } else if (bYuv888OverrideJpeg) {
2480 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2481 (int32_t)largeYuv888Size.width;
2482 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2483 (int32_t)largeYuv888Size.height;
2484 }
2485 break;
2486 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2487 case HAL_PIXEL_FORMAT_RAW16:
2488 case HAL_PIXEL_FORMAT_RAW10:
2489 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2490 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2491 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002492 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2493 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2494 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2495 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2496 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2497 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2498 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2499 gCamCapability[mCameraId]->dt[mPDIndex];
2500 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2501 gCamCapability[mCameraId]->vc[mPDIndex];
2502 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002503 break;
2504 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002505 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002506 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2507 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2508 break;
2509 }
2510 }
2511
2512 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2513 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2514 gCamCapability[mCameraId]->color_arrangement);
2515
2516 if (newStream->priv == NULL) {
2517 //New stream, construct channel
2518 switch (newStream->stream_type) {
2519 case CAMERA3_STREAM_INPUT:
2520 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2521 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2522 break;
2523 case CAMERA3_STREAM_BIDIRECTIONAL:
2524 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2525 GRALLOC_USAGE_HW_CAMERA_WRITE;
2526 break;
2527 case CAMERA3_STREAM_OUTPUT:
2528 /* For video encoding stream, set read/write rarely
2529 * flag so that they may be set to un-cached */
2530 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2531 newStream->usage |=
2532 (GRALLOC_USAGE_SW_READ_RARELY |
2533 GRALLOC_USAGE_SW_WRITE_RARELY |
2534 GRALLOC_USAGE_HW_CAMERA_WRITE);
2535 else if (IS_USAGE_ZSL(newStream->usage))
2536 {
2537 LOGD("ZSL usage flag skipping");
2538 }
2539 else if (newStream == zslStream
2540 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2541 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2542 } else
2543 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2544 break;
2545 default:
2546 LOGE("Invalid stream_type %d", newStream->stream_type);
2547 break;
2548 }
2549
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002550 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002551 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2552 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2553 QCamera3ProcessingChannel *channel = NULL;
2554 switch (newStream->format) {
2555 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2556 if ((newStream->usage &
2557 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2558 (streamList->operation_mode ==
2559 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2560 ) {
2561 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2562 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002563 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002564 this,
2565 newStream,
2566 (cam_stream_type_t)
2567 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2568 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2569 mMetadataChannel,
2570 0); //heap buffers are not required for HFR video channel
2571 if (channel == NULL) {
2572 LOGE("allocation of channel failed");
2573 pthread_mutex_unlock(&mMutex);
2574 return -ENOMEM;
2575 }
2576 //channel->getNumBuffers() will return 0 here so use
2577 //MAX_INFLIGHT_HFR_REQUESTS
2578 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2579 newStream->priv = channel;
2580 LOGI("num video buffers in HFR mode: %d",
2581 MAX_INFLIGHT_HFR_REQUESTS);
2582 } else {
2583 /* Copy stream contents in HFR preview only case to create
2584 * dummy batch channel so that sensor streaming is in
2585 * HFR mode */
2586 if (!m_bIsVideo && (streamList->operation_mode ==
2587 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2588 mDummyBatchStream = *newStream;
2589 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002590 int bufferCount = MAX_INFLIGHT_REQUESTS;
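// Video streams with EIS 3.x enabled use MAX_VIDEO_BUFFERS instead of the
// default MAX_INFLIGHT_REQUESTS buffer count.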
2591 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2592 CAM_STREAM_TYPE_VIDEO) {
2593 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2594 bufferCount = MAX_VIDEO_BUFFERS;
2595 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002596 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2597 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002598 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002599 this,
2600 newStream,
2601 (cam_stream_type_t)
2602 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2603 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2604 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002605 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002606 if (channel == NULL) {
2607 LOGE("allocation of channel failed");
2608 pthread_mutex_unlock(&mMutex);
2609 return -ENOMEM;
2610 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002611 /* disable UBWC for preview, though supported,
2612 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002613 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002614 (previewSize.width == (int32_t)videoWidth)&&
2615 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002616 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002617 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002618 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002619 /* When goog_zoom is linked to the preview or video stream,
2620 * disable UBWC for the linked stream */
2621 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2622 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2623 channel->setUBWCEnabled(false);
2624 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002625 newStream->max_buffers = channel->getNumBuffers();
2626 newStream->priv = channel;
2627 }
2628 break;
2629 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2630 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2631 mChannelHandle,
2632 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002633 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002634 this,
2635 newStream,
2636 (cam_stream_type_t)
2637 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2638 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2639 mMetadataChannel);
2640 if (channel == NULL) {
2641 LOGE("allocation of YUV channel failed");
2642 pthread_mutex_unlock(&mMutex);
2643 return -ENOMEM;
2644 }
2645 newStream->max_buffers = channel->getNumBuffers();
2646 newStream->priv = channel;
2647 break;
2648 }
2649 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2650 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002651 case HAL_PIXEL_FORMAT_RAW10: {
2652 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2653 (HAL_DATASPACE_DEPTH != newStream->data_space))
2654 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002655 mRawChannel = new QCamera3RawChannel(
2656 mCameraHandle->camera_handle, mChannelHandle,
2657 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002658 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002659 this, newStream,
2660 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002661 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002662 if (mRawChannel == NULL) {
2663 LOGE("allocation of raw channel failed");
2664 pthread_mutex_unlock(&mMutex);
2665 return -ENOMEM;
2666 }
2667 newStream->max_buffers = mRawChannel->getNumBuffers();
2668 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2669 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002670 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002671 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002672 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2673 mDepthChannel = new QCamera3DepthChannel(
2674 mCameraHandle->camera_handle, mChannelHandle,
2675 mCameraHandle->ops, NULL, NULL, &padding_info,
2676 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2677 mMetadataChannel);
2678 if (NULL == mDepthChannel) {
2679 LOGE("Allocation of depth channel failed");
2680 pthread_mutex_unlock(&mMutex);
2681 return NO_MEMORY;
2682 }
2683 newStream->priv = mDepthChannel;
2684 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2685 } else {
2686 // Max live snapshot inflight buffer is 1. This is to mitigate
2687 // frame drop issues for video snapshot. The more buffers being
2688 // allocated, the more frame drops there are.
2689 mPictureChannel = new QCamera3PicChannel(
2690 mCameraHandle->camera_handle, mChannelHandle,
2691 mCameraHandle->ops, captureResultCb,
2692 setBufferErrorStatus, &padding_info, this, newStream,
2693 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2694 m_bIs4KVideo, isZsl, mMetadataChannel,
2695 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2696 if (mPictureChannel == NULL) {
2697 LOGE("allocation of channel failed");
2698 pthread_mutex_unlock(&mMutex);
2699 return -ENOMEM;
2700 }
2701 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2702 newStream->max_buffers = mPictureChannel->getNumBuffers();
2703 mPictureChannel->overrideYuvSize(
2704 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2705 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002707 break;
2708
2709 default:
2710 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002711 pthread_mutex_unlock(&mMutex);
2712 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002713 }
2714 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2715 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2716 } else {
2717 LOGE("Error, Unknown stream type");
2718 pthread_mutex_unlock(&mMutex);
2719 return -EINVAL;
2720 }
2721
2722 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002723 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002724 // Here we only care whether it's EIS3 or not
2725 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2726 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2727 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2728 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002729 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002730 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002731 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002732 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2733 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2734 }
2735 }
2736
2737 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2738 it != mStreamInfo.end(); it++) {
2739 if ((*it)->stream == newStream) {
2740 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2741 break;
2742 }
2743 }
2744 } else {
2745 // Channel already exists for this stream
2746 // Do nothing for now
2747 }
2748 padding_info = gCamCapability[mCameraId]->padding_info;
2749
Emilian Peev7650c122017-01-19 08:24:33 -08002750 /* Do not add entries for the input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002751 * since there is no real stream associated with them
2752 */
Emilian Peev7650c122017-01-19 08:24:33 -08002753 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002754 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2755 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002756 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002757 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002758 }
2759
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002760 // Let buffer dispatcher know the configured streams.
2761 mOutputBufferDispatcher.configureStreams(streamList);
2762
Binhao Lincdb362a2017-04-20 13:31:54 -07002763 // By default, preview stream TNR is disabled.
2764 // Enable TNR to the preview stream if all conditions below are satisfied:
2765 // 1. video resolution <= 1080p.
2766 // 2. preview resolution == video resolution.
2767 // 3. video stream TNR is enabled.
2768 // 4. EIS 2.0 is enabled (see the illustrative example below).
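    // Illustrative example (hypothetical stream sizes): with a 1920x1080 video
    // stream that has TNR enabled and EIS 2.0 in effect, a 1920x1080 preview
    // stream gets CAM_QCOM_FEATURE_CPP_TNR added to its postprocess mask below,
    // and CAM_QCOM_FEATURE_CDS is cleared since TNR and CDS are mutually exclusive.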
2769 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2770 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2771 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2772 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2773 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2774 video_stream->width == preview_stream->width &&
2775 video_stream->height == preview_stream->height) {
2776 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2777 CAM_QCOM_FEATURE_CPP_TNR;
2778 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2779 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2780 ~CAM_QCOM_FEATURE_CDS;
2781 }
2782 }
2783
Thierry Strudel2896d122017-02-23 19:18:03 -08002784 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2785 onlyRaw = false;
2786 }
2787
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002788 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002789 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002790 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002791 cam_analysis_info_t analysisInfo;
2792 int32_t ret = NO_ERROR;
2793 ret = mCommon.getAnalysisInfo(
2794 FALSE,
2795 analysisFeatureMask,
2796 &analysisInfo);
2797 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002798 cam_color_filter_arrangement_t analysis_color_arrangement =
2799 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2800 CAM_FILTER_ARRANGEMENT_Y :
2801 gCamCapability[mCameraId]->color_arrangement);
2802 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2803 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002804 cam_dimension_t analysisDim;
2805 analysisDim = mCommon.getMatchingDimension(previewSize,
2806 analysisInfo.analysis_recommended_res);
2807
2808 mAnalysisChannel = new QCamera3SupportChannel(
2809 mCameraHandle->camera_handle,
2810 mChannelHandle,
2811 mCameraHandle->ops,
2812 &analysisInfo.analysis_padding_info,
2813 analysisFeatureMask,
2814 CAM_STREAM_TYPE_ANALYSIS,
2815 &analysisDim,
2816 (analysisInfo.analysis_format
2817 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2818 : CAM_FORMAT_YUV_420_NV21),
2819 analysisInfo.hw_analysis_supported,
2820 gCamCapability[mCameraId]->color_arrangement,
2821 this,
2822 0); // force buffer count to 0
2823 } else {
2824 LOGW("getAnalysisInfo failed, ret = %d", ret);
2825 }
2826 if (!mAnalysisChannel) {
2827 LOGW("Analysis channel cannot be created");
2828 }
2829 }
2830
Thierry Strudel3d639192016-09-09 11:52:26 -07002831 //RAW DUMP channel
2832 if (mEnableRawDump && isRawStreamRequested == false){
2833 cam_dimension_t rawDumpSize;
2834 rawDumpSize = getMaxRawSize(mCameraId);
2835 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2836 setPAAFSupport(rawDumpFeatureMask,
2837 CAM_STREAM_TYPE_RAW,
2838 gCamCapability[mCameraId]->color_arrangement);
2839 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2840 mChannelHandle,
2841 mCameraHandle->ops,
2842 rawDumpSize,
2843 &padding_info,
2844 this, rawDumpFeatureMask);
2845 if (!mRawDumpChannel) {
2846 LOGE("Raw Dump channel cannot be created");
2847 pthread_mutex_unlock(&mMutex);
2848 return -ENOMEM;
2849 }
2850 }
2851
Thierry Strudel3d639192016-09-09 11:52:26 -07002852 if (mAnalysisChannel) {
2853 cam_analysis_info_t analysisInfo;
2854 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2855 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2856 CAM_STREAM_TYPE_ANALYSIS;
2857 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2858 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002859 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002860 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2861 &analysisInfo);
2862 if (rc != NO_ERROR) {
2863 LOGE("getAnalysisInfo failed, ret = %d", rc);
2864 pthread_mutex_unlock(&mMutex);
2865 return rc;
2866 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002867 cam_color_filter_arrangement_t analysis_color_arrangement =
2868 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2869 CAM_FILTER_ARRANGEMENT_Y :
2870 gCamCapability[mCameraId]->color_arrangement);
2871 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2872 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2873 analysis_color_arrangement);
2874
Thierry Strudel3d639192016-09-09 11:52:26 -07002875 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002876 mCommon.getMatchingDimension(previewSize,
2877 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002878 mStreamConfigInfo.num_streams++;
2879 }
2880
Thierry Strudel2896d122017-02-23 19:18:03 -08002881 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002882 cam_analysis_info_t supportInfo;
2883 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2884 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2885 setPAAFSupport(callbackFeatureMask,
2886 CAM_STREAM_TYPE_CALLBACK,
2887 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002888 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002889 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002890 if (ret != NO_ERROR) {
2891 /* Ignore the error for Mono camera
2892 * because the PAAF bit mask is only set
2893 * for CAM_STREAM_TYPE_ANALYSIS stream type
2894 */
2895 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2896 LOGW("getAnalysisInfo failed, ret = %d", ret);
2897 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002898 }
2899 mSupportChannel = new QCamera3SupportChannel(
2900 mCameraHandle->camera_handle,
2901 mChannelHandle,
2902 mCameraHandle->ops,
2903 &gCamCapability[mCameraId]->padding_info,
2904 callbackFeatureMask,
2905 CAM_STREAM_TYPE_CALLBACK,
2906 &QCamera3SupportChannel::kDim,
2907 CAM_FORMAT_YUV_420_NV21,
2908 supportInfo.hw_analysis_supported,
2909 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002910 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002911 if (!mSupportChannel) {
2912 LOGE("dummy channel cannot be created");
2913 pthread_mutex_unlock(&mMutex);
2914 return -ENOMEM;
2915 }
2916 }
2917
2918 if (mSupportChannel) {
2919 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2920 QCamera3SupportChannel::kDim;
2921 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2922 CAM_STREAM_TYPE_CALLBACK;
2923 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2924 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2925 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2926 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2927 gCamCapability[mCameraId]->color_arrangement);
2928 mStreamConfigInfo.num_streams++;
2929 }
2930
2931 if (mRawDumpChannel) {
2932 cam_dimension_t rawSize;
2933 rawSize = getMaxRawSize(mCameraId);
2934 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2935 rawSize;
2936 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2937 CAM_STREAM_TYPE_RAW;
2938 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2939 CAM_QCOM_FEATURE_NONE;
2940 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2941 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2942 gCamCapability[mCameraId]->color_arrangement);
2943 mStreamConfigInfo.num_streams++;
2944 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002945
2946 if (mHdrPlusRawSrcChannel) {
2947 cam_dimension_t rawSize;
2948 rawSize = getMaxRawSize(mCameraId);
2949 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2950 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2951 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2952 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2953 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2954 gCamCapability[mCameraId]->color_arrangement);
2955 mStreamConfigInfo.num_streams++;
2956 }
2957
Thierry Strudel3d639192016-09-09 11:52:26 -07002958 /* In HFR mode, if a video stream is not added, create a dummy channel so that
2959 * the ISP can run in batch mode even for the preview-only case. This channel is
2960 * never 'start'ed (no stream-on); it is only 'initialized' */
2961 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2962 !m_bIsVideo) {
2963 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2964 setPAAFSupport(dummyFeatureMask,
2965 CAM_STREAM_TYPE_VIDEO,
2966 gCamCapability[mCameraId]->color_arrangement);
2967 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2968 mChannelHandle,
2969 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002970 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002971 this,
2972 &mDummyBatchStream,
2973 CAM_STREAM_TYPE_VIDEO,
2974 dummyFeatureMask,
2975 mMetadataChannel);
2976 if (NULL == mDummyBatchChannel) {
2977 LOGE("creation of mDummyBatchChannel failed."
2978 "Preview will use non-hfr sensor mode ");
2979 }
2980 }
2981 if (mDummyBatchChannel) {
2982 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2983 mDummyBatchStream.width;
2984 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2985 mDummyBatchStream.height;
2986 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2987 CAM_STREAM_TYPE_VIDEO;
2988 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2989 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2990 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2991 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2992 gCamCapability[mCameraId]->color_arrangement);
2993 mStreamConfigInfo.num_streams++;
2994 }
2995
2996 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2997 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002998 m_bIs4KVideo ? 0 :
2999 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003000
3001 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3002 for (pendingRequestIterator i = mPendingRequestsList.begin();
3003 i != mPendingRequestsList.end();) {
3004 i = erasePendingRequest(i);
3005 }
3006 mPendingFrameDropList.clear();
3007 // Initialize/Reset the pending buffers list
3008 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3009 req.mPendingBufferList.clear();
3010 }
3011 mPendingBuffersMap.mPendingBuffersInRequest.clear();
3012
Thierry Strudel3d639192016-09-09 11:52:26 -07003013 mCurJpegMeta.clear();
3014 //Get min frame duration for this streams configuration
3015 deriveMinFrameDuration();
3016
Chien-Yu Chenee335912017-02-09 17:53:20 -08003017 mFirstPreviewIntentSeen = false;
3018
3019 // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003020 {
3021 Mutex::Autolock l(gHdrPlusClientLock);
3022 disableHdrPlusModeLocked();
3023 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003024
Thierry Strudel3d639192016-09-09 11:52:26 -07003025 // Update state
3026 mState = CONFIGURED;
3027
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003028 mFirstMetadataCallback = true;
3029
Thierry Strudel3d639192016-09-09 11:52:26 -07003030 pthread_mutex_unlock(&mMutex);
3031
3032 return rc;
3033}
3034
3035/*===========================================================================
3036 * FUNCTION : validateCaptureRequest
3037 *
3038 * DESCRIPTION: validate a capture request from camera service
3039 *
3040 * PARAMETERS :
3041 * @request : request from framework to process
3042 *
3043 * RETURN :
3044 *
3045 *==========================================================================*/
3046int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003047 camera3_capture_request_t *request,
3048 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003049{
3050 ssize_t idx = 0;
3051 const camera3_stream_buffer_t *b;
3052 CameraMetadata meta;
3053
3054 /* Sanity check the request */
3055 if (request == NULL) {
3056 LOGE("NULL capture request");
3057 return BAD_VALUE;
3058 }
3059
3060 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3061 /*settings cannot be null for the first request*/
3062 return BAD_VALUE;
3063 }
3064
3065 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003066 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3067 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003068 LOGE("Request %d: No output buffers provided!",
3069 __FUNCTION__, frameNumber);
3070 return BAD_VALUE;
3071 }
3072 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3073 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3074 request->num_output_buffers, MAX_NUM_STREAMS);
3075 return BAD_VALUE;
3076 }
3077 if (request->input_buffer != NULL) {
3078 b = request->input_buffer;
3079 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3080 LOGE("Request %d: Buffer %ld: Status not OK!",
3081 frameNumber, (long)idx);
3082 return BAD_VALUE;
3083 }
3084 if (b->release_fence != -1) {
3085 LOGE("Request %d: Buffer %ld: Has a release fence!",
3086 frameNumber, (long)idx);
3087 return BAD_VALUE;
3088 }
3089 if (b->buffer == NULL) {
3090 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3091 frameNumber, (long)idx);
3092 return BAD_VALUE;
3093 }
3094 }
3095
3096 // Validate all buffers
3097 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003098 if (b == NULL) {
3099 return BAD_VALUE;
3100 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003101 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003102 QCamera3ProcessingChannel *channel =
3103 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3104 if (channel == NULL) {
3105 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3106 frameNumber, (long)idx);
3107 return BAD_VALUE;
3108 }
3109 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3110 LOGE("Request %d: Buffer %ld: Status not OK!",
3111 frameNumber, (long)idx);
3112 return BAD_VALUE;
3113 }
3114 if (b->release_fence != -1) {
3115 LOGE("Request %d: Buffer %ld: Has a release fence!",
3116 frameNumber, (long)idx);
3117 return BAD_VALUE;
3118 }
3119 if (b->buffer == NULL) {
3120 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3121 frameNumber, (long)idx);
3122 return BAD_VALUE;
3123 }
3124 if (*(b->buffer) == NULL) {
3125 LOGE("Request %d: Buffer %ld: NULL private handle!",
3126 frameNumber, (long)idx);
3127 return BAD_VALUE;
3128 }
3129 idx++;
3130 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003131 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003132 return NO_ERROR;
3133}
3134
3135/*===========================================================================
3136 * FUNCTION : deriveMinFrameDuration
3137 *
3138 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
3139 * on currently configured streams.
3140 *
3141 * PARAMETERS : NONE
3142 *
3143 * RETURN : NONE
3144 *
3145 *==========================================================================*/
3146void QCamera3HardwareInterface::deriveMinFrameDuration()
3147{
3148 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003149 bool hasRaw = false;
3150
3151 mMinRawFrameDuration = 0;
3152 mMinJpegFrameDuration = 0;
3153 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003154
3155 maxJpegDim = 0;
3156 maxProcessedDim = 0;
3157 maxRawDim = 0;
3158
3159 // Figure out maximum jpeg, processed, and raw dimensions
3160 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3161 it != mStreamInfo.end(); it++) {
3162
3163 // Input stream doesn't have valid stream_type
3164 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3165 continue;
3166
3167 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3168 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3169 if (dimension > maxJpegDim)
3170 maxJpegDim = dimension;
3171 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3172 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3173 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003174 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003175 if (dimension > maxRawDim)
3176 maxRawDim = dimension;
3177 } else {
3178 if (dimension > maxProcessedDim)
3179 maxProcessedDim = dimension;
3180 }
3181 }
3182
3183 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3184 MAX_SIZES_CNT);
3185
3186 //Assume all jpeg dimensions are in processed dimensions.
3187 if (maxJpegDim > maxProcessedDim)
3188 maxProcessedDim = maxJpegDim;
3189 //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003190 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003191 maxRawDim = INT32_MAX;
3192
3193 for (size_t i = 0; i < count; i++) {
3194 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3195 gCamCapability[mCameraId]->raw_dim[i].height;
3196 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3197 maxRawDim = dimension;
3198 }
3199 }
3200
3201 //Find minimum durations for processed, jpeg, and raw
3202 for (size_t i = 0; i < count; i++) {
3203 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3204 gCamCapability[mCameraId]->raw_dim[i].height) {
3205 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3206 break;
3207 }
3208 }
3209 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3210 for (size_t i = 0; i < count; i++) {
3211 if (maxProcessedDim ==
3212 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3213 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3214 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3215 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3216 break;
3217 }
3218 }
3219}
3220
3221/*===========================================================================
3222 * FUNCTION : getMinFrameDuration
3223 *
3224 * DESCRIPTION: get minimum frame duration based on the minimum frame durations
3225 * derived for the currently configured streams and the current request configuration.
3226 *
3227 * PARAMETERS : @request: request sent by the framework
3228 *
3229 * RETURN : min frame duration for a particular request
3230 *
3231 *==========================================================================*/
3232int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3233{
3234 bool hasJpegStream = false;
3235 bool hasRawStream = false;
3236 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3237 const camera3_stream_t *stream = request->output_buffers[i].stream;
3238 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3239 hasJpegStream = true;
3240 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3241 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3242 stream->format == HAL_PIXEL_FORMAT_RAW16)
3243 hasRawStream = true;
3244 }
3245
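    // Note (illustrative, hypothetical values): the slowest configured stream type
    // paces the request. With mMinProcessedFrameDuration = 33ms,
    // mMinRawFrameDuration = 50ms and mMinJpegFrameDuration = 100ms, a YUV+RAW
    // request returns 50ms, while a request that also targets a BLOB (JPEG)
    // stream returns 100ms.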
3246 if (!hasJpegStream)
3247 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3248 else
3249 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3250}
3251
3252/*===========================================================================
3253 * FUNCTION : handleBuffersDuringFlushLock
3254 *
3255 * DESCRIPTION: Account for buffers returned from back-end during flush
3256 * This function is executed while mMutex is held by the caller.
3257 *
3258 * PARAMETERS :
3259 * @buffer: image buffer for the callback
3260 *
3261 * RETURN :
3262 *==========================================================================*/
3263void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3264{
3265 bool buffer_found = false;
3266 for (List<PendingBuffersInRequest>::iterator req =
3267 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3268 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3269 for (List<PendingBufferInfo>::iterator i =
3270 req->mPendingBufferList.begin();
3271 i != req->mPendingBufferList.end(); i++) {
3272 if (i->buffer == buffer->buffer) {
3273 mPendingBuffersMap.numPendingBufsAtFlush--;
3274 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3275 buffer->buffer, req->frame_number,
3276 mPendingBuffersMap.numPendingBufsAtFlush);
3277 buffer_found = true;
3278 break;
3279 }
3280 }
3281 if (buffer_found) {
3282 break;
3283 }
3284 }
3285 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3286 //signal the flush()
3287 LOGD("All buffers returned to HAL. Continue flush");
3288 pthread_cond_signal(&mBuffersCond);
3289 }
3290}
3291
Thierry Strudel3d639192016-09-09 11:52:26 -07003292/*===========================================================================
3293 * FUNCTION : handleBatchMetadata
3294 *
3295 * DESCRIPTION: Handles metadata buffer callback in batch mode
3296 *
3297 * PARAMETERS : @metadata_buf: metadata buffer
3298 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3299 * the meta buf in this method
3300 *
3301 * RETURN :
3302 *
3303 *==========================================================================*/
3304void QCamera3HardwareInterface::handleBatchMetadata(
3305 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3306{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003307 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003308
3309 if (NULL == metadata_buf) {
3310 LOGE("metadata_buf is NULL");
3311 return;
3312 }
3313 /* In batch mode, the metadata will contain the frame number and timestamp of
3314 * the last frame in the batch. Eg: a batch containing buffers from request
3315 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3316 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3317 * multiple process_capture_results */
3318 metadata_buffer_t *metadata =
3319 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3320 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3321 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3322 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3323 uint32_t frame_number = 0, urgent_frame_number = 0;
3324 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3325 bool invalid_metadata = false;
3326 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3327 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003328 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003329
3330 int32_t *p_frame_number_valid =
3331 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3332 uint32_t *p_frame_number =
3333 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3334 int64_t *p_capture_time =
3335 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3336 int32_t *p_urgent_frame_number_valid =
3337 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3338 uint32_t *p_urgent_frame_number =
3339 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3340
3341 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3342 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3343 (NULL == p_urgent_frame_number)) {
3344 LOGE("Invalid metadata");
3345 invalid_metadata = true;
3346 } else {
3347 frame_number_valid = *p_frame_number_valid;
3348 last_frame_number = *p_frame_number;
3349 last_frame_capture_time = *p_capture_time;
3350 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3351 last_urgent_frame_number = *p_urgent_frame_number;
3352 }
3353
3354 /* In batch mode, when no video buffers are requested, set_parms are sent
3355 * for every capture_request. The difference between consecutive urgent
3356 * frame numbers and frame numbers should be used to interpolate the
3357 * corresponding frame numbers and time stamps */
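    // Worked example (hypothetical frame numbers): if a batch covers requests
    // 5..8, the metadata reports last_frame_number = 8 and mPendingBatchMap maps
    // 8 -> 5, so first_frame_number = 5 and frameNumDiff = 8 + 1 - 5 = 4; the
    // loop further below then emits one interpolated result each for 5, 6, 7 and 8.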
3358 pthread_mutex_lock(&mMutex);
3359 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003360 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3361 if(idx < 0) {
3362 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3363 last_urgent_frame_number);
3364 mState = ERROR;
3365 pthread_mutex_unlock(&mMutex);
3366 return;
3367 }
3368 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003369 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3370 first_urgent_frame_number;
3371
3372 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3373 urgent_frame_number_valid,
3374 first_urgent_frame_number, last_urgent_frame_number);
3375 }
3376
3377 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003378 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3379 if(idx < 0) {
3380 LOGE("Invalid frame number received: %d. Irrecoverable error",
3381 last_frame_number);
3382 mState = ERROR;
3383 pthread_mutex_unlock(&mMutex);
3384 return;
3385 }
3386 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003387 frameNumDiff = last_frame_number + 1 -
3388 first_frame_number;
3389 mPendingBatchMap.removeItem(last_frame_number);
3390
3391 LOGD("frm: valid: %d frm_num: %d - %d",
3392 frame_number_valid,
3393 first_frame_number, last_frame_number);
3394
3395 }
3396 pthread_mutex_unlock(&mMutex);
3397
3398 if (urgent_frame_number_valid || frame_number_valid) {
3399 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3400 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3401 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3402 urgentFrameNumDiff, last_urgent_frame_number);
3403 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3404 LOGE("frameNumDiff: %d frameNum: %d",
3405 frameNumDiff, last_frame_number);
3406 }
3407
3408 for (size_t i = 0; i < loopCount; i++) {
3409 /* handleMetadataWithLock is called even for invalid_metadata for
3410 * pipeline depth calculation */
3411 if (!invalid_metadata) {
3412 /* Infer frame number. Batch metadata contains frame number of the
3413 * last frame */
3414 if (urgent_frame_number_valid) {
3415 if (i < urgentFrameNumDiff) {
3416 urgent_frame_number =
3417 first_urgent_frame_number + i;
3418 LOGD("inferred urgent frame_number: %d",
3419 urgent_frame_number);
3420 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3421 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3422 } else {
3423 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3424 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3425 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3426 }
3427 }
3428
3429 /* Infer frame number. Batch metadata contains frame number of the
3430 * last frame */
3431 if (frame_number_valid) {
3432 if (i < frameNumDiff) {
3433 frame_number = first_frame_number + i;
3434 LOGD("inferred frame_number: %d", frame_number);
3435 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3436 CAM_INTF_META_FRAME_NUMBER, frame_number);
3437 } else {
3438 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3439 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3440 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3441 }
3442 }
3443
3444 if (last_frame_capture_time) {
3445 //Infer timestamp
3446 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003447 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003448 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003449 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
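                // Illustrative arithmetic (hypothetical values): for 120 fps HFR
                // (mHFRVideoFps = 120) with loopCount = 4, frames are spaced
                // NSEC_PER_SEC / 120 ~= 8.33 ms apart, so the first frame's
                // timestamp is the batch timestamp minus 3 * 8.33 ms, and frame i
                // is offset by i * 8.33 ms from that.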
Thierry Strudel3d639192016-09-09 11:52:26 -07003450 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3451 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3452 LOGD("batch capture_time: %lld, capture_time: %lld",
3453 last_frame_capture_time, capture_time);
3454 }
3455 }
3456 pthread_mutex_lock(&mMutex);
3457 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003458 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003459 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3460 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003461 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003462 pthread_mutex_unlock(&mMutex);
3463 }
3464
3465 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003466 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003467 mMetadataChannel->bufDone(metadata_buf);
3468 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003469 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003470 }
3471}
3472
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003473void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3474 camera3_error_msg_code_t errorCode)
3475{
3476 camera3_notify_msg_t notify_msg;
3477 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3478 notify_msg.type = CAMERA3_MSG_ERROR;
3479 notify_msg.message.error.error_code = errorCode;
3480 notify_msg.message.error.error_stream = NULL;
3481 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003482 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003483
3484 return;
3485}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003486
3487/*===========================================================================
3488 * FUNCTION : sendPartialMetadataWithLock
3489 *
3490 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3491 *
3492 * PARAMETERS : @metadata: metadata buffer
3493 * @requestIter: The iterator for the pending capture request for
3494 * which the partial result is being sen
3495 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3496 * last urgent metadata in a batch. Always true for non-batch mode
3497 *
3498 * RETURN :
3499 *
3500 *==========================================================================*/
3501
3502void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3503 metadata_buffer_t *metadata,
3504 const pendingRequestIterator requestIter,
3505 bool lastUrgentMetadataInBatch)
3506{
3507 camera3_capture_result_t result;
3508 memset(&result, 0, sizeof(camera3_capture_result_t));
3509
3510 requestIter->partial_result_cnt++;
3511
3512 // Extract 3A metadata
3513 result.result = translateCbUrgentMetadataToResultMetadata(
3514 metadata, lastUrgentMetadataInBatch);
3515 // Populate metadata result
3516 result.frame_number = requestIter->frame_number;
3517 result.num_output_buffers = 0;
3518 result.output_buffers = NULL;
3519 result.partial_result = requestIter->partial_result_cnt;
3520
3521 {
3522 Mutex::Autolock l(gHdrPlusClientLock);
3523 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3524 // Notify HDR+ client about the partial metadata.
3525 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3526 result.partial_result == PARTIAL_RESULT_COUNT);
3527 }
3528 }
3529
3530 orchestrateResult(&result);
3531 LOGD("urgent frame_number = %u", result.frame_number);
3532 free_camera_metadata((camera_metadata_t *)result.result);
3533}
3534
Thierry Strudel3d639192016-09-09 11:52:26 -07003535/*===========================================================================
3536 * FUNCTION : handleMetadataWithLock
3537 *
3538 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3539 *
3540 * PARAMETERS : @metadata_buf: metadata buffer
3541 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3542 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003543 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3544 * last urgent metadata in a batch. Always true for non-batch mode
3545 * @lastMetadataInBatch: Boolean to indicate whether this is the
3546 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003547 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3548 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003549 *
3550 * RETURN :
3551 *
3552 *==========================================================================*/
3553void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003554 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003555 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3556 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003557{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003558 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003559 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3560 //during flush do not send metadata from this thread
3561 LOGD("not sending metadata during flush or when mState is error");
3562 if (free_and_bufdone_meta_buf) {
3563 mMetadataChannel->bufDone(metadata_buf);
3564 free(metadata_buf);
3565 }
3566 return;
3567 }
3568
3569 //not in flush
3570 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3571 int32_t frame_number_valid, urgent_frame_number_valid;
3572 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003573 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003574 nsecs_t currentSysTime;
3575
3576 int32_t *p_frame_number_valid =
3577 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3578 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3579 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003580 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003581 int32_t *p_urgent_frame_number_valid =
3582 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3583 uint32_t *p_urgent_frame_number =
3584 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3585 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3586 metadata) {
3587 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3588 *p_frame_number_valid, *p_frame_number);
3589 }
3590
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003591 camera_metadata_t *resultMetadata = nullptr;
3592
Thierry Strudel3d639192016-09-09 11:52:26 -07003593 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3594 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3595 LOGE("Invalid metadata");
3596 if (free_and_bufdone_meta_buf) {
3597 mMetadataChannel->bufDone(metadata_buf);
3598 free(metadata_buf);
3599 }
3600 goto done_metadata;
3601 }
3602 frame_number_valid = *p_frame_number_valid;
3603 frame_number = *p_frame_number;
3604 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003605 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003606 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3607 urgent_frame_number = *p_urgent_frame_number;
3608 currentSysTime = systemTime(CLOCK_MONOTONIC);
3609
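    // Note: when the sensor timestamp is not calibrated, the block below measures
    // the BOOTTIME-to-MONOTONIC clock offset by bracketing one BOOTTIME read
    // between two MONOTONIC reads (keeping the sample with the smallest bracketing
    // gap) and subtracts that offset from capture_time, presumably to shift the
    // timestamp into the MONOTONIC time base.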
Jason Lee603176d2017-05-31 11:43:27 -07003610 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3611 const int tries = 3;
3612 nsecs_t bestGap = 0, measured = 0;
3613 for (int i = 0; i < tries; ++i) {
3614 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3615 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3616 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3617 const nsecs_t gap = tmono2 - tmono;
3618 if (i == 0 || gap < bestGap) {
3619 bestGap = gap;
3620 measured = tbase - ((tmono + tmono2) >> 1);
3621 }
3622 }
3623 capture_time -= measured;
3624 }
3625
Thierry Strudel3d639192016-09-09 11:52:26 -07003626 // Detect if buffers from any requests are overdue
3627 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003628 int64_t timeout;
3629 {
3630 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3631 // If there is a pending HDR+ request, the following requests may be blocked until the
3632 // HDR+ request is done. So allow a longer timeout.
3633 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3634 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3635 }
3636
3637 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003638 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003639 assert(missed.stream->priv);
3640 if (missed.stream->priv) {
3641 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3642 assert(ch->mStreams[0]);
3643 if (ch->mStreams[0]) {
3644 LOGE("Cancel missing frame = %d, buffer = %p,"
3645 "stream type = %d, stream format = %d",
3646 req.frame_number, missed.buffer,
3647 ch->mStreams[0]->getMyType(), missed.stream->format);
3648 ch->timeoutFrame(req.frame_number);
3649 }
3650 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003651 }
3652 }
3653 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003654 //For the very first metadata callback, regardless of whether it contains a valid
3655 //frame number, send the partial metadata for the jump-starting requests.
3656 //Note that this has to be done even if the metadata doesn't contain a valid
3657 //urgent frame number, because when only one request is ever submitted
3658 //to the HAL, there won't be a subsequent valid urgent frame number.
3659 if (mFirstMetadataCallback) {
3660 for (pendingRequestIterator i =
3661 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3662 if (i->bUseFirstPartial) {
3663 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3664 }
3665 }
3666 mFirstMetadataCallback = false;
3667 }
3668
Thierry Strudel3d639192016-09-09 11:52:26 -07003669 //Partial result on process_capture_result for timestamp
3670 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003671 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003672
3673 //Recieved an urgent Frame Number, handle it
3674 //using partial results
3675 for (pendingRequestIterator i =
3676 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3677 LOGD("Iterator Frame = %d urgent frame = %d",
3678 i->frame_number, urgent_frame_number);
3679
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003680 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003681 (i->partial_result_cnt == 0)) {
3682 LOGE("Error: HAL missed urgent metadata for frame number %d",
3683 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003684 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003685 }
3686
3687 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003688 i->partial_result_cnt == 0) {
3689 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003690 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3691 // Instant AEC settled for this frame.
3692 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3693 mInstantAECSettledFrameNumber = urgent_frame_number;
3694 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003695 break;
3696 }
3697 }
3698 }
3699
3700 if (!frame_number_valid) {
3701 LOGD("Not a valid normal frame number, used as SOF only");
3702 if (free_and_bufdone_meta_buf) {
3703 mMetadataChannel->bufDone(metadata_buf);
3704 free(metadata_buf);
3705 }
3706 goto done_metadata;
3707 }
3708 LOGH("valid frame_number = %u, capture_time = %lld",
3709 frame_number, capture_time);
3710
Emilian Peev4e0fe952017-06-30 12:40:09 -07003711 handleDepthDataLocked(metadata->depth_data, frame_number,
3712 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003713
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003714 // Check whether any stream buffer corresponding to this frame was dropped.
3715 // If dropped, send ERROR_BUFFER for the corresponding stream.
3716 // Also, if instant AEC is enabled, drop frames until AEC has settled.
3717 for (auto & pendingRequest : mPendingRequestsList) {
3718 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3719 mInstantAECSettledFrameNumber)) {
3720 camera3_notify_msg_t notify_msg = {};
3721 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003722 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003723 QCamera3ProcessingChannel *channel =
3724 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003725 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003726 if (p_cam_frame_drop) {
3727 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003728 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003729 // Got the stream ID for drop frame.
3730 dropFrame = true;
3731 break;
3732 }
3733 }
3734 } else {
3735 // This is instant AEC case.
3736 // For instant AEC drop the stream untill AEC is settled.
3737 dropFrame = true;
3738 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003739
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003740 if (dropFrame) {
3741 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3742 if (p_cam_frame_drop) {
3743 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003744 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003745 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003746 } else {
3747 // For instant AEC, inform frame drop and frame number
3748 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3749 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003750 pendingRequest.frame_number, streamID,
3751 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003752 }
3753 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003754 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003755 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003756 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003757 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003758 if (p_cam_frame_drop) {
3759 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003760 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003761 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003762 } else {
3763 // For instant AEC, inform frame drop and frame number
3764 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3765 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003766 pendingRequest.frame_number, streamID,
3767 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003768 }
3769 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003770 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003771 PendingFrameDrop.stream_ID = streamID;
3772 // Add the Frame drop info to mPendingFrameDropList
3773 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003774 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003775 }
3776 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003777 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003778
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003779 for (auto & pendingRequest : mPendingRequestsList) {
3780 // Find the pending request with the frame number.
3781 if (pendingRequest.frame_number == frame_number) {
3782 // Update the sensor timestamp.
3783 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003784
Thierry Strudel3d639192016-09-09 11:52:26 -07003785
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003786 /* Set the timestamp in display metadata so that clients aware of
3787 private_handle such as VT can use these unmodified timestamps.
3788 The camera framework is unaware of this timestamp and cannot change it */
Jason Lee603176d2017-05-31 11:43:27 -07003789 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003790
Thierry Strudel3d639192016-09-09 11:52:26 -07003791 // Find channel requiring metadata, meaning internal offline postprocess
3792 // is needed.
3793 //TODO: for now, we don't support two streams requiring metadata at the same time.
3794 // (because we are not making copies, and the metadata buffer is not reference counted.)
3795 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003796 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3797 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003798 if (iter->need_metadata) {
3799 internalPproc = true;
3800 QCamera3ProcessingChannel *channel =
3801 (QCamera3ProcessingChannel *)iter->stream->priv;
3802 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003803 if(p_is_metabuf_queued != NULL) {
3804 *p_is_metabuf_queued = true;
3805 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003806 break;
3807 }
3808 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003809 for (auto itr = pendingRequest.internalRequestList.begin();
3810 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003811 if (itr->need_metadata) {
3812 internalPproc = true;
3813 QCamera3ProcessingChannel *channel =
3814 (QCamera3ProcessingChannel *)itr->stream->priv;
3815 channel->queueReprocMetadata(metadata_buf);
3816 break;
3817 }
3818 }
3819
Thierry Strudel54dc9782017-02-15 12:12:10 -08003820 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003821
3822 bool *enableZsl = nullptr;
3823 if (gExposeEnableZslKey) {
3824 enableZsl = &pendingRequest.enableZsl;
3825 }
3826
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003827 resultMetadata = translateFromHalMetadata(metadata,
3828 pendingRequest.timestamp, pendingRequest.request_id,
3829 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3830 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003831 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003832 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003833 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003834 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003835 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003836 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003837
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003838 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003839
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003840 if (pendingRequest.blob_request) {
3841 //Dump tuning metadata if enabled and available
3842 char prop[PROPERTY_VALUE_MAX];
3843 memset(prop, 0, sizeof(prop));
3844 property_get("persist.camera.dumpmetadata", prop, "0");
3845 int32_t enabled = atoi(prop);
3846 if (enabled && metadata->is_tuning_params_valid) {
3847 dumpMetadataToFile(metadata->tuning_params,
3848 mMetaFrameCount,
3849 enabled,
3850 "Snapshot",
3851 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003852 }
3853 }
3854
3855 if (!internalPproc) {
3856 LOGD("couldn't find need_metadata for this metadata");
3857 // Return metadata buffer
3858 if (free_and_bufdone_meta_buf) {
3859 mMetadataChannel->bufDone(metadata_buf);
3860 free(metadata_buf);
3861 }
3862 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003863
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003864 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003865 }
3866 }
3867
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003868 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3869
3870 // Try to send out capture result metadata.
3871 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003872 return;
3873
Thierry Strudel3d639192016-09-09 11:52:26 -07003874done_metadata:
3875 for (pendingRequestIterator i = mPendingRequestsList.begin();
3876 i != mPendingRequestsList.end() ;i++) {
3877 i->pipeline_depth++;
3878 }
3879 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3880 unblockRequestIfNecessary();
3881}
3882
3883/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003884 * FUNCTION : handleDepthDataLocked
3885 *
3886 * DESCRIPTION: Handles incoming depth data
3887 *
3888 * PARAMETERS : @depthData : Depth data
3889 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003890 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003891 *
3892 * RETURN :
3893 *
3894 *==========================================================================*/
3895void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003896 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003897 uint32_t currentFrameNumber;
3898 buffer_handle_t *depthBuffer;
3899
3900 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003901 return;
3902 }
3903
3904 camera3_stream_buffer_t resultBuffer =
3905 {.acquire_fence = -1,
3906 .release_fence = -1,
3907 .status = CAMERA3_BUFFER_STATUS_OK,
3908 .buffer = nullptr,
3909 .stream = mDepthChannel->getStream()};
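    // Drain queued depth buffers in frame order: frames older than the incoming
    // one are returned with a buffer error notification, the matching frame has
    // its depth data populated (or an error status when the data is not valid),
    // and any newer frames stay queued for later.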
Emilian Peev7650c122017-01-19 08:24:33 -08003910 do {
3911 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3912 if (nullptr == depthBuffer) {
3913 break;
3914 }
3915
Emilian Peev7650c122017-01-19 08:24:33 -08003916 resultBuffer.buffer = depthBuffer;
3917 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003918 if (valid) {
3919 int32_t rc = mDepthChannel->populateDepthData(depthData,
3920 frameNumber);
3921 if (NO_ERROR != rc) {
3922 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3923 } else {
3924 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3925 }
Emilian Peev7650c122017-01-19 08:24:33 -08003926 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003927 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003928 }
3929 } else if (currentFrameNumber > frameNumber) {
3930 break;
3931 } else {
3932 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3933 {{currentFrameNumber, mDepthChannel->getStream(),
3934 CAMERA3_MSG_ERROR_BUFFER}}};
3935 orchestrateNotify(&notify_msg);
3936
3937 LOGE("Depth buffer for frame number: %d is missing, "
3938 "returning buffer error!", currentFrameNumber);
3939 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3940 }
3941 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003942 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003943 } while (currentFrameNumber < frameNumber);
3944}
3945
3946/*===========================================================================
3947 * FUNCTION : notifyErrorFoPendingDepthData
3948 *
3949 * DESCRIPTION: Returns error for any pending depth buffers
3950 *
3951 * PARAMETERS : depthCh - depth channel that needs to get flushed
3952 *
3953 * RETURN :
3954 *
3955 *==========================================================================*/
3956void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3957 QCamera3DepthChannel *depthCh) {
3958 uint32_t currentFrameNumber;
3959 buffer_handle_t *depthBuffer;
3960
3961 if (nullptr == depthCh) {
3962 return;
3963 }
3964
3965 camera3_notify_msg_t notify_msg =
3966 {.type = CAMERA3_MSG_ERROR,
3967 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3968 camera3_stream_buffer_t resultBuffer =
3969 {.acquire_fence = -1,
3970 .release_fence = -1,
3971 .buffer = nullptr,
3972 .stream = depthCh->getStream(),
3973 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08003974
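    // Return every queued depth buffer with an error status and raise a buffer
    // error notification for its frame number.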
3975 while (nullptr !=
3976 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3977 depthCh->unmapBuffer(currentFrameNumber);
3978
3979 notify_msg.message.error.frame_number = currentFrameNumber;
3980 orchestrateNotify(&notify_msg);
3981
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003982 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003983 };
3984}
3985
3986/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003987 * FUNCTION : hdrPlusPerfLock
3988 *
3989 * DESCRIPTION: perf lock for HDR+ using custom intent
3990 *
3991 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3992 *
3993 * RETURN : None
3994 *
3995 *==========================================================================*/
3996void QCamera3HardwareInterface::hdrPlusPerfLock(
3997 mm_camera_super_buf_t *metadata_buf)
3998{
3999 if (NULL == metadata_buf) {
4000 LOGE("metadata_buf is NULL");
4001 return;
4002 }
4003 metadata_buffer_t *metadata =
4004 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4005 int32_t *p_frame_number_valid =
4006 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4007 uint32_t *p_frame_number =
4008 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4009
4010 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4011 LOGE("%s: Invalid metadata", __func__);
4012 return;
4013 }
4014
4015 //acquire perf lock for 5 sec after the last HDR frame is captured
4016 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4017 if ((p_frame_number != NULL) &&
4018 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004019 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004020 }
4021 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004022}
4023
4024/*===========================================================================
4025 * FUNCTION : handleInputBufferWithLock
4026 *
4027 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4028 *
4029 * PARAMETERS : @frame_number: frame number of the input buffer
4030 *
4031 * RETURN :
4032 *
4033 *==========================================================================*/
4034void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4035{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004036 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004037 pendingRequestIterator i = mPendingRequestsList.begin();
4038 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4039 i++;
4040 }
4041 if (i != mPendingRequestsList.end() && i->input_buffer) {
4042 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004043 CameraMetadata settings;
4044 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4045 if(i->settings) {
4046 settings = i->settings;
4047 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4048 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004049 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004050 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004051 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004052 } else {
4053 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004054 }
4055
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004056 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4057 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4058 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004059
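        // For a reprocess request the stored settings double as the final result
        // metadata; send them back together with the consumed input buffer.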
4060 camera3_capture_result result;
4061 memset(&result, 0, sizeof(camera3_capture_result));
4062 result.frame_number = frame_number;
4063 result.result = i->settings;
4064 result.input_buffer = i->input_buffer;
4065 result.partial_result = PARTIAL_RESULT_COUNT;
4066
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004067 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004068 LOGD("Input request metadata and input buffer frame_number = %u",
4069 i->frame_number);
4070 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004071
4072 // Dispatch result metadata that may be just unblocked by this reprocess result.
4073 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004074 } else {
4075 LOGE("Could not find input request for frame number %d", frame_number);
4076 }
4077}
4078
4079/*===========================================================================
4080 * FUNCTION : handleBufferWithLock
4081 *
4082 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4083 *
4084 * PARAMETERS : @buffer: image buffer for the callback
4085 * @frame_number: frame number of the image buffer
4086 *
4087 * RETURN :
4088 *
4089 *==========================================================================*/
4090void QCamera3HardwareInterface::handleBufferWithLock(
4091 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4092{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004093 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004094
4095 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4096 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4097 }
4098
Thierry Strudel3d639192016-09-09 11:52:26 -07004099 /* Nothing to be done during error state */
4100 if ((ERROR == mState) || (DEINIT == mState)) {
4101 return;
4102 }
4103 if (mFlushPerf) {
4104 handleBuffersDuringFlushLock(buffer);
4105 return;
4106 }
4107 //not in flush
4108 // If the frame number doesn't exist in the pending request list,
4109 // directly send the buffer to the frameworks, and update pending buffers map
4110 // Otherwise, book-keep the buffer.
4111 pendingRequestIterator i = mPendingRequestsList.begin();
4112 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4113 i++;
4114 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004115
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004116 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004117 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004118 // For a reprocessing request, try to send out result metadata.
4119 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004120 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004121 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004122
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004123 // Check if this frame was dropped.
4124 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4125 m != mPendingFrameDropList.end(); m++) {
4126 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4127 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4128 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4129 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4130 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4131 frame_number, streamID);
4132 m = mPendingFrameDropList.erase(m);
4133 break;
4134 }
4135 }
4136
4137 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4138 LOGH("result frame_number = %d, buffer = %p",
4139 frame_number, buffer->buffer);
4140
4141 mPendingBuffersMap.removeBuf(buffer->buffer);
4142 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4143
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004144 if (mPreviewStarted == false) {
4145 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4146 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004147 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4148
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004149 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4150 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4151 mPreviewStarted = true;
4152
4153 // Set power hint for preview
4154 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4155 }
4156 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004157}
4158
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004159void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004160 const camera_metadata_t *resultMetadata)
4161{
4162 // Find the pending request for this result metadata.
4163 auto requestIter = mPendingRequestsList.begin();
4164 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4165 requestIter++;
4166 }
4167
4168 if (requestIter == mPendingRequestsList.end()) {
4169 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4170 return;
4171 }
4172
4173 // Update the result metadata
4174 requestIter->resultMetadata = resultMetadata;
4175
4176 // Check what type of request this is.
4177 bool liveRequest = false;
4178 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004179 // HDR+ request doesn't have partial results.
4180 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004181 } else if (requestIter->input_buffer != nullptr) {
4182 // Reprocessing request result is the same as settings.
4183 requestIter->resultMetadata = requestIter->settings;
4184 // Reprocessing request doesn't have partial results.
4185 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4186 } else {
4187 liveRequest = true;
4188 requestIter->partial_result_cnt++;
4189 mPendingLiveRequest--;
4190
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004191 {
4192 Mutex::Autolock l(gHdrPlusClientLock);
4193 // For a live request, send the metadata to HDR+ client.
4194 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4195 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4196 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4197 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004198 }
4199 }
4200
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004201 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4202}
4203
4204void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4205 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004206 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4207 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004208 bool readyToSend = true;
4209
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004210 // Iterate through the pending requests to send out result metadata that are ready. Also if
4211 // this result metadata belongs to a live request, notify errors for previous live requests
4212 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004213 auto iter = mPendingRequestsList.begin();
4214 while (iter != mPendingRequestsList.end()) {
4215 // Check if current pending request is ready. If it's not ready, the following pending
4216 // requests are also not ready.
4217 if (readyToSend && iter->resultMetadata == nullptr) {
4218 readyToSend = false;
4219 }
4220
4221 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4222
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004223 camera3_capture_result_t result = {};
4224 result.frame_number = iter->frame_number;
4225 result.result = iter->resultMetadata;
4226 result.partial_result = iter->partial_result_cnt;
4227
4228 // If this pending buffer has result metadata, we may be able to send out shutter callback
4229 // and result metadata.
4230 if (iter->resultMetadata != nullptr) {
4231 if (!readyToSend) {
4232 // If any of the previous pending request is not ready, this pending request is
4233 // also not ready to send in order to keep shutter callbacks and result metadata
4234 // in order.
4235 iter++;
4236 continue;
4237 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004238 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004239 // If the result metadata belongs to a live request, notify errors for previous pending
4240 // live requests.
4241 mPendingLiveRequest--;
4242
4243 CameraMetadata dummyMetadata;
4244 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4245 result.result = dummyMetadata.release();
4246
4247 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004248
4249 // partial_result should be PARTIAL_RESULT_CNT in case of
4250 // ERROR_RESULT.
4251 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4252 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004253 } else {
4254 iter++;
4255 continue;
4256 }
4257
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004258 result.output_buffers = nullptr;
4259 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004260 orchestrateResult(&result);
4261
4262 // For reprocessing, result metadata is the same as settings so do not free it here to
4263 // avoid double free.
4264 if (result.result != iter->settings) {
4265 free_camera_metadata((camera_metadata_t *)result.result);
4266 }
4267 iter->resultMetadata = nullptr;
4268 iter = erasePendingRequest(iter);
4269 }
4270
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004271 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004272 for (auto &iter : mPendingRequestsList) {
4273 // Increment pipeline depth for the following pending requests.
4274 if (iter.frame_number > frameNumber) {
4275 iter.pipeline_depth++;
4276 }
4277 }
4278 }
4279
4280 unblockRequestIfNecessary();
4281}
4282
Thierry Strudel3d639192016-09-09 11:52:26 -07004283/*===========================================================================
4284 * FUNCTION : unblockRequestIfNecessary
4285 *
4286 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4287 * that mMutex is held when this function is called.
4288 *
4289 * PARAMETERS :
4290 *
4291 * RETURN :
4292 *
4293 *==========================================================================*/
4294void QCamera3HardwareInterface::unblockRequestIfNecessary()
4295{
4296 // Unblock process_capture_request
4297 pthread_cond_signal(&mRequestCond);
4298}
4299
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004300/*===========================================================================
4301 * FUNCTION : isHdrSnapshotRequest
4302 *
4303 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4304 *
4305 * PARAMETERS : camera3 request structure
4306 *
4307 * RETURN : boolean decision variable
4308 *
4309 *==========================================================================*/
4310bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4311{
4312 if (request == NULL) {
4313 LOGE("Invalid request handle");
4314 assert(0);
4315 return false;
4316 }
4317
4318 if (!mForceHdrSnapshot) {
4319 CameraMetadata frame_settings;
4320 frame_settings = request->settings;
4321
4322 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4323 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4324 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4325 return false;
4326 }
4327 } else {
4328 return false;
4329 }
4330
4331 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4332 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4333 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4334 return false;
4335 }
4336 } else {
4337 return false;
4338 }
4339 }
4340
4341 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4342 if (request->output_buffers[i].stream->format
4343 == HAL_PIXEL_FORMAT_BLOB) {
4344 return true;
4345 }
4346 }
4347
4348 return false;
4349}
4350/*===========================================================================
4351 * FUNCTION : orchestrateRequest
4352 *
4353 * DESCRIPTION: Orchestrates a capture request from camera service
4354 *
4355 * PARAMETERS :
4356 * @request : request from framework to process
4357 *
4358 * RETURN : Error status codes
4359 *
4360 *==========================================================================*/
4361int32_t QCamera3HardwareInterface::orchestrateRequest(
4362 camera3_capture_request_t *request)
4363{
4364
4365 uint32_t originalFrameNumber = request->frame_number;
4366 uint32_t originalOutputCount = request->num_output_buffers;
4367 const camera_metadata_t *original_settings = request->settings;
4368 List<InternalRequest> internallyRequestedStreams;
4369 List<InternalRequest> emptyInternalList;
4370
4371 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4372 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4373 uint32_t internalFrameNumber;
4374 CameraMetadata modified_meta;
4375
4376
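        // Rough shape of the sequence below: AE is locked and the exposure
        // compensation is stepped through the HDR bracket. Before each bracketed
        // blob capture, a metering-only internal request lets AE settle, then a
        // blob request with need_metadata set feeds the internal offline
        // postprocess. The framework-visible frame is captured on the original
        // streams early in the sequence, and only that capture's internal frame
        // number is mapped back to the framework frame number.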
4377 /* Add Blob channel to list of internally requested streams */
4378 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4379 if (request->output_buffers[i].stream->format
4380 == HAL_PIXEL_FORMAT_BLOB) {
4381 InternalRequest streamRequested;
4382 streamRequested.meteringOnly = 1;
4383 streamRequested.need_metadata = 0;
4384 streamRequested.stream = request->output_buffers[i].stream;
4385 internallyRequestedStreams.push_back(streamRequested);
4386 }
4387 }
4388 request->num_output_buffers = 0;
4389 auto itr = internallyRequestedStreams.begin();
4390
4391 /* Modify setting to set compensation */
4392 modified_meta = request->settings;
4393 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4394 uint8_t aeLock = 1;
4395 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4396 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4397 camera_metadata_t *modified_settings = modified_meta.release();
4398 request->settings = modified_settings;
4399
4400 /* Capture Settling & -2x frame */
4401 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4402 request->frame_number = internalFrameNumber;
4403 processCaptureRequest(request, internallyRequestedStreams);
4404
4405 request->num_output_buffers = originalOutputCount;
4406 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4407 request->frame_number = internalFrameNumber;
4408 processCaptureRequest(request, emptyInternalList);
4409 request->num_output_buffers = 0;
4410
4411 modified_meta = modified_settings;
4412 expCompensation = 0;
4413 aeLock = 1;
4414 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4415 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4416 modified_settings = modified_meta.release();
4417 request->settings = modified_settings;
4418
4419 /* Capture Settling & 0X frame */
4420
4421 itr = internallyRequestedStreams.begin();
4422 if (itr == internallyRequestedStreams.end()) {
4423 LOGE("Error Internally Requested Stream list is empty");
4424 assert(0);
4425 } else {
4426 itr->need_metadata = 0;
4427 itr->meteringOnly = 1;
4428 }
4429
4430 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4431 request->frame_number = internalFrameNumber;
4432 processCaptureRequest(request, internallyRequestedStreams);
4433
4434 itr = internallyRequestedStreams.begin();
4435 if (itr == internallyRequestedStreams.end()) {
4436 ALOGE("Error Internally Requested Stream list is empty");
4437 assert(0);
4438 } else {
4439 itr->need_metadata = 1;
4440 itr->meteringOnly = 0;
4441 }
4442
4443 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4444 request->frame_number = internalFrameNumber;
4445 processCaptureRequest(request, internallyRequestedStreams);
4446
4447 /* Capture 2X frame*/
4448 modified_meta = modified_settings;
4449 expCompensation = GB_HDR_2X_STEP_EV;
4450 aeLock = 1;
4451 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4452 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4453 modified_settings = modified_meta.release();
4454 request->settings = modified_settings;
4455
4456 itr = internallyRequestedStreams.begin();
4457 if (itr == internallyRequestedStreams.end()) {
4458 ALOGE("Error Internally Requested Stream list is empty");
4459 assert(0);
4460 } else {
4461 itr->need_metadata = 0;
4462 itr->meteringOnly = 1;
4463 }
4464 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4465 request->frame_number = internalFrameNumber;
4466 processCaptureRequest(request, internallyRequestedStreams);
4467
4468 itr = internallyRequestedStreams.begin();
4469 if (itr == internallyRequestedStreams.end()) {
4470 ALOGE("Error Internally Requested Stream list is empty");
4471 assert(0);
4472 } else {
4473 itr->need_metadata = 1;
4474 itr->meteringOnly = 0;
4475 }
4476
4477 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4478 request->frame_number = internalFrameNumber;
4479 processCaptureRequest(request, internallyRequestedStreams);
4480
4481
4482 /* Capture 2X on original streaming config*/
4483 internallyRequestedStreams.clear();
4484
4485 /* Restore original settings pointer */
4486 request->settings = original_settings;
4487 } else {
4488 uint32_t internalFrameNumber;
4489 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4490 request->frame_number = internalFrameNumber;
4491 return processCaptureRequest(request, internallyRequestedStreams);
4492 }
4493
4494 return NO_ERROR;
4495}
4496
4497/*===========================================================================
4498 * FUNCTION : orchestrateResult
4499 *
4500 * DESCRIPTION: Orchestrates a capture result to camera service
4501 *
4502 * PARAMETERS :
4503 * @result : capture result to be sent to the framework
4504 *
4505 * RETURN :
4506 *
4507 *==========================================================================*/
4508void QCamera3HardwareInterface::orchestrateResult(
4509 camera3_capture_result_t *result)
4510{
4511 uint32_t frameworkFrameNumber;
4512 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4513 frameworkFrameNumber);
4514 if (rc != NO_ERROR) {
4515 LOGE("Cannot find translated frameworkFrameNumber");
4516 assert(0);
4517 } else {
4518 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004519 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004520 } else {
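            // The result metadata may carry ANDROID_SYNC_FRAME_NUMBER holding an
            // internal frame number; rewrite it to the framework-visible frame
            // number before forwarding the result.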
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004521 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004522 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4523 camera_metadata_entry_t entry;
4524 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4525 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004526 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004527 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4528 if (ret != OK)
4529 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004530 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004531 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004532 result->frame_number = frameworkFrameNumber;
4533 mCallbackOps->process_capture_result(mCallbackOps, result);
4534 }
4535 }
4536}
4537
4538/*===========================================================================
4539 * FUNCTION : orchestrateNotify
4540 *
4541 * DESCRIPTION: Orchestrates a notify to camera service
4542 *
4543 * PARAMETERS :
4544 * @notify_msg : notify message to be sent to the framework
4545 *
4546 * RETURN :
4547 *
4548 *==========================================================================*/
4549void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4550{
4551 uint32_t frameworkFrameNumber;
4552 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004553 int32_t rc = NO_ERROR;
4554
4555 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004556 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004557
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004558 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004559 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4560 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4561 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004562 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004563 LOGE("Cannot find translated frameworkFrameNumber");
4564 assert(0);
4565 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004566 }
4567 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004568
4569 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4570 LOGD("Internal Request drop the notifyCb");
4571 } else {
4572 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4573 mCallbackOps->notify(mCallbackOps, notify_msg);
4574 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004575}
4576
4577/*===========================================================================
4578 * FUNCTION : FrameNumberRegistry
4579 *
4580 * DESCRIPTION: Constructor
4581 *
4582 * PARAMETERS :
4583 *
4584 * RETURN :
4585 *
4586 *==========================================================================*/
4587FrameNumberRegistry::FrameNumberRegistry()
4588{
4589 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4590}
4591
4592/*===========================================================================
4593 * FUNCTION : ~FrameNumberRegistry
4594 *
4595 * DESCRIPTION: Destructor
4596 *
4597 * PARAMETERS :
4598 *
4599 * RETURN :
4600 *
4601 *==========================================================================*/
4602FrameNumberRegistry::~FrameNumberRegistry()
4603{
4604}
4605
4606/*===========================================================================
4607 * FUNCTION : PurgeOldEntriesLocked
4608 *
4609 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4610 *
4611 * PARAMETERS :
4612 *
4613 * RETURN : NONE
4614 *
4615 *==========================================================================*/
4616void FrameNumberRegistry::purgeOldEntriesLocked()
4617{
4618 while (_register.begin() != _register.end()) {
4619 auto itr = _register.begin();
4620 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4621 _register.erase(itr);
4622 } else {
4623 return;
4624 }
4625 }
4626}
4627
4628/*===========================================================================
4629 * FUNCTION : allocStoreInternalFrameNumber
4630 *
4631 * DESCRIPTION: Method to record a framework request and associate a newly
4632 * generated internal frame number with it
4633 *
4634 * PARAMETERS :
4635 * @frameworkFrameNumber: Identifier given by the framework
4636 * @internalFrameNumber : Output parameter that receives the newly generated
4637 * internal frame number
4638 *
4639 * RETURN : Error code
4640 *
4641 *==========================================================================*/
4642int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4643 uint32_t &internalFrameNumber)
4644{
4645 Mutex::Autolock lock(mRegistryLock);
4646 internalFrameNumber = _nextFreeInternalNumber++;
4647 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4648 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4649 purgeOldEntriesLocked();
4650 return NO_ERROR;
4651}
4652
4653/*===========================================================================
4654 * FUNCTION : generateStoreInternalFrameNumber
4655 *
4656 * DESCRIPTION: Method to generate and store a new internal frame number that is
4657 * not associated with any framework request
4658 *
4659 * PARAMETERS :
4660 * @internalFrameNumber: Output parameter that receives the newly generated
4661 * internal frame number
4662 *
4663 * RETURN : Error code
4664 *
4665 *==========================================================================*/
4666int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4667{
4668 Mutex::Autolock lock(mRegistryLock);
4669 internalFrameNumber = _nextFreeInternalNumber++;
4670 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4671 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4672 purgeOldEntriesLocked();
4673 return NO_ERROR;
4674}
4675
4676/*===========================================================================
4677 * FUNCTION : getFrameworkFrameNumber
4678 *
4679 * DESCRIPTION: Method to query the framework frame number given an internal one
4680 *
4681 * PARAMETERS :
4682 * @internalFrameNumber : Internal frame number to look up
4683 * @frameworkFrameNumber: Output parameter holding the matching framework frame number
4684 *
4685 * RETURN : Error code
4686 *
4687 *==========================================================================*/
4688int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4689 uint32_t &frameworkFrameNumber)
4690{
4691 Mutex::Autolock lock(mRegistryLock);
4692 auto itr = _register.find(internalFrameNumber);
4693 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004694 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004695 return -ENOENT;
4696 }
4697
4698 frameworkFrameNumber = itr->second;
4699 purgeOldEntriesLocked();
4700 return NO_ERROR;
4701}
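// Illustration of the registry mapping (frame numbers below are hypothetical):
// an HDR snapshot for framework frame 100 may consume internal frames
// 1000..1005, but only the one stored via allocStoreInternalFrameNumber(100, ...)
// resolves back to 100 here; the purely internal ones resolve to
// EMPTY_FRAMEWORK_FRAME_NUMBER and are dropped by orchestrateResult() and
// orchestrateNotify().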
Thierry Strudel3d639192016-09-09 11:52:26 -07004702
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004703status_t QCamera3HardwareInterface::fillPbStreamConfig(
4704 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4705 QCamera3Channel *channel, uint32_t streamIndex) {
4706 if (config == nullptr) {
4707 LOGE("%s: config is null", __FUNCTION__);
4708 return BAD_VALUE;
4709 }
4710
4711 if (channel == nullptr) {
4712 LOGE("%s: channel is null", __FUNCTION__);
4713 return BAD_VALUE;
4714 }
4715
4716 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4717 if (stream == nullptr) {
4718 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4719 return NAME_NOT_FOUND;
4720 }
4721
4722 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4723 if (streamInfo == nullptr) {
4724 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4725 return NAME_NOT_FOUND;
4726 }
4727
4728 config->id = pbStreamId;
4729 config->image.width = streamInfo->dim.width;
4730 config->image.height = streamInfo->dim.height;
4731 config->image.padding = 0;
4732 config->image.format = pbStreamFormat;
4733
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004734 uint32_t totalPlaneSize = 0;
4735
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004736 // Fill plane information.
4737 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4738 pbcamera::PlaneConfiguration plane;
4739 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4740 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4741 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004742
4743 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004744 }
4745
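    // Any bytes the frame length reserves beyond the summed plane sizes are
    // reported as image padding.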
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004746 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004747 return OK;
4748}
4749
Thierry Strudel3d639192016-09-09 11:52:26 -07004750/*===========================================================================
4751 * FUNCTION : processCaptureRequest
4752 *
4753 * DESCRIPTION: process a capture request from camera service
4754 *
4755 * PARAMETERS :
4756 * @request : request from framework to process
4757 *
4758 * RETURN :
4759 *
4760 *==========================================================================*/
4761int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004762 camera3_capture_request_t *request,
4763 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004764{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004765 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004766 int rc = NO_ERROR;
4767 int32_t request_id;
4768 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004769 bool isVidBufRequested = false;
4770 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004771 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004772
4773 pthread_mutex_lock(&mMutex);
4774
4775 // Validate current state
4776 switch (mState) {
4777 case CONFIGURED:
4778 case STARTED:
4779 /* valid state */
4780 break;
4781
4782 case ERROR:
4783 pthread_mutex_unlock(&mMutex);
4784 handleCameraDeviceError();
4785 return -ENODEV;
4786
4787 default:
4788 LOGE("Invalid state %d", mState);
4789 pthread_mutex_unlock(&mMutex);
4790 return -ENODEV;
4791 }
4792
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004793 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004794 if (rc != NO_ERROR) {
4795 LOGE("incoming request is not valid");
4796 pthread_mutex_unlock(&mMutex);
4797 return rc;
4798 }
4799
4800 meta = request->settings;
4801
4802 // For first capture request, send capture intent, and
4803 // stream on all streams
4804 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004805 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004806 // send an unconfigure to the backend so that the isp
4807 // resources are deallocated
4808 if (!mFirstConfiguration) {
4809 cam_stream_size_info_t stream_config_info;
4810 int32_t hal_version = CAM_HAL_V3;
4811 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4812 stream_config_info.buffer_info.min_buffers =
4813 MIN_INFLIGHT_REQUESTS;
4814 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004815 m_bIs4KVideo ? 0 :
4816 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004817 clear_metadata_buffer(mParameters);
4818 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4819 CAM_INTF_PARM_HAL_VERSION, hal_version);
4820 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4821 CAM_INTF_META_STREAM_INFO, stream_config_info);
4822 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4823 mParameters);
4824 if (rc < 0) {
4825 LOGE("set_parms for unconfigure failed");
4826 pthread_mutex_unlock(&mMutex);
4827 return rc;
4828 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004829
Thierry Strudel3d639192016-09-09 11:52:26 -07004830 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004831 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004832 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004833 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004834 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004835 property_get("persist.camera.is_type", is_type_value, "4");
4836 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4837 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4838 property_get("persist.camera.is_type_preview", is_type_value, "4");
4839 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4840 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004841
4842 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4843 int32_t hal_version = CAM_HAL_V3;
4844 uint8_t captureIntent =
4845 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4846 mCaptureIntent = captureIntent;
4847 clear_metadata_buffer(mParameters);
4848 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4849 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4850 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004851 if (mFirstConfiguration) {
4852 // configure instant AEC
4853 // Instant AEC is a session based parameter and it is needed only
4854 // once per complete session after open camera.
4855 // i.e. This is set only once for the first capture request, after open camera.
4856 setInstantAEC(meta);
4857 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004858 uint8_t fwkVideoStabMode=0;
4859 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4860 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4861 }
4862
Xue Tuecac74e2017-04-17 13:58:15 -07004863 // If the EIS setprop is enabled, turn EIS on only for video/preview streams
4864 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004865 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004866 int32_t vsMode;
4867 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4868 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4869 rc = BAD_VALUE;
4870 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004871 LOGD("setEis %d", setEis);
4872 bool eis3Supported = false;
4873 size_t count = IS_TYPE_MAX;
4874 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4875 for (size_t i = 0; i < count; i++) {
4876 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4877 eis3Supported = true;
4878 break;
4879 }
4880 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004881
4882 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004883 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004884 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4885 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004886 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4887 is_type = isTypePreview;
4888 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4889 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4890 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004891 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004892 } else {
4893 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004894 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004895 } else {
4896 is_type = IS_TYPE_NONE;
4897 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004898 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004899 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004900 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4901 }
4902 }
4903
4904 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4905 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4906
Thierry Strudel54dc9782017-02-15 12:12:10 -08004907 //Disable tintless only if the property is set to 0
4908 memset(prop, 0, sizeof(prop));
4909 property_get("persist.camera.tintless.enable", prop, "1");
4910 int32_t tintless_value = atoi(prop);
4911
Thierry Strudel3d639192016-09-09 11:52:26 -07004912 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4913 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004914
Thierry Strudel3d639192016-09-09 11:52:26 -07004915 //Disable CDS for HFR mode or if DIS/EIS is on.
4916 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4917 //after every configure_stream
4918 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4919 (m_bIsVideo)) {
4920 int32_t cds = CAM_CDS_MODE_OFF;
4921 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4922 CAM_INTF_PARM_CDS_MODE, cds))
4923 LOGE("Failed to disable CDS for HFR mode");
4924
4925 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004926
4927 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4928 uint8_t* use_av_timer = NULL;
4929
4930 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004931 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004932 use_av_timer = &m_debug_avtimer;
4933 }
4934 else{
4935 use_av_timer =
4936 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004937 if (use_av_timer) {
4938 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4939 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004940 }
4941
4942 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4943 rc = BAD_VALUE;
4944 }
4945 }
4946
Thierry Strudel3d639192016-09-09 11:52:26 -07004947 setMobicat();
4948
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004949 uint8_t nrMode = 0;
4950 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4951 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4952 }
4953
Thierry Strudel3d639192016-09-09 11:52:26 -07004954 /* Set fps and hfr mode while sending meta stream info so that sensor
4955 * can configure appropriate streaming mode */
4956 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004957 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4958 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004959 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4960 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004961 if (rc == NO_ERROR) {
4962 int32_t max_fps =
4963 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004964 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004965 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4966 }
4967 /* For HFR, more buffers are dequeued upfront to improve the performance */
4968 if (mBatchSize) {
4969 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4970 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4971 }
4972 }
4973 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004974 LOGE("setHalFpsRange failed");
4975 }
4976 }
4977 if (meta.exists(ANDROID_CONTROL_MODE)) {
4978 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4979 rc = extractSceneMode(meta, metaMode, mParameters);
4980 if (rc != NO_ERROR) {
4981 LOGE("extractSceneMode failed");
4982 }
4983 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004984 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004985
Thierry Strudel04e026f2016-10-10 11:27:36 -07004986 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4987 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4988 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4989 rc = setVideoHdrMode(mParameters, vhdr);
4990 if (rc != NO_ERROR) {
4991 LOGE("setVideoHDR is failed");
4992 }
4993 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004994
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004995 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004996 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004997 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004998 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4999 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5000 sensorModeFullFov)) {
5001 rc = BAD_VALUE;
5002 }
5003 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005004 //TODO: validate the arguments, HSV scenemode should have only the
5005 //advertised fps ranges
5006
5007 /*set the capture intent, hal version, tintless, stream info,
5008 *and DIS enable parameters to the backend*/
5009 LOGD("set_parms META_STREAM_INFO " );
5010 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005011 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5012 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005013 mStreamConfigInfo.type[i],
5014 mStreamConfigInfo.stream_sizes[i].width,
5015 mStreamConfigInfo.stream_sizes[i].height,
5016 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005017 mStreamConfigInfo.format[i],
5018 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005019 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005020
Thierry Strudel3d639192016-09-09 11:52:26 -07005021 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5022 mParameters);
5023 if (rc < 0) {
5024 LOGE("set_parms failed for hal version, stream info");
5025 }
5026
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005027 cam_sensor_mode_info_t sensorModeInfo = {};
5028 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005029 if (rc != NO_ERROR) {
5030 LOGE("Failed to get sensor output size");
5031 pthread_mutex_unlock(&mMutex);
5032 goto error_exit;
5033 }
5034
5035 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5036 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005037 sensorModeInfo.active_array_size.width,
5038 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005039
5040 /* Set batchmode before initializing channel. Since registerBuffer
5041 * internally initializes some of the channels, better set batchmode
5042 * even before first register buffer */
5043 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5044 it != mStreamInfo.end(); it++) {
5045 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5046 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5047 && mBatchSize) {
5048 rc = channel->setBatchSize(mBatchSize);
5049 //Disable per frame map unmap for HFR/batchmode case
5050 rc |= channel->setPerFrameMapUnmap(false);
5051 if (NO_ERROR != rc) {
5052 LOGE("Channel init failed %d", rc);
5053 pthread_mutex_unlock(&mMutex);
5054 goto error_exit;
5055 }
5056 }
5057 }
5058
5059 //First initialize all streams
5060 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5061 it != mStreamInfo.end(); it++) {
5062 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005063
5064 /* Initial value of NR mode is needed before stream on */
5065 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005066 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5067 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005068 setEis) {
5069 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5070 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5071 is_type = mStreamConfigInfo.is_type[i];
5072 break;
5073 }
5074 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005075 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005076 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005077 rc = channel->initialize(IS_TYPE_NONE);
5078 }
5079 if (NO_ERROR != rc) {
5080 LOGE("Channel initialization failed %d", rc);
5081 pthread_mutex_unlock(&mMutex);
5082 goto error_exit;
5083 }
5084 }
5085
5086 if (mRawDumpChannel) {
5087 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5088 if (rc != NO_ERROR) {
5089 LOGE("Error: Raw Dump Channel init failed");
5090 pthread_mutex_unlock(&mMutex);
5091 goto error_exit;
5092 }
5093 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005094 if (mHdrPlusRawSrcChannel) {
5095 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5096 if (rc != NO_ERROR) {
5097 LOGE("Error: HDR+ RAW Source Channel init failed");
5098 pthread_mutex_unlock(&mMutex);
5099 goto error_exit;
5100 }
5101 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005102 if (mSupportChannel) {
5103 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5104 if (rc < 0) {
5105 LOGE("Support channel initialization failed");
5106 pthread_mutex_unlock(&mMutex);
5107 goto error_exit;
5108 }
5109 }
5110 if (mAnalysisChannel) {
5111 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5112 if (rc < 0) {
5113 LOGE("Analysis channel initialization failed");
5114 pthread_mutex_unlock(&mMutex);
5115 goto error_exit;
5116 }
5117 }
5118 if (mDummyBatchChannel) {
5119 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5120 if (rc < 0) {
5121 LOGE("mDummyBatchChannel setBatchSize failed");
5122 pthread_mutex_unlock(&mMutex);
5123 goto error_exit;
5124 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005125 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005126 if (rc < 0) {
5127 LOGE("mDummyBatchChannel initialization failed");
5128 pthread_mutex_unlock(&mMutex);
5129 goto error_exit;
5130 }
5131 }
5132
5133 // Set bundle info
5134 rc = setBundleInfo();
5135 if (rc < 0) {
5136 LOGE("setBundleInfo failed %d", rc);
5137 pthread_mutex_unlock(&mMutex);
5138 goto error_exit;
5139 }
5140
5141 //update settings from app here
5142 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5143 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5144 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5145 }
5146 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5147 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5148 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5149 }
5150 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5151 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5152 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5153
5154 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5155 (mLinkedCameraId != mCameraId) ) {
5156 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5157 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005158 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005159 goto error_exit;
5160 }
5161 }
5162
5163 // add bundle related cameras
5164 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5165 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005166 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5167 &m_pDualCamCmdPtr->bundle_info;
5168 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005169 if (mIsDeviceLinked)
5170 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5171 else
5172 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5173
5174 pthread_mutex_lock(&gCamLock);
5175
5176 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5177 LOGE("Dualcam: Invalid Session Id ");
5178 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005179 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005180 goto error_exit;
5181 }
5182
5183 if (mIsMainCamera == 1) {
5184 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5185 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005186 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005187 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005188 // related session id should be session id of linked session
5189 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5190 } else {
5191 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5192 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005193 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005194 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005195 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5196 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005197 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005198 pthread_mutex_unlock(&gCamLock);
5199
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005200 rc = mCameraHandle->ops->set_dual_cam_cmd(
5201 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005202 if (rc < 0) {
5203 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005204 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005205 goto error_exit;
5206 }
5207 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005208 goto no_error;
5209error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005210 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005211 return rc;
5212no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005213 mWokenUpByDaemon = false;
5214 mPendingLiveRequest = 0;
5215 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005216 }
5217
5218 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005219 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005220
5221 if (mFlushPerf) {
5222 //we cannot accept any requests during flush
5223 LOGE("process_capture_request cannot proceed during flush");
5224 pthread_mutex_unlock(&mMutex);
5225 return NO_ERROR; //should return an error
5226 }
5227
5228 if (meta.exists(ANDROID_REQUEST_ID)) {
5229 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5230 mCurrentRequestId = request_id;
5231 LOGD("Received request with id: %d", request_id);
5232 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5233 LOGE("Unable to find request id field, \
5234 & no previous id available");
5235 pthread_mutex_unlock(&mMutex);
5236 return NAME_NOT_FOUND;
5237 } else {
5238 LOGD("Re-using old request id");
5239 request_id = mCurrentRequestId;
5240 }
5241
5242 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5243 request->num_output_buffers,
5244 request->input_buffer,
5245 frameNumber);
5246 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005247 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005248 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005249 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005250 uint32_t snapshotStreamId = 0;
5251 for (size_t i = 0; i < request->num_output_buffers; i++) {
5252 const camera3_stream_buffer_t& output = request->output_buffers[i];
5253 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5254
Emilian Peev7650c122017-01-19 08:24:33 -08005255 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5256 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005257 //FIXME??: Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005258 blob_request = 1;
5259 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5260 }
5261
5262 if (output.acquire_fence != -1) {
5263 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5264 close(output.acquire_fence);
5265 if (rc != OK) {
5266 LOGE("sync wait failed %d", rc);
5267 pthread_mutex_unlock(&mMutex);
5268 return rc;
5269 }
5270 }
5271
Emilian Peev0f3c3162017-03-15 12:57:46 +00005272 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5273 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005274 depthRequestPresent = true;
5275 continue;
5276 }
5277
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005278 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005279 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005280
5281 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5282 isVidBufRequested = true;
5283 }
5284 }
5285
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005286 //FIXME: Add checks in validateCaptureRequest to ensure there are no dups
5287 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5288 itr++) {
5289 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5290 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5291 channel->getStreamID(channel->getStreamTypeMask());
5292
5293 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5294 isVidBufRequested = true;
5295 }
5296 }
5297
Thierry Strudel3d639192016-09-09 11:52:26 -07005298 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005299 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005300 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005301 }
5302 if (blob_request && mRawDumpChannel) {
5303 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005304 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005305 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005306 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005307 }
5308
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005309 {
5310 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5311 // Request a RAW buffer if
5312 // 1. mHdrPlusRawSrcChannel is valid.
5313 // 2. frameNumber is a multiple of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5314 // 3. There is no pending HDR+ request.
5315 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5316 mHdrPlusPendingRequests.size() == 0) {
5317 streamsArray.stream_request[streamsArray.num_streams].streamID =
5318 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5319 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5320 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005321 }
5322
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005323 //extract capture intent
5324 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5325 mCaptureIntent =
5326 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5327 }
5328
5329 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5330 mCacMode =
5331 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5332 }
5333
5334 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005335 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005336
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005337 {
5338 Mutex::Autolock l(gHdrPlusClientLock);
5339 // If this request has a still capture intent, try to submit an HDR+ request.
5340 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5341 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5342 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5343 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005344 }
5345
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005346 if (hdrPlusRequest) {
5347 // For a HDR+ request, just set the frame parameters.
5348 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5349 if (rc < 0) {
5350 LOGE("fail to set frame parameters");
5351 pthread_mutex_unlock(&mMutex);
5352 return rc;
5353 }
5354 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005355 /* Parse the settings:
5356 * - For every request in NORMAL MODE
5357 * - For every request in HFR mode during preview only case
5358 * - For first request of every batch in HFR mode during video
5359 * recording. In batchmode the same settings except frame number is
5360 * repeated in each request of the batch.
5361 */
5362 if (!mBatchSize ||
5363 (mBatchSize && !isVidBufRequested) ||
5364 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005365 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005366 if (rc < 0) {
5367 LOGE("fail to set frame parameters");
5368 pthread_mutex_unlock(&mMutex);
5369 return rc;
5370 }
5371 }
5372 /* For batchMode HFR, setFrameParameters is not called for every
5373 * request; only the frame number of the latest request is parsed.
5374 * Keep track of the first and last frame numbers in a batch so that
5375 * metadata for all frame numbers of the batch can be duplicated in
5376 * handleBatchMetadata */
5377 if (mBatchSize) {
5378 if (!mToBeQueuedVidBufs) {
5379 //start of the batch
5380 mFirstFrameNumberInBatch = request->frame_number;
5381 }
5382 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5383 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5384 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005385 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005386 return BAD_VALUE;
5387 }
5388 }
5389 if (mNeedSensorRestart) {
5390 /* Unlock the mutex as restartSensor waits on the channels to be
5391 * stopped, which in turn calls stream callback functions -
5392 * handleBufferWithLock and handleMetadataWithLock */
5393 pthread_mutex_unlock(&mMutex);
5394 rc = dynamicUpdateMetaStreamInfo();
5395 if (rc != NO_ERROR) {
5396 LOGE("Restarting the sensor failed");
5397 return BAD_VALUE;
5398 }
5399 mNeedSensorRestart = false;
5400 pthread_mutex_lock(&mMutex);
5401 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005402 if(mResetInstantAEC) {
5403 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5404 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5405 mResetInstantAEC = false;
5406 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005407 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005408 if (request->input_buffer->acquire_fence != -1) {
5409 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5410 close(request->input_buffer->acquire_fence);
5411 if (rc != OK) {
5412 LOGE("input buffer sync wait failed %d", rc);
5413 pthread_mutex_unlock(&mMutex);
5414 return rc;
5415 }
5416 }
5417 }
5418
5419 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5420 mLastCustIntentFrmNum = frameNumber;
5421 }
5422 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005423 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005424 pendingRequestIterator latestRequest;
5425 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005426 pendingRequest.num_buffers = depthRequestPresent ?
5427 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005428 pendingRequest.request_id = request_id;
5429 pendingRequest.blob_request = blob_request;
5430 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005431 if (request->input_buffer) {
5432 pendingRequest.input_buffer =
5433 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5434 *(pendingRequest.input_buffer) = *(request->input_buffer);
5435 pInputBuffer = pendingRequest.input_buffer;
5436 } else {
5437 pendingRequest.input_buffer = NULL;
5438 pInputBuffer = NULL;
5439 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005440 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005441
5442 pendingRequest.pipeline_depth = 0;
5443 pendingRequest.partial_result_cnt = 0;
5444 extractJpegMetadata(mCurJpegMeta, request);
5445 pendingRequest.jpegMetadata = mCurJpegMeta;
5446 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005447 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005448 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5449 mHybridAeEnable =
5450 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5451 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005452
5453 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5454 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005455 /* DevCamDebug metadata processCaptureRequest */
5456 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5457 mDevCamDebugMetaEnable =
5458 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5459 }
5460 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5461 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005462
5463 //extract CAC info
5464 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5465 mCacMode =
5466 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5467 }
5468 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005469 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005470
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005471 // extract enableZsl info
5472 if (gExposeEnableZslKey) {
5473 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5474 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5475 mZslEnabled = pendingRequest.enableZsl;
5476 } else {
5477 pendingRequest.enableZsl = mZslEnabled;
5478 }
5479 }
5480
Thierry Strudel3d639192016-09-09 11:52:26 -07005481 PendingBuffersInRequest bufsForCurRequest;
5482 bufsForCurRequest.frame_number = frameNumber;
5483 // Mark current timestamp for the new request
5484 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005485 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005486
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005487 if (hdrPlusRequest) {
5488 // Save settings for this request.
5489 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5490 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5491
5492 // Add to pending HDR+ request queue.
5493 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5494 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5495
5496 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5497 }
5498
Thierry Strudel3d639192016-09-09 11:52:26 -07005499 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005500 if ((request->output_buffers[i].stream->data_space ==
5501 HAL_DATASPACE_DEPTH) &&
5502 (HAL_PIXEL_FORMAT_BLOB ==
5503 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005504 continue;
5505 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005506 RequestedBufferInfo requestedBuf;
5507 memset(&requestedBuf, 0, sizeof(requestedBuf));
5508 requestedBuf.stream = request->output_buffers[i].stream;
5509 requestedBuf.buffer = NULL;
5510 pendingRequest.buffers.push_back(requestedBuf);
5511
5512 // Add the buffer handle to the pending buffers list
5513 PendingBufferInfo bufferInfo;
5514 bufferInfo.buffer = request->output_buffers[i].buffer;
5515 bufferInfo.stream = request->output_buffers[i].stream;
5516 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5517 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5518 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5519 frameNumber, bufferInfo.buffer,
5520 channel->getStreamTypeMask(), bufferInfo.stream->format);
5521 }
5522 // Add this request packet into mPendingBuffersMap
5523 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5524 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5525 mPendingBuffersMap.get_num_overall_buffers());
5526
5527 latestRequest = mPendingRequestsList.insert(
5528 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005529
5530 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5531 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005532 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005533 for (size_t i = 0; i < request->num_output_buffers; i++) {
5534 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5535 }
5536
Thierry Strudel3d639192016-09-09 11:52:26 -07005537 if(mFlush) {
5538 LOGI("mFlush is true");
5539 pthread_mutex_unlock(&mMutex);
5540 return NO_ERROR;
5541 }
5542
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005543 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5544 // channel.
5545 if (!hdrPlusRequest) {
5546 int indexUsed;
5547 // Notify metadata channel we receive a request
5548 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005549
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005550 if(request->input_buffer != NULL){
5551 LOGD("Input request, frame_number %d", frameNumber);
5552 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5553 if (NO_ERROR != rc) {
5554 LOGE("fail to set reproc parameters");
5555 pthread_mutex_unlock(&mMutex);
5556 return rc;
5557 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005558 }
5559
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005560 // Call request on other streams
5561 uint32_t streams_need_metadata = 0;
5562 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5563 for (size_t i = 0; i < request->num_output_buffers; i++) {
5564 const camera3_stream_buffer_t& output = request->output_buffers[i];
5565 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5566
5567 if (channel == NULL) {
5568 LOGW("invalid channel pointer for stream");
5569 continue;
5570 }
5571
5572 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5573 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5574 output.buffer, request->input_buffer, frameNumber);
5575 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005576 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005577 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5578 if (rc < 0) {
5579 LOGE("Fail to request on picture channel");
5580 pthread_mutex_unlock(&mMutex);
5581 return rc;
5582 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005583 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005584 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5585 assert(NULL != mDepthChannel);
5586 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005587
Emilian Peev7650c122017-01-19 08:24:33 -08005588 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5589 if (rc < 0) {
5590 LOGE("Fail to map on depth buffer");
5591 pthread_mutex_unlock(&mMutex);
5592 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005593 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005594 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005595 } else {
5596 LOGD("snapshot request with buffer %p, frame_number %d",
5597 output.buffer, frameNumber);
5598 if (!request->settings) {
5599 rc = channel->request(output.buffer, frameNumber,
5600 NULL, mPrevParameters, indexUsed);
5601 } else {
5602 rc = channel->request(output.buffer, frameNumber,
5603 NULL, mParameters, indexUsed);
5604 }
5605 if (rc < 0) {
5606 LOGE("Fail to request on picture channel");
5607 pthread_mutex_unlock(&mMutex);
5608 return rc;
5609 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005610
Emilian Peev7650c122017-01-19 08:24:33 -08005611 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5612 uint32_t j = 0;
5613 for (j = 0; j < streamsArray.num_streams; j++) {
5614 if (streamsArray.stream_request[j].streamID == streamId) {
5615 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5616 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5617 else
5618 streamsArray.stream_request[j].buf_index = indexUsed;
5619 break;
5620 }
5621 }
5622 if (j == streamsArray.num_streams) {
5623 LOGE("Did not find matching stream to update index");
5624 assert(0);
5625 }
5626
5627 pendingBufferIter->need_metadata = true;
5628 streams_need_metadata++;
5629 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005630 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005631 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5632 bool needMetadata = false;
5633 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5634 rc = yuvChannel->request(output.buffer, frameNumber,
5635 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5636 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005637 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005638 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005639 pthread_mutex_unlock(&mMutex);
5640 return rc;
5641 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005642
5643 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5644 uint32_t j = 0;
5645 for (j = 0; j < streamsArray.num_streams; j++) {
5646 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005647 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5648 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5649 else
5650 streamsArray.stream_request[j].buf_index = indexUsed;
5651 break;
5652 }
5653 }
5654 if (j == streamsArray.num_streams) {
5655 LOGE("Did not find matching stream to update index");
5656 assert(0);
5657 }
5658
5659 pendingBufferIter->need_metadata = needMetadata;
5660 if (needMetadata)
5661 streams_need_metadata += 1;
5662 LOGD("calling YUV channel request, need_metadata is %d",
5663 needMetadata);
5664 } else {
5665 LOGD("request with buffer %p, frame_number %d",
5666 output.buffer, frameNumber);
5667
5668 rc = channel->request(output.buffer, frameNumber, indexUsed);
5669
5670 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5671 uint32_t j = 0;
5672 for (j = 0; j < streamsArray.num_streams; j++) {
5673 if (streamsArray.stream_request[j].streamID == streamId) {
5674 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5675 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5676 else
5677 streamsArray.stream_request[j].buf_index = indexUsed;
5678 break;
5679 }
5680 }
5681 if (j == streamsArray.num_streams) {
5682 LOGE("Did not find matching stream to update index");
5683 assert(0);
5684 }
5685
5686 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5687 && mBatchSize) {
5688 mToBeQueuedVidBufs++;
5689 if (mToBeQueuedVidBufs == mBatchSize) {
5690 channel->queueBatchBuf();
5691 }
5692 }
5693 if (rc < 0) {
5694 LOGE("request failed");
5695 pthread_mutex_unlock(&mMutex);
5696 return rc;
5697 }
5698 }
5699 pendingBufferIter++;
5700 }
5701
5702 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5703 itr++) {
5704 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5705
5706 if (channel == NULL) {
5707 LOGE("invalid channel pointer for stream");
5708 assert(0);
5709 return BAD_VALUE;
5710 }
5711
5712 InternalRequest requestedStream;
5713 requestedStream = (*itr);
5714
5715
5716 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5717 LOGD("snapshot request internally input buffer %p, frame_number %d",
5718 request->input_buffer, frameNumber);
5719 if(request->input_buffer != NULL){
5720 rc = channel->request(NULL, frameNumber,
5721 pInputBuffer, &mReprocMeta, indexUsed, true,
5722 requestedStream.meteringOnly);
5723 if (rc < 0) {
5724 LOGE("Fail to request on picture channel");
5725 pthread_mutex_unlock(&mMutex);
5726 return rc;
5727 }
5728 } else {
5729 LOGD("snapshot request with frame_number %d", frameNumber);
5730 if (!request->settings) {
5731 rc = channel->request(NULL, frameNumber,
5732 NULL, mPrevParameters, indexUsed, true,
5733 requestedStream.meteringOnly);
5734 } else {
5735 rc = channel->request(NULL, frameNumber,
5736 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5737 }
5738 if (rc < 0) {
5739 LOGE("Fail to request on picture channel");
5740 pthread_mutex_unlock(&mMutex);
5741 return rc;
5742 }
5743
5744 if ((*itr).meteringOnly != 1) {
5745 requestedStream.need_metadata = 1;
5746 streams_need_metadata++;
5747 }
5748 }
5749
5750 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5751 uint32_t j = 0;
5752 for (j = 0; j < streamsArray.num_streams; j++) {
5753 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005754 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5755 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5756 else
5757 streamsArray.stream_request[j].buf_index = indexUsed;
5758 break;
5759 }
5760 }
5761 if (j == streamsArray.num_streams) {
5762 LOGE("Did not find matching stream to update index");
5763 assert(0);
5764 }
5765
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005766 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005767 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005768 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005769 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005770 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005771 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005772 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005773
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005774 //If 2 streams have need_metadata set to true, fail the request, unless
5775 //we copy/reference count the metadata buffer
5776 if (streams_need_metadata > 1) {
5777 LOGE("not supporting request in which two streams requires"
5778 " 2 HAL metadata for reprocessing");
5779 pthread_mutex_unlock(&mMutex);
5780 return -EINVAL;
5781 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005782
Emilian Peev656e4fa2017-06-02 16:47:04 +01005783 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5784 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5785 if (depthRequestPresent && mDepthChannel) {
5786 if (request->settings) {
5787 camera_metadata_ro_entry entry;
5788 if (find_camera_metadata_ro_entry(request->settings,
5789 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5790 if (entry.data.u8[0]) {
5791 pdafEnable = CAM_PD_DATA_ENABLED;
5792 } else {
5793 pdafEnable = CAM_PD_DATA_SKIP;
5794 }
5795 mDepthCloudMode = pdafEnable;
5796 } else {
5797 pdafEnable = mDepthCloudMode;
5798 }
5799 } else {
5800 pdafEnable = mDepthCloudMode;
5801 }
5802 }
5803
Emilian Peev7650c122017-01-19 08:24:33 -08005804 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5805 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5806 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5807 pthread_mutex_unlock(&mMutex);
5808 return BAD_VALUE;
5809 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005810
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005811 if (request->input_buffer == NULL) {
5812 /* Set the parameters to backend:
5813 * - For every request in NORMAL MODE
5814 * - For every request in HFR mode during preview only case
5815 * - Once every batch in HFR mode during video recording
5816 */
5817 if (!mBatchSize ||
5818 (mBatchSize && !isVidBufRequested) ||
5819 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5820 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5821 mBatchSize, isVidBufRequested,
5822 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005823
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005824 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5825 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5826 uint32_t m = 0;
5827 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5828 if (streamsArray.stream_request[k].streamID ==
5829 mBatchedStreamsArray.stream_request[m].streamID)
5830 break;
5831 }
5832 if (m == mBatchedStreamsArray.num_streams) {
5833 mBatchedStreamsArray.stream_request\
5834 [mBatchedStreamsArray.num_streams].streamID =
5835 streamsArray.stream_request[k].streamID;
5836 mBatchedStreamsArray.stream_request\
5837 [mBatchedStreamsArray.num_streams].buf_index =
5838 streamsArray.stream_request[k].buf_index;
5839 mBatchedStreamsArray.num_streams =
5840 mBatchedStreamsArray.num_streams + 1;
5841 }
5842 }
5843 streamsArray = mBatchedStreamsArray;
5844 }
5845 /* Update stream id of all the requested buffers */
5846 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5847 streamsArray)) {
5848 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005849 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005850 return BAD_VALUE;
5851 }
5852
5853 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5854 mParameters);
5855 if (rc < 0) {
5856 LOGE("set_parms failed");
5857 }
5858 /* reset to zero because the batch is queued */
5859 mToBeQueuedVidBufs = 0;
5860 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5861 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5862 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005863 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5864 uint32_t m = 0;
5865 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5866 if (streamsArray.stream_request[k].streamID ==
5867 mBatchedStreamsArray.stream_request[m].streamID)
5868 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005869 }
5870 if (m == mBatchedStreamsArray.num_streams) {
5871 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5872 streamID = streamsArray.stream_request[k].streamID;
5873 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5874 buf_index = streamsArray.stream_request[k].buf_index;
5875 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5876 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005877 }
5878 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005879 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005880
5881 // Start all streams after the first setting is sent, so that the
5882 // setting can be applied sooner: (0 + apply_delay)th frame.
5883 if (mState == CONFIGURED && mChannelHandle) {
5884 //Then start them.
5885 LOGH("Start META Channel");
5886 rc = mMetadataChannel->start();
5887 if (rc < 0) {
5888 LOGE("META channel start failed");
5889 pthread_mutex_unlock(&mMutex);
5890 return rc;
5891 }
5892
5893 if (mAnalysisChannel) {
5894 rc = mAnalysisChannel->start();
5895 if (rc < 0) {
5896 LOGE("Analysis channel start failed");
5897 mMetadataChannel->stop();
5898 pthread_mutex_unlock(&mMutex);
5899 return rc;
5900 }
5901 }
5902
5903 if (mSupportChannel) {
5904 rc = mSupportChannel->start();
5905 if (rc < 0) {
5906 LOGE("Support channel start failed");
5907 mMetadataChannel->stop();
5908 /* Although support and analysis are mutually exclusive today,
5909 adding it in any case for future proofing */
5910 if (mAnalysisChannel) {
5911 mAnalysisChannel->stop();
5912 }
5913 pthread_mutex_unlock(&mMutex);
5914 return rc;
5915 }
5916 }
5917 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5918 it != mStreamInfo.end(); it++) {
5919 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5920 LOGH("Start Processing Channel mask=%d",
5921 channel->getStreamTypeMask());
5922 rc = channel->start();
5923 if (rc < 0) {
5924 LOGE("channel start failed");
5925 pthread_mutex_unlock(&mMutex);
5926 return rc;
5927 }
5928 }
5929
5930 if (mRawDumpChannel) {
5931 LOGD("Starting raw dump stream");
5932 rc = mRawDumpChannel->start();
5933 if (rc != NO_ERROR) {
5934 LOGE("Error Starting Raw Dump Channel");
5935 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5936 it != mStreamInfo.end(); it++) {
5937 QCamera3Channel *channel =
5938 (QCamera3Channel *)(*it)->stream->priv;
5939 LOGH("Stopping Processing Channel mask=%d",
5940 channel->getStreamTypeMask());
5941 channel->stop();
5942 }
5943 if (mSupportChannel)
5944 mSupportChannel->stop();
5945 if (mAnalysisChannel) {
5946 mAnalysisChannel->stop();
5947 }
5948 mMetadataChannel->stop();
5949 pthread_mutex_unlock(&mMutex);
5950 return rc;
5951 }
5952 }
5953
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005954 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005955 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005956 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005957 if (rc != NO_ERROR) {
5958 LOGE("start_channel failed %d", rc);
5959 pthread_mutex_unlock(&mMutex);
5960 return rc;
5961 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005962
5963 {
5964 // Configure Easel for stream on.
5965 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005966
5967 // Now that sensor mode should have been selected, get the selected sensor mode
5968 // info.
5969 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5970 getCurrentSensorModeInfo(mSensorModeInfo);
5971
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005972 if (EaselManagerClientOpened) {
5973 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chen44abb642017-06-02 18:00:38 -07005974 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
5975 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005976 if (rc != OK) {
5977 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5978 mCameraId, mSensorModeInfo.op_pixel_clk);
5979 pthread_mutex_unlock(&mMutex);
5980 return rc;
5981 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005982 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005983 }
5984 }
5985
5986 // Start sensor streaming.
5987 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5988 mChannelHandle);
5989 if (rc != NO_ERROR) {
5990 LOGE("start_sensor_stream_on failed %d", rc);
5991 pthread_mutex_unlock(&mMutex);
5992 return rc;
5993 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005994 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005995 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005996 }
5997
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005998 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen3b630e52017-06-02 15:39:47 -07005999 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || mCameraId == 0) {
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006000 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen44abb642017-06-02 18:00:38 -07006001 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006002 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6003 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6004 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6005 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
6006 rc = enableHdrPlusModeLocked();
6007 if (rc != OK) {
6008 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6009 pthread_mutex_unlock(&mMutex);
6010 return rc;
6011 }
6012
6013 mFirstPreviewIntentSeen = true;
6014 }
6015 }
6016
Thierry Strudel3d639192016-09-09 11:52:26 -07006017 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6018
6019 mState = STARTED;
6020 // Added a timed condition wait
6021 struct timespec ts;
6022 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006023 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006024 if (rc < 0) {
6025 isValidTimeout = 0;
6026 LOGE("Error reading the real time clock!!");
6027 }
6028 else {
6029 // Use a 5 sec timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006030 int64_t timeout = 5;
6031 {
6032 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6033 // If there is a pending HDR+ request, the following requests may be blocked until the
6034 // HDR+ request is done. So allow a longer timeout.
6035 if (mHdrPlusPendingRequests.size() > 0) {
6036 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6037 }
6038 }
6039 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006040 }
6041 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006042 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006043 (mState != ERROR) && (mState != DEINIT)) {
6044 if (!isValidTimeout) {
6045 LOGD("Blocking on conditional wait");
6046 pthread_cond_wait(&mRequestCond, &mMutex);
6047 }
6048 else {
6049 LOGD("Blocking on timed conditional wait");
6050 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6051 if (rc == ETIMEDOUT) {
6052 rc = -ENODEV;
6053 LOGE("Unblocked on timeout!!!!");
6054 break;
6055 }
6056 }
6057 LOGD("Unblocked");
6058 if (mWokenUpByDaemon) {
6059 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006060 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006061 break;
6062 }
6063 }
6064 pthread_mutex_unlock(&mMutex);
6065
6066 return rc;
6067}
6068
6069/*===========================================================================
6070 * FUNCTION : dump
6071 *
6072 * DESCRIPTION: Dumps pending request, pending buffer and pending frame
6073 * drop information to the given file descriptor.
6074 * PARAMETERS :
6075 *   @fd : file descriptor to write the dump to
6076 *
6077 * RETURN : None
6078 *==========================================================================*/
6079void QCamera3HardwareInterface::dump(int fd)
6080{
6081 pthread_mutex_lock(&mMutex);
6082 dprintf(fd, "\n Camera HAL3 information Begin \n");
6083
6084 dprintf(fd, "\nNumber of pending requests: %zu \n",
6085 mPendingRequestsList.size());
6086 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6087 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6088 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6089 for(pendingRequestIterator i = mPendingRequestsList.begin();
6090 i != mPendingRequestsList.end(); i++) {
6091 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6092 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6093 i->input_buffer);
6094 }
6095 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6096 mPendingBuffersMap.get_num_overall_buffers());
6097 dprintf(fd, "-------+------------------\n");
6098 dprintf(fd, " Frame | Stream type mask \n");
6099 dprintf(fd, "-------+------------------\n");
6100 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6101 for(auto &j : req.mPendingBufferList) {
6102 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6103 dprintf(fd, " %5d | %11d \n",
6104 req.frame_number, channel->getStreamTypeMask());
6105 }
6106 }
6107 dprintf(fd, "-------+------------------\n");
6108
6109 dprintf(fd, "\nPending frame drop list: %zu\n",
6110 mPendingFrameDropList.size());
6111 dprintf(fd, "-------+-----------\n");
6112 dprintf(fd, " Frame | Stream ID \n");
6113 dprintf(fd, "-------+-----------\n");
6114 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6115 i != mPendingFrameDropList.end(); i++) {
6116 dprintf(fd, " %5d | %9d \n",
6117 i->frame_number, i->stream_ID);
6118 }
6119 dprintf(fd, "-------+-----------\n");
6120
6121 dprintf(fd, "\n Camera HAL3 information End \n");
6122
6123 /* use dumpsys media.camera as trigger to send update debug level event */
6124 mUpdateDebugLevel = true;
6125 pthread_mutex_unlock(&mMutex);
6126 return;
6127}
6128
6129/*===========================================================================
6130 * FUNCTION : flush
6131 *
6132 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6133 * conditionally restarts channels
6134 *
6135 * PARAMETERS :
6136 * @ restartChannels: re-start all channels
6137 *
6138 *
6139 * RETURN :
6140 * 0 on success
6141 * Error code on failure
6142 *==========================================================================*/
6143int QCamera3HardwareInterface::flush(bool restartChannels)
6144{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006145 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006146 int32_t rc = NO_ERROR;
6147
6148 LOGD("Unblocking Process Capture Request");
6149 pthread_mutex_lock(&mMutex);
6150 mFlush = true;
6151 pthread_mutex_unlock(&mMutex);
6152
6153 rc = stopAllChannels();
6154 // unlink of dualcam
6155 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006156 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6157 &m_pDualCamCmdPtr->bundle_info;
6158 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006159 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6160 pthread_mutex_lock(&gCamLock);
6161
6162 if (mIsMainCamera == 1) {
6163 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6164 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006165 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006166 // related session id should be session id of linked session
6167 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6168 } else {
6169 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6170 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006171 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006172 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6173 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006174 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006175 pthread_mutex_unlock(&gCamLock);
6176
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006177 rc = mCameraHandle->ops->set_dual_cam_cmd(
6178 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006179 if (rc < 0) {
6180 LOGE("Dualcam: Unlink failed, but still proceed to close");
6181 }
6182 }
6183
6184 if (rc < 0) {
6185 LOGE("stopAllChannels failed");
6186 return rc;
6187 }
6188 if (mChannelHandle) {
6189 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6190 mChannelHandle);
6191 }
6192
6193 // Reset bundle info
6194 rc = setBundleInfo();
6195 if (rc < 0) {
6196 LOGE("setBundleInfo failed %d", rc);
6197 return rc;
6198 }
6199
6200 // Mutex Lock
6201 pthread_mutex_lock(&mMutex);
6202
6203 // Unblock process_capture_request
6204 mPendingLiveRequest = 0;
6205 pthread_cond_signal(&mRequestCond);
6206
6207 rc = notifyErrorForPendingRequests();
6208 if (rc < 0) {
6209 LOGE("notifyErrorForPendingRequests failed");
6210 pthread_mutex_unlock(&mMutex);
6211 return rc;
6212 }
6213
6214 mFlush = false;
6215
6216 // Start the Streams/Channels
6217 if (restartChannels) {
6218 rc = startAllChannels();
6219 if (rc < 0) {
6220 LOGE("startAllChannels failed");
6221 pthread_mutex_unlock(&mMutex);
6222 return rc;
6223 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006224 if (mChannelHandle) {
6225 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006226 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006227 if (rc < 0) {
6228 LOGE("start_channel failed");
6229 pthread_mutex_unlock(&mMutex);
6230 return rc;
6231 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006232 }
6233 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006234 pthread_mutex_unlock(&mMutex);
6235
6236 return 0;
6237}
6238
6239/*===========================================================================
6240 * FUNCTION : flushPerf
6241 *
6242 * DESCRIPTION: This is the performance optimization version of flush that does
6243 * not use stream off, rather flushes the system
6244 *
6245 * PARAMETERS :
6246 *
6247 *
6248 * RETURN : 0 : success
6249 * -EINVAL: input is malformed (device is not valid)
6250 * -ENODEV: if the device has encountered a serious error
6251 *==========================================================================*/
6252int QCamera3HardwareInterface::flushPerf()
6253{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006254 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006255 int32_t rc = 0;
6256 struct timespec timeout;
6257 bool timed_wait = false;
6258
6259 pthread_mutex_lock(&mMutex);
6260 mFlushPerf = true;
6261 mPendingBuffersMap.numPendingBufsAtFlush =
6262 mPendingBuffersMap.get_num_overall_buffers();
6263 LOGD("Calling flush. Wait for %d buffers to return",
6264 mPendingBuffersMap.numPendingBufsAtFlush);
6265
6266 /* send the flush event to the backend */
6267 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6268 if (rc < 0) {
6269 LOGE("Error in flush: IOCTL failure");
6270 mFlushPerf = false;
6271 pthread_mutex_unlock(&mMutex);
6272 return -ENODEV;
6273 }
6274
6275 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6276 LOGD("No pending buffers in HAL, return flush");
6277 mFlushPerf = false;
6278 pthread_mutex_unlock(&mMutex);
6279 return rc;
6280 }
6281
6282 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006283 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006284 if (rc < 0) {
6285 LOGE("Error reading the real time clock, cannot use timed wait");
6286 } else {
6287 timeout.tv_sec += FLUSH_TIMEOUT;
6288 timed_wait = true;
6289 }
6290
6291 //Block on conditional variable
6292 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6293 LOGD("Waiting on mBuffersCond");
6294 if (!timed_wait) {
6295 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6296 if (rc != 0) {
6297 LOGE("pthread_cond_wait failed due to rc = %s",
6298 strerror(rc));
6299 break;
6300 }
6301 } else {
6302 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6303 if (rc != 0) {
6304 LOGE("pthread_cond_timedwait failed due to rc = %s",
6305 strerror(rc));
6306 break;
6307 }
6308 }
6309 }
6310 if (rc != 0) {
6311 mFlushPerf = false;
6312 pthread_mutex_unlock(&mMutex);
6313 return -ENODEV;
6314 }
6315
6316 LOGD("Received buffers, now safe to return them");
6317
6318 //make sure the channels handle flush
6319 //currently only required for the picture channel to release snapshot resources
6320 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6321 it != mStreamInfo.end(); it++) {
6322 QCamera3Channel *channel = (*it)->channel;
6323 if (channel) {
6324 rc = channel->flush();
6325 if (rc) {
6326 LOGE("Flushing the channels failed with error %d", rc);
6327 // even though the channel flush failed we need to continue and
6328 // return the buffers we have to the framework, however the return
6329 // value will be an error
6330 rc = -ENODEV;
6331 }
6332 }
6333 }
6334
6335 /* notify the frameworks and send errored results */
6336 rc = notifyErrorForPendingRequests();
6337 if (rc < 0) {
6338 LOGE("notifyErrorForPendingRequests failed");
6339 pthread_mutex_unlock(&mMutex);
6340 return rc;
6341 }
6342
6343 //unblock process_capture_request
6344 mPendingLiveRequest = 0;
6345 unblockRequestIfNecessary();
6346
6347 mFlushPerf = false;
6348 pthread_mutex_unlock(&mMutex);
6349 LOGD ("Flush Operation complete. rc = %d", rc);
6350 return rc;
6351}
6352
6353/*===========================================================================
6354 * FUNCTION : handleCameraDeviceError
6355 *
6356 * DESCRIPTION: This function performs an internal flush, notifies the
6357 * framework of the error and updates the state variable.
6358 *
6359 * PARAMETERS : None
6360 *
6361 * RETURN : NO_ERROR on Success
6362 * Error code on failure
6363 *==========================================================================*/
6364int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6365{
6366 int32_t rc = NO_ERROR;
6367
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006368 {
6369 Mutex::Autolock lock(mFlushLock);
6370 pthread_mutex_lock(&mMutex);
6371 if (mState != ERROR) {
6372 //if mState != ERROR, nothing to be done
6373 pthread_mutex_unlock(&mMutex);
6374 return NO_ERROR;
6375 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006376 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006377
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006378 rc = flush(false /* restart channels */);
6379 if (NO_ERROR != rc) {
6380 LOGE("internal flush to handle mState = ERROR failed");
6381 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006382
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006383 pthread_mutex_lock(&mMutex);
6384 mState = DEINIT;
6385 pthread_mutex_unlock(&mMutex);
6386 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006387
6388 camera3_notify_msg_t notify_msg;
6389 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6390 notify_msg.type = CAMERA3_MSG_ERROR;
6391 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6392 notify_msg.message.error.error_stream = NULL;
6393 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006394 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006395
6396 return rc;
6397}
6398
6399/*===========================================================================
6400 * FUNCTION : captureResultCb
6401 *
6402 * DESCRIPTION: Callback handler for all capture result
6403 * (streams, as well as metadata)
6404 *
6405 * PARAMETERS :
6406 * @metadata_buf : metadata information; NULL for buffer-only callbacks
6407 * @buffer : actual gralloc buffer to be returned to frameworks;
6408 * NULL if metadata.
6409 * @frame_number / @isInputBuffer : frame number of the request and whether this callback is for an input (reprocess) buffer
6410 * RETURN : NONE
6411 *==========================================================================*/
6412void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6413 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6414{
6415 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006416 pthread_mutex_lock(&mMutex);
6417 uint8_t batchSize = mBatchSize;
6418 pthread_mutex_unlock(&mMutex);
6419 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006420 handleBatchMetadata(metadata_buf,
6421 true /* free_and_bufdone_meta_buf */);
6422 } else { /* mBatchSize = 0 */
6423 hdrPlusPerfLock(metadata_buf);
6424 pthread_mutex_lock(&mMutex);
6425 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006426 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006427 true /* last urgent frame of batch metadata */,
6428 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006429 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006430 pthread_mutex_unlock(&mMutex);
6431 }
6432 } else if (isInputBuffer) {
6433 pthread_mutex_lock(&mMutex);
6434 handleInputBufferWithLock(frame_number);
6435 pthread_mutex_unlock(&mMutex);
6436 } else {
6437 pthread_mutex_lock(&mMutex);
6438 handleBufferWithLock(buffer, frame_number);
6439 pthread_mutex_unlock(&mMutex);
6440 }
6441 return;
6442}
6443
6444/*===========================================================================
6445 * FUNCTION : getReprocessibleOutputStreamId
6446 *
6447 * DESCRIPTION: Get source output stream id for the input reprocess stream
6448 * based on size and format, which would be the largest
6449 * output stream if an input stream exists.
6450 *
6451 * PARAMETERS :
6452 * @id : return the stream id if found
6453 *
6454 * RETURN : int32_t type of status
6455 * NO_ERROR -- success
6456 * non-zero failure code
6457 *==========================================================================*/
6458int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6459{
6460 /* check if there is any output or bidirectional stream with the same size
6461 and format, and return that stream */
6462 if ((mInputStreamInfo.dim.width > 0) &&
6463 (mInputStreamInfo.dim.height > 0)) {
6464 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6465 it != mStreamInfo.end(); it++) {
6466
6467 camera3_stream_t *stream = (*it)->stream;
6468 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6469 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6470 (stream->format == mInputStreamInfo.format)) {
6471 // Usage flag for an input stream and the source output stream
6472 // may be different.
6473 LOGD("Found reprocessible output stream! %p", *it);
6474 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6475 stream->usage, mInputStreamInfo.usage);
6476
6477 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6478 if (channel != NULL && channel->mStreams[0]) {
6479 id = channel->mStreams[0]->getMyServerID();
6480 return NO_ERROR;
6481 }
6482 }
6483 }
6484 } else {
6485 LOGD("No input stream, so no reprocessible output stream");
6486 }
6487 return NAME_NOT_FOUND;
6488}
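// A minimal usage sketch for the helper above (variable names here are
// hypothetical, not taken from this file): a caller would typically resolve
// the reprocess source stream once per reprocess request and skip
// reprocessing when no matching output stream exists.
//
//     uint32_t reprocSrcStreamId = 0;
//     if (getReprocessibleOutputStreamId(reprocSrcStreamId) == NO_ERROR) {
//         // reprocSrcStreamId identifies the output stream whose size and
//         // format match the configured input stream.
//     } else {
//         // NAME_NOT_FOUND: no input stream configured, or no matching output.
//     }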
6489
6490/*===========================================================================
6491 * FUNCTION : lookupFwkName
6492 *
6493 * DESCRIPTION: In case the enum is not the same in fwk and backend,
6494 * make sure the parameter is correctly propagated
6495 *
6496 * PARAMETERS :
6497 * @arr : map between the two enums
6498 * @len : len of the map
6499 * @hal_name : name of the hal param to map
6500 *
6501 * RETURN : int type of status
6502 * fwk_name -- success
6503 * non-zero failure code
6504 *==========================================================================*/
6505template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6506 size_t len, halType hal_name)
6507{
6508
6509 for (size_t i = 0; i < len; i++) {
6510 if (arr[i].hal_name == hal_name) {
6511 return arr[i].fwk_name;
6512 }
6513 }
6514
6515 /* Not being able to find a matching framework type is not necessarily
6516 * an error. This happens when mm-camera supports more attributes
6517 * than the framework does */
6518 LOGH("Cannot find matching framework type");
6519 return NAME_NOT_FOUND;
6520}
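// A minimal usage sketch for lookupFwkName() above and lookupHalName() below,
// assuming a map table whose entries expose 'fwk_name' and 'hal_name' members;
// the entry type, table and values here are illustrative placeholders rather
// than any real map table defined elsewhere in this HAL.
//
//     struct ExampleMapEntry { int fwk_name; int hal_name; };
//     static const ExampleMapEntry kExampleMap[] = {
//         { /* fwk */ 0, /* hal */ 10 },
//         { /* fwk */ 1, /* hal */ 11 },
//     };
//     int fwk = lookupFwkName(kExampleMap,
//             sizeof(kExampleMap) / sizeof(kExampleMap[0]), 11);
//     // fwk == 1 here; NAME_NOT_FOUND if the HAL value has no mapping.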
6521
6522/*===========================================================================
6523 * FUNCTION : lookupHalName
6524 *
6525 * DESCRIPTION: In case the enum is not the same in fwk and backend,
6526 * make sure the parameter is correctly propagated
6527 *
6528 * PARAMETERS :
6529 * @arr : map between the two enums
6530 * @len : len of the map
6531 * @fwk_name : name of the fwk param to map
6532 *
6533 * RETURN : int32_t type of status
6534 * hal_name -- success
6535 * non-zero failure code
6536 *==========================================================================*/
6537template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6538 size_t len, fwkType fwk_name)
6539{
6540 for (size_t i = 0; i < len; i++) {
6541 if (arr[i].fwk_name == fwk_name) {
6542 return arr[i].hal_name;
6543 }
6544 }
6545
6546 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6547 return NAME_NOT_FOUND;
6548}
6549
6550/*===========================================================================
6551 * FUNCTION : lookupProp
6552 *
6553 * DESCRIPTION: lookup a value by its name
6554 *
6555 * PARAMETERS :
6556 * @arr : map between the two enums
6557 * @len : size of the map
6558 * @name : name to be looked up
6559 *
6560 * RETURN : Value if found
6561 * CAM_CDS_MODE_MAX if not found
6562 *==========================================================================*/
6563template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6564 size_t len, const char *name)
6565{
6566 if (name) {
6567 for (size_t i = 0; i < len; i++) {
6568 if (!strcmp(arr[i].desc, name)) {
6569 return arr[i].val;
6570 }
6571 }
6572 }
6573 return CAM_CDS_MODE_MAX;
6574}
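// Hedged sketch of typical usage (the map name and property key here are
// assumptions for illustration, not taken from this section): a mode string
// read from an Android system property is resolved to the backend enum, with
// CAM_CDS_MODE_MAX signalling "no match, keep the default".
//
//     char prop[PROPERTY_VALUE_MAX];
//     memset(prop, 0, sizeof(prop));
//     property_get("persist.camera.CDS", prop, "Auto");
//     cam_cds_mode_type_t cds_mode =
//             lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
//     if (CAM_CDS_MODE_MAX == cds_mode) {
//         // Unrecognized string; fall back to the existing CDS setting.
//     }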
6575
6576/*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translate the metadata reported by the HAL backend into a
 *              camera_metadata_t in the format the framework expects
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *   @timestamp: metadata buffer timestamp
 *   @request_id: request id
 *   @jpegMetadata: additional jpeg metadata
 *   @pipeline_depth: pipeline depth reported for this result
 *   @capture_intent: capture intent of the originating request
 *   @hybrid_ae_enable: whether hybrid ae is enabled
 *   @DevCamDebug_meta_enable: enable DevCamDebug meta
 *   @pprocDone: whether internal offline postprocessing is done
 *   @fwk_cacMode: framework color aberration correction mode to report
 *   @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
 *                         in a batch. Always true for non-batch mode.
 *   @enableZsl: pointer to the ZSL enable flag; may be null when not applicable
Thierry Strudel3d639192016-09-09 11:52:26 -07006591 *
6592 * RETURN : camera_metadata_t*
6593 * metadata in a format specified by fwk
6594 *==========================================================================*/
6595camera_metadata_t*
6596QCamera3HardwareInterface::translateFromHalMetadata(
6597 metadata_buffer_t *metadata,
6598 nsecs_t timestamp,
6599 int32_t request_id,
6600 const CameraMetadata& jpegMetadata,
6601 uint8_t pipeline_depth,
6602 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006603 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006604 /* DevCamDebug metadata translateFromHalMetadata argument */
6605 uint8_t DevCamDebug_meta_enable,
6606 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006607 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006608 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006609 bool lastMetadataInBatch,
6610 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006611{
6612 CameraMetadata camMetadata;
6613 camera_metadata_t *resultMetadata;
6614
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006615 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006616 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6617 * Timestamp is needed because it's used for shutter notify calculation.
6618 * */
6619 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6620 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006621 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006622 }
6623
Thierry Strudel3d639192016-09-09 11:52:26 -07006624 if (jpegMetadata.entryCount())
6625 camMetadata.append(jpegMetadata);
6626
6627 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6628 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6629 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6630 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006631 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006632 if (mBatchSize == 0) {
6633 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6634 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6635 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006636
Samuel Ha68ba5172016-12-15 18:41:12 -08006637 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6638 // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6639 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6640 // DevCamDebug metadata translateFromHalMetadata AF
6641 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6642 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6643 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6644 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6645 }
6646 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6647 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6648 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6649 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6650 }
6651 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6652 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6653 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6654 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6655 }
6656 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6657 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6658 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6659 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6660 }
6661 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6662 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6663 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6664 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6665 }
6666 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6667 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6668 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6669 *DevCamDebug_af_monitor_pdaf_target_pos;
6670 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6671 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6672 }
6673 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6674 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6675 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6676 *DevCamDebug_af_monitor_pdaf_confidence;
6677 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6678 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6679 }
6680 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6681 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6682 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6683 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6684 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6685 }
6686 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6687 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6688 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6689 *DevCamDebug_af_monitor_tof_target_pos;
6690 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6691 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6692 }
6693 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6694 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6695 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6696 *DevCamDebug_af_monitor_tof_confidence;
6697 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6698 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6699 }
6700 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6701 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6702 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6703 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6704 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6705 }
6706 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6707 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6708 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6709 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6710 &fwk_DevCamDebug_af_monitor_type_select, 1);
6711 }
6712 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6713 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6714 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6715 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6716 &fwk_DevCamDebug_af_monitor_refocus, 1);
6717 }
6718 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6719 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6720 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6721 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6722 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6723 }
6724 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6725 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6726 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6727 *DevCamDebug_af_search_pdaf_target_pos;
6728 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6729 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6730 }
6731 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6732 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6733 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6734 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6735 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6736 }
6737 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6738 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6739 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6740 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6741 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6742 }
6743 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6744 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6745 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6746 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6747 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6748 }
6749 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6750 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6751 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6752 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6753 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6754 }
6755 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6756 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6757 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6758 *DevCamDebug_af_search_tof_target_pos;
6759 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6760 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6761 }
6762 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6763 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6764 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6765 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6766 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6767 }
6768 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6769 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6770 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6771 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6772 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6773 }
6774 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6775 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6776 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6777 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6778 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6779 }
6780 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6781 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6782 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6783 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6784 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6785 }
6786 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6787 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6788 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6789 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6790 &fwk_DevCamDebug_af_search_type_select, 1);
6791 }
6792 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6793 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6794 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6795 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6796 &fwk_DevCamDebug_af_search_next_pos, 1);
6797 }
6798 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6799 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6800 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6801 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6802 &fwk_DevCamDebug_af_search_target_pos, 1);
6803 }
6804 // DevCamDebug metadata translateFromHalMetadata AEC
6805 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6806 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6807 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6808 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6809 }
6810 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6811 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6812 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6813 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6814 }
6815 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6816 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6817 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6818 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6819 }
6820 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6821 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6822 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6823 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6824 }
6825 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6826 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6827 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6828 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6829 }
6830 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6831 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6832 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6833 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6834 }
6835 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6836 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6837 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6838 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6839 }
6840 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6841 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6842 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6843 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6844 }
Samuel Ha34229982017-02-17 13:51:11 -08006845 // DevCamDebug metadata translateFromHalMetadata zzHDR
6846 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6847 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6848 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6849 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6850 }
6851 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6852 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006853 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006854 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6855 }
6856 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6857 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6858 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6859 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6860 }
6861 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6862 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006863 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006864 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6865 }
6866 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6867 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6868 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6869 *DevCamDebug_aec_hdr_sensitivity_ratio;
6870 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6871 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6872 }
6873 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6874 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6875 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6876 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6877 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6878 }
6879 // DevCamDebug metadata translateFromHalMetadata ADRC
6880 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6881 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6882 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6883 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6884 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6885 }
6886 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6887 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6888 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6889 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6890 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6891 }
6892 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6893 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6894 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6895 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6896 }
6897 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6898 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6899 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6900 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6901 }
6902 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6903 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6904 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6905 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6906 }
6907 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6908 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6909 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6910 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6911 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006912 // DevCamDebug metadata translateFromHalMetadata AWB
6913 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6914 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6915 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6916 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6917 }
6918 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6919 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6920 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6921 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6922 }
6923 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6924 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6925 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6926 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6927 }
6928 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6929 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6930 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6931 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6932 }
6933 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6934 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6935 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6936 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6937 }
6938 }
6939 // atrace_end(ATRACE_TAG_ALWAYS);
6940
Thierry Strudel3d639192016-09-09 11:52:26 -07006941 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6942 int64_t fwk_frame_number = *frame_number;
6943 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6944 }
6945
6946 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6947 int32_t fps_range[2];
6948 fps_range[0] = (int32_t)float_range->min_fps;
6949 fps_range[1] = (int32_t)float_range->max_fps;
6950 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6951 fps_range, 2);
6952 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6953 fps_range[0], fps_range[1]);
6954 }
6955
6956 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6957 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6958 }
6959
6960 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6961 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6962 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6963 *sceneMode);
6964 if (NAME_NOT_FOUND != val) {
6965 uint8_t fwkSceneMode = (uint8_t)val;
6966 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6967 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6968 fwkSceneMode);
6969 }
6970 }
6971
6972 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6973 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6974 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6975 }
6976
6977 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6978 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6979 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6980 }
6981
6982 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6983 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6984 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6985 }
6986
6987 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6988 CAM_INTF_META_EDGE_MODE, metadata) {
6989 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6990 }
6991
6992 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6993 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6994 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6995 }
6996
6997 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6998 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6999 }
7000
7001 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7002 if (0 <= *flashState) {
7003 uint8_t fwk_flashState = (uint8_t) *flashState;
7004 if (!gCamCapability[mCameraId]->flash_available) {
7005 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7006 }
7007 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7008 }
7009 }
7010
7011 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7012 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7013 if (NAME_NOT_FOUND != val) {
7014 uint8_t fwk_flashMode = (uint8_t)val;
7015 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7016 }
7017 }
7018
7019 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7020 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7021 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7022 }
7023
7024 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7025 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7026 }
7027
7028 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7029 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7030 }
7031
7032 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7033 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7034 }
7035
7036 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7037 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7038 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7039 }
7040
7041 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7042 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7043 LOGD("fwk_videoStab = %d", fwk_videoStab);
7044 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7045 } else {
7046 // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
7047 // and so hardcoding the Video Stab result to OFF mode.
7048 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7049 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007050 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007051 }
7052
7053 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7054 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7055 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7056 }
7057
7058 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7059 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7060 }
7061
Thierry Strudel3d639192016-09-09 11:52:26 -07007062 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7063 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007064 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007065
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007066 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7067 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007068
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007069 LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007070 blackLevelAppliedPattern->cam_black_level[0],
7071 blackLevelAppliedPattern->cam_black_level[1],
7072 blackLevelAppliedPattern->cam_black_level[2],
7073 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007074 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7075 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007076
7077#ifndef USE_HAL_3_3
7078 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307079 // Need to convert the internal 14-bit black level to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007080 // depth space, i.e. divide by 2^(14-10) = 16.
Jason Lee4f3d96e2017-02-28 19:24:14 +05307081 fwk_blackLevelInd[0] /= 16.0;
7082 fwk_blackLevelInd[1] /= 16.0;
7083 fwk_blackLevelInd[2] /= 16.0;
7084 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007085 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7086 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007087#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007088 }
7089
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007090#ifndef USE_HAL_3_3
7091 // Fixed white level is used by ISP/Sensor
7092 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7093 &gCamCapability[mCameraId]->white_level, 1);
7094#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007095
7096 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7097 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7098 int32_t scalerCropRegion[4];
7099 scalerCropRegion[0] = hScalerCropRegion->left;
7100 scalerCropRegion[1] = hScalerCropRegion->top;
7101 scalerCropRegion[2] = hScalerCropRegion->width;
7102 scalerCropRegion[3] = hScalerCropRegion->height;
7103
7104 // Adjust crop region from sensor output coordinate system to active
7105 // array coordinate system.
7106 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7107 scalerCropRegion[2], scalerCropRegion[3]);
7108
7109 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7110 }
7111
7112 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7113 LOGD("sensorExpTime = %lld", *sensorExpTime);
7114 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7115 }
7116
7117 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7118 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7119 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7120 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7121 }
7122
7123 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7124 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7125 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7126 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7127 sensorRollingShutterSkew, 1);
7128 }
7129
7130 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7131 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7132 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7133
7134 //calculate the noise profile based on sensitivity
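// ANDROID_SENSOR_NOISE_PROFILE is reported as an (S, O) coefficient pair per
// color channel, where the noise variance of a pixel with value x is modeled
// as S * x + O. The same pair, derived from the current sensitivity, is
// replicated for every color channel in the loop below.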
7135 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7136 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7137 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7138 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7139 noise_profile[i] = noise_profile_S;
7140 noise_profile[i+1] = noise_profile_O;
7141 }
7142 LOGD("noise model entry (S, O) is (%f, %f)",
7143 noise_profile_S, noise_profile_O);
7144 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7145 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7146 }
7147
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007148#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007149 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007150 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007151 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007152 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007153 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7154 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7155 }
7156 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007157#endif
7158
Thierry Strudel3d639192016-09-09 11:52:26 -07007159 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7160 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7161 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7162 }
7163
7164 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7165 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7166 *faceDetectMode);
7167 if (NAME_NOT_FOUND != val) {
7168 uint8_t fwk_faceDetectMode = (uint8_t)val;
7169 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7170
7171 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7172 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7173 CAM_INTF_META_FACE_DETECTION, metadata) {
7174 uint8_t numFaces = MIN(
7175 faceDetectionInfo->num_faces_detected, MAX_ROI);
7176 int32_t faceIds[MAX_ROI];
7177 uint8_t faceScores[MAX_ROI];
7178 int32_t faceRectangles[MAX_ROI * 4];
7179 int32_t faceLandmarks[MAX_ROI * 6];
7180 size_t j = 0, k = 0;
7181
7182 for (size_t i = 0; i < numFaces; i++) {
7183 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7184 // Map the face boundary from the sensor output coordinate system to the
7185 // active array coordinate system.
7186 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7187 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7188 rect.width, rect.height);
7189
7190 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7191 faceRectangles+j, -1);
7192
Jason Lee8ce36fa2017-04-19 19:40:37 -07007193 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7194 "bottom-right (%d, %d)",
7195 faceDetectionInfo->frame_id, i,
7196 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7197 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7198
Thierry Strudel3d639192016-09-09 11:52:26 -07007199 j+= 4;
7200 }
7201 if (numFaces <= 0) {
7202 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7203 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7204 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7205 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7206 }
7207
7208 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7209 numFaces);
7210 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7211 faceRectangles, numFaces * 4U);
7212 if (fwk_faceDetectMode ==
7213 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7214 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7215 CAM_INTF_META_FACE_LANDMARK, metadata) {
7216
7217 for (size_t i = 0; i < numFaces; i++) {
7218 // Map the landmark coordinates from the sensor output coordinate system
7219 // to the active array coordinate system.
7220 mCropRegionMapper.toActiveArray(
7221 landmarks->face_landmarks[i].left_eye_center.x,
7222 landmarks->face_landmarks[i].left_eye_center.y);
7223 mCropRegionMapper.toActiveArray(
7224 landmarks->face_landmarks[i].right_eye_center.x,
7225 landmarks->face_landmarks[i].right_eye_center.y);
7226 mCropRegionMapper.toActiveArray(
7227 landmarks->face_landmarks[i].mouth_center.x,
7228 landmarks->face_landmarks[i].mouth_center.y);
7229
7230 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007231
7232 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7233 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7234 faceDetectionInfo->frame_id, i,
7235 faceLandmarks[k + LEFT_EYE_X],
7236 faceLandmarks[k + LEFT_EYE_Y],
7237 faceLandmarks[k + RIGHT_EYE_X],
7238 faceLandmarks[k + RIGHT_EYE_Y],
7239 faceLandmarks[k + MOUTH_X],
7240 faceLandmarks[k + MOUTH_Y]);
7241
Thierry Strudel04e026f2016-10-10 11:27:36 -07007242 k+= TOTAL_LANDMARK_INDICES;
7243 }
7244 } else {
7245 for (size_t i = 0; i < numFaces; i++) {
7246 setInvalidLandmarks(faceLandmarks+k);
7247 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007248 }
7249 }
7250
Jason Lee49619db2017-04-13 12:07:22 -07007251 for (size_t i = 0; i < numFaces; i++) {
7252 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7253
7254 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7255 faceDetectionInfo->frame_id, i, faceIds[i]);
7256 }
7257
Thierry Strudel3d639192016-09-09 11:52:26 -07007258 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7259 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7260 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007261 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007262 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7263 CAM_INTF_META_FACE_BLINK, metadata) {
7264 uint8_t detected[MAX_ROI];
7265 uint8_t degree[MAX_ROI * 2];
7266 for (size_t i = 0; i < numFaces; i++) {
7267 detected[i] = blinks->blink[i].blink_detected;
7268 degree[2 * i] = blinks->blink[i].left_blink;
7269 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007270
Jason Lee49619db2017-04-13 12:07:22 -07007271 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7272 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7273 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7274 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007275 }
7276 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7277 detected, numFaces);
7278 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7279 degree, numFaces * 2);
7280 }
7281 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7282 CAM_INTF_META_FACE_SMILE, metadata) {
7283 uint8_t degree[MAX_ROI];
7284 uint8_t confidence[MAX_ROI];
7285 for (size_t i = 0; i < numFaces; i++) {
7286 degree[i] = smiles->smile[i].smile_degree;
7287 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007288
Jason Lee49619db2017-04-13 12:07:22 -07007289 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7290 "smile_degree=%d, smile_score=%d",
7291 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007292 }
7293 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7294 degree, numFaces);
7295 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7296 confidence, numFaces);
7297 }
7298 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7299 CAM_INTF_META_FACE_GAZE, metadata) {
7300 int8_t angle[MAX_ROI];
7301 int32_t direction[MAX_ROI * 3];
7302 int8_t degree[MAX_ROI * 2];
7303 for (size_t i = 0; i < numFaces; i++) {
7304 angle[i] = gazes->gaze[i].gaze_angle;
7305 direction[3 * i] = gazes->gaze[i].updown_dir;
7306 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7307 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7308 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7309 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007310
7311 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7312 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7313 "left_right_gaze=%d, top_bottom_gaze=%d",
7314 faceDetectionInfo->frame_id, i, angle[i],
7315 direction[3 * i], direction[3 * i + 1],
7316 direction[3 * i + 2],
7317 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007318 }
7319 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7320 (uint8_t *)angle, numFaces);
7321 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7322 direction, numFaces * 3);
7323 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7324 (uint8_t *)degree, numFaces * 2);
7325 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007326 }
7327 }
7328 }
7329 }
7330
7331 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7332 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007333 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007334 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007335 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007336
Shuzhen Wang14415f52016-11-16 18:26:18 -08007337 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7338 histogramBins = *histBins;
7339 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7340 }
7341
7342 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007343 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7344 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007345 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007346
7347 switch (stats_data->type) {
7348 case CAM_HISTOGRAM_TYPE_BAYER:
7349 switch (stats_data->bayer_stats.data_type) {
7350 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007351 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7352 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007353 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007354 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7355 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007356 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007357 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7358 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007359 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007360 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007361 case CAM_STATS_CHANNEL_R:
7362 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007363 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7364 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007365 }
7366 break;
7367 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007368 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007369 break;
7370 }
7371
Shuzhen Wang14415f52016-11-16 18:26:18 -08007372 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007373 }
7374 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007375 }
7376
7377 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7378 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7379 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7380 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7381 }
7382
7383 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7384 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7385 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7386 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7387 }
7388
7389 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7390 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7391 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7392 CAM_MAX_SHADING_MAP_HEIGHT);
7393 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7394 CAM_MAX_SHADING_MAP_WIDTH);
7395 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7396 lensShadingMap->lens_shading, 4U * map_width * map_height);
7397 }
7398
7399 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7400 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7401 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7402 }
7403
7404 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7405 //Populate CAM_INTF_META_TONEMAP_CURVES
7406 /* ch0 = G, ch 1 = B, ch 2 = R*/
7407 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7408 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7409 tonemap->tonemap_points_cnt,
7410 CAM_MAX_TONEMAP_CURVE_SIZE);
7411 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7412 }
7413
7414 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7415 &tonemap->curves[0].tonemap_points[0][0],
7416 tonemap->tonemap_points_cnt * 2);
7417
7418 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7419 &tonemap->curves[1].tonemap_points[0][0],
7420 tonemap->tonemap_points_cnt * 2);
7421
7422 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7423 &tonemap->curves[2].tonemap_points[0][0],
7424 tonemap->tonemap_points_cnt * 2);
7425 }
7426
7427 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7428 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7429 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7430 CC_GAIN_MAX);
7431 }
7432
7433 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7434 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7435 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7436 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7437 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7438 }
7439
7440 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7441 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7442 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7443 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7444 toneCurve->tonemap_points_cnt,
7445 CAM_MAX_TONEMAP_CURVE_SIZE);
7446 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7447 }
7448 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7449 (float*)toneCurve->curve.tonemap_points,
7450 toneCurve->tonemap_points_cnt * 2);
7451 }
7452
7453 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7454 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7455 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7456 predColorCorrectionGains->gains, 4);
7457 }
7458
7459 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7460 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7461 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7462 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7463 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7464 }
7465
7466 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7467 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7468 }
7469
7470 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7471 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7472 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7473 }
7474
7475 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7476 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7477 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7478 }
7479
7480 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7481 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7482 *effectMode);
7483 if (NAME_NOT_FOUND != val) {
7484 uint8_t fwk_effectMode = (uint8_t)val;
7485 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7486 }
7487 }
7488
7489 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7490 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7491 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7492 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7493 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7494 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7495 }
7496 int32_t fwk_testPatternData[4];
7497 fwk_testPatternData[0] = testPatternData->r;
7498 fwk_testPatternData[3] = testPatternData->b;
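// The backend reports separate Gr/Gb test pattern values; which of the two
// framework green slots each one lands in depends on the sensor's color
// filter arrangement, handled below.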
7499 switch (gCamCapability[mCameraId]->color_arrangement) {
7500 case CAM_FILTER_ARRANGEMENT_RGGB:
7501 case CAM_FILTER_ARRANGEMENT_GRBG:
7502 fwk_testPatternData[1] = testPatternData->gr;
7503 fwk_testPatternData[2] = testPatternData->gb;
7504 break;
7505 case CAM_FILTER_ARRANGEMENT_GBRG:
7506 case CAM_FILTER_ARRANGEMENT_BGGR:
7507 fwk_testPatternData[2] = testPatternData->gr;
7508 fwk_testPatternData[1] = testPatternData->gb;
7509 break;
7510 default:
7511 LOGE("color arrangement %d is not supported",
7512 gCamCapability[mCameraId]->color_arrangement);
7513 break;
7514 }
7515 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7516 }
7517
7518 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7519 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7520 }
7521
7522 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7523 String8 str((const char *)gps_methods);
7524 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7525 }
7526
7527 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7528 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7529 }
7530
7531 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7532 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7533 }
7534
7535 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7536 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7537 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7538 }
7539
7540 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7541 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7542 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7543 }
7544
7545 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7546 int32_t fwk_thumb_size[2];
7547 fwk_thumb_size[0] = thumb_size->width;
7548 fwk_thumb_size[1] = thumb_size->height;
7549 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7550 }
7551
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007552 // Skip reprocess metadata if there is no input stream.
7553 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7554 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7555 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7556 privateData,
7557 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7558 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007559 }
7560
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007561 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007562 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007563 meteringMode, 1);
7564 }
7565
Thierry Strudel54dc9782017-02-15 12:12:10 -08007566 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7567 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7568 LOGD("hdr_scene_data: %d %f\n",
7569 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7570 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7571 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7572 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7573 &isHdr, 1);
7574 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7575 &isHdrConfidence, 1);
7576 }
7577
7578
7579
Thierry Strudel3d639192016-09-09 11:52:26 -07007580 if (metadata->is_tuning_params_valid) {
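// The tuning blob published under QCAMERA3_TUNING_META_DATA_BLOB is packed as
// a flat byte stream: a header of six uint32 words (data version followed by
// the sensor, VFE, CPP, CAC and mod3 section sizes), then the variable-length
// sensor, VFE, CPP and CAC sections copied from tuning_params.data at their
// fixed offsets.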
7581 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7582 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7583 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7584
7585
7586 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7587 sizeof(uint32_t));
7588 data += sizeof(uint32_t);
7589
7590 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7591 sizeof(uint32_t));
7592 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7593 data += sizeof(uint32_t);
7594
7595 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7596 sizeof(uint32_t));
7597 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7598 data += sizeof(uint32_t);
7599
7600 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7601 sizeof(uint32_t));
7602 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7603 data += sizeof(uint32_t);
7604
7605 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7606 sizeof(uint32_t));
7607 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7608 data += sizeof(uint32_t);
7609
7610 metadata->tuning_params.tuning_mod3_data_size = 0;
7611 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7612 sizeof(uint32_t));
7613 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7614 data += sizeof(uint32_t);
7615
7616 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7617 TUNING_SENSOR_DATA_MAX);
7618 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7619 count);
7620 data += count;
7621
7622 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7623 TUNING_VFE_DATA_MAX);
7624 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7625 count);
7626 data += count;
7627
7628 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7629 TUNING_CPP_DATA_MAX);
7630 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7631 count);
7632 data += count;
7633
7634 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7635 TUNING_CAC_DATA_MAX);
7636 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7637 count);
7638 data += count;
7639
7640 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7641 (int32_t *)(void *)tuning_meta_data_blob,
7642 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7643 }
7644
7645 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7646 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7647 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7648 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7649 NEUTRAL_COL_POINTS);
7650 }
7651
7652 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7653 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7654 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7655 }
7656
7657 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7658 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7659 // Adjust crop region from sensor output coordinate system to active
7660 // array coordinate system.
7661 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7662 hAeRegions->rect.width, hAeRegions->rect.height);
7663
7664 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7665 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7666 REGIONS_TUPLE_COUNT);
7667 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7668 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7669 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7670 hAeRegions->rect.height);
7671 }
7672
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007673 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7674 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7675 if (NAME_NOT_FOUND != val) {
7676 uint8_t fwkAfMode = (uint8_t)val;
7677 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7678 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7679 } else {
7680 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7681 val);
7682 }
7683 }
7684
Thierry Strudel3d639192016-09-09 11:52:26 -07007685 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7686 uint8_t fwk_afState = (uint8_t) *afState;
7687 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007688 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007689 }
7690
7691 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7692 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7693 }
7694
7695 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7696 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7697 }
7698
7699 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7700 uint8_t fwk_lensState = *lensState;
7701 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7702 }
7703
Thierry Strudel3d639192016-09-09 11:52:26 -07007704
7705 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007706 uint32_t ab_mode = *hal_ab_mode;
7707 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7708 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7709 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7710 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007711 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007712 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007713 if (NAME_NOT_FOUND != val) {
7714 uint8_t fwk_ab_mode = (uint8_t)val;
7715 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7716 }
7717 }
7718
7719 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7720 int val = lookupFwkName(SCENE_MODES_MAP,
7721 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7722 if (NAME_NOT_FOUND != val) {
7723 uint8_t fwkBestshotMode = (uint8_t)val;
7724 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7725 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7726 } else {
7727 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7728 }
7729 }
7730
7731 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7732 uint8_t fwk_mode = (uint8_t) *mode;
7733 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7734 }
7735
7736 /* Constant metadata values to be updated */
7737 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7738 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7739
7740 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7741 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7742
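        // The hot pixel map mode is reported as OFF above, so an empty
        // (zero-entry) hot pixel map is published to keep the result consistent.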
7743 int32_t hotPixelMap[2];
7744 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7745
7746 // CDS
7747 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7748 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7749 }
7750
Thierry Strudel04e026f2016-10-10 11:27:36 -07007751 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7752 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007753 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007754 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7755 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7756 } else {
7757 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7758 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007759
7760 if(fwk_hdr != curr_hdr_state) {
7761 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7762 if(fwk_hdr)
7763 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7764 else
7765 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7766 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007767 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7768 }
7769
Thierry Strudel54dc9782017-02-15 12:12:10 -08007770 //binning correction
7771 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7772 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7773 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7774 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7775 }
7776
Thierry Strudel04e026f2016-10-10 11:27:36 -07007777 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007778 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007779 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7780 int8_t is_ir_on = 0;
7781
7782 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7783 if(is_ir_on != curr_ir_state) {
7784 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7785 if(is_ir_on)
7786 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7787 else
7788 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7789 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007790 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007791 }
7792
Thierry Strudel269c81a2016-10-12 12:13:59 -07007793 // AEC SPEED
7794 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7795 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7796 }
7797
7798 // AWB SPEED
7799 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7800 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7801 }
7802
Thierry Strudel3d639192016-09-09 11:52:26 -07007803 // TNR
7804 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7805 uint8_t tnr_enable = tnr->denoise_enable;
7806 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007807 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7808 int8_t is_tnr_on = 0;
7809
7810 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7811 if(is_tnr_on != curr_tnr_state) {
7812 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7813 if(is_tnr_on)
7814 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7815 else
7816 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7817 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007818
7819 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7820 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7821 }
7822
7823 // Reprocess crop data
7824 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7825 uint8_t cnt = crop_data->num_of_streams;
7826 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7827 // mm-qcamera-daemon only posts crop_data for streams
7828 // not linked to pproc. So no valid crop metadata is not
7829 // necessarily an error case.
7830 LOGD("No valid crop metadata entries");
7831 } else {
7832 uint32_t reproc_stream_id;
7833 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7834 LOGD("No reprocessible stream found, ignore crop data");
7835 } else {
7836 int rc = NO_ERROR;
7837 Vector<int32_t> roi_map;
7838 int32_t *crop = new int32_t[cnt*4];
7839 if (NULL == crop) {
7840 rc = NO_MEMORY;
7841 }
7842 if (NO_ERROR == rc) {
7843 int32_t streams_found = 0;
7844 for (size_t i = 0; i < cnt; i++) {
7845 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7846 if (pprocDone) {
7847 // HAL already does internal reprocessing,
7848 // either via reprocessing before JPEG encoding,
7849 // or offline postprocessing for pproc bypass case.
7850 crop[0] = 0;
7851 crop[1] = 0;
7852 crop[2] = mInputStreamInfo.dim.width;
7853 crop[3] = mInputStreamInfo.dim.height;
7854 } else {
7855 crop[0] = crop_data->crop_info[i].crop.left;
7856 crop[1] = crop_data->crop_info[i].crop.top;
7857 crop[2] = crop_data->crop_info[i].crop.width;
7858 crop[3] = crop_data->crop_info[i].crop.height;
7859 }
7860 roi_map.add(crop_data->crop_info[i].roi_map.left);
7861 roi_map.add(crop_data->crop_info[i].roi_map.top);
7862 roi_map.add(crop_data->crop_info[i].roi_map.width);
7863 roi_map.add(crop_data->crop_info[i].roi_map.height);
7864 streams_found++;
7865 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7866 crop[0], crop[1], crop[2], crop[3]);
7867 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7868 crop_data->crop_info[i].roi_map.left,
7869 crop_data->crop_info[i].roi_map.top,
7870 crop_data->crop_info[i].roi_map.width,
7871 crop_data->crop_info[i].roi_map.height);
7872 break;
7873
7874 }
7875 }
7876 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7877 &streams_found, 1);
7878 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7879 crop, (size_t)(streams_found * 4));
7880 if (roi_map.array()) {
7881 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7882 roi_map.array(), roi_map.size());
7883 }
7884 }
7885 if (crop) {
7886 delete [] crop;
7887 }
7888 }
7889 }
7890 }
7891
7892 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7893 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7894 // so hardcode the CAC result to OFF mode.
7895 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7896 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7897 } else {
7898 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7899 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7900 *cacMode);
7901 if (NAME_NOT_FOUND != val) {
7902 uint8_t resultCacMode = (uint8_t)val;
7903 // Check whether the CAC result from the callback matches the framework-set CAC mode.
7904 // If not, report the CAC mode that came in the corresponding request.
7905 if (fwk_cacMode != resultCacMode) {
7906 resultCacMode = fwk_cacMode;
7907 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007908 //Check if CAC is disabled by property
7909 if (m_cacModeDisabled) {
7910 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7911 }
7912
Thierry Strudel3d639192016-09-09 11:52:26 -07007913 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7914 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7915 } else {
7916 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7917 }
7918 }
7919 }
7920
7921 // Post blob of cam_cds_data through vendor tag.
7922 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7923 uint8_t cnt = cdsInfo->num_of_streams;
7924 cam_cds_data_t cdsDataOverride;
7925 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7926 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7927 cdsDataOverride.num_of_streams = 1;
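        // Only a single entry is posted: the loop below copies the cds_enable flag
        // of the reprocessible stream (if found) into cdsDataOverride.cds_info[0].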
7928 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7929 uint32_t reproc_stream_id;
7930 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7931 LOGD("No reprocessible stream found, ignore cds data");
7932 } else {
7933 for (size_t i = 0; i < cnt; i++) {
7934 if (cdsInfo->cds_info[i].stream_id ==
7935 reproc_stream_id) {
7936 cdsDataOverride.cds_info[0].cds_enable =
7937 cdsInfo->cds_info[i].cds_enable;
7938 break;
7939 }
7940 }
7941 }
7942 } else {
7943 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7944 }
7945 camMetadata.update(QCAMERA3_CDS_INFO,
7946 (uint8_t *)&cdsDataOverride,
7947 sizeof(cam_cds_data_t));
7948 }
7949
7950 // Ldaf calibration data
7951 if (!mLdafCalibExist) {
7952 IF_META_AVAILABLE(uint32_t, ldafCalib,
7953 CAM_INTF_META_LDAF_EXIF, metadata) {
7954 mLdafCalibExist = true;
7955 mLdafCalib[0] = ldafCalib[0];
7956 mLdafCalib[1] = ldafCalib[1];
7957 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7958 ldafCalib[0], ldafCalib[1]);
7959 }
7960 }
7961
Thierry Strudel54dc9782017-02-15 12:12:10 -08007962 // EXIF debug data through vendor tag
7963 /*
7964 * Mobicat Mask can assume 3 values:
7965 * 1 refers to Mobicat data,
7966 * 2 refers to Stats Debug and Exif Debug Data
7967 * 3 refers to Mobicat and Stats Debug Data
7968 * We want to make sure that we are sending Exif debug data
7969 * only when Mobicat Mask is 2.
7970 */
7971 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7972 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7973 (uint8_t *)(void *)mExifParams.debug_params,
7974 sizeof(mm_jpeg_debug_exif_params_t));
7975 }
7976
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007977 // Reprocess and DDM debug data through vendor tag
7978 cam_reprocess_info_t repro_info;
7979 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007980 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7981 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007982 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007983 }
7984 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7985 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007986 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007987 }
7988 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7989 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007990 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007991 }
7992 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7993 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007994 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007995 }
7996 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7997 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007998 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007999 }
8000 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008001 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008002 }
8003 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8004 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008005 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008006 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008007 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8008 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8009 }
8010 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8011 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8012 }
8013 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8014 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008015
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008016 // INSTANT AEC MODE
8017 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8018 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8019 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8020 }
8021
Shuzhen Wange763e802016-03-31 10:24:29 -07008022 // AF scene change
8023 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8024 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8025 }
8026
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008027 // Enable ZSL
8028 if (enableZsl != nullptr) {
8029 uint8_t value = *enableZsl ?
8030 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8031 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8032 }
8033
Xu Han821ea9c2017-05-23 09:00:40 -07008034 // OIS Data
8035 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8036 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8037 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8038 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8039 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8040 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8041 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8042 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8043 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8044 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8045 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
8046 }
8047
Thierry Strudel3d639192016-09-09 11:52:26 -07008048 resultMetadata = camMetadata.release();
8049 return resultMetadata;
8050}
8051
8052/*===========================================================================
8053 * FUNCTION : saveExifParams
8054 *
8055 * DESCRIPTION: Caches 3A and stats EXIF debug parameters from the metadata callback
8056 *
8057 * PARAMETERS :
8058 * @metadata : metadata information from callback
8059 *
8060 * RETURN : none
8061 *
8062 *==========================================================================*/
8063void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8064{
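    // Each block below copies one debug blob (AE, AWB, AF, ASD, stats, BE stats,
    // bhist, 3A tuning) into mExifParams.debug_params and marks it valid, provided
    // the debug_params container has been allocated.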
8065 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8066 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8067 if (mExifParams.debug_params) {
8068 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8069 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8070 }
8071 }
8072 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8073 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8074 if (mExifParams.debug_params) {
8075 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8076 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8077 }
8078 }
8079 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8080 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8081 if (mExifParams.debug_params) {
8082 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8083 mExifParams.debug_params->af_debug_params_valid = TRUE;
8084 }
8085 }
8086 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8087 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8088 if (mExifParams.debug_params) {
8089 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8090 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8091 }
8092 }
8093 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8094 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8095 if (mExifParams.debug_params) {
8096 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8097 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8098 }
8099 }
8100 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8101 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8102 if (mExifParams.debug_params) {
8103 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8104 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8105 }
8106 }
8107 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8108 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8109 if (mExifParams.debug_params) {
8110 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8111 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8112 }
8113 }
8114 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8115 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8116 if (mExifParams.debug_params) {
8117 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8118 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8119 }
8120 }
8121}
8122
8123/*===========================================================================
8124 * FUNCTION : get3AExifParams
8125 *
8126 * DESCRIPTION: Returns the cached EXIF parameters (including debug data) used for JPEG encoding
8127 *
8128 * PARAMETERS : none
8129 *
8130 *
8131 * RETURN : mm_jpeg_exif_params_t
8132 *
8133 *==========================================================================*/
8134mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8135{
8136 return mExifParams;
8137}
8138
8139/*===========================================================================
8140 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8141 *
8142 * DESCRIPTION: Translates urgent (partial result) metadata from the camera backend
 *              into framework result metadata
8143 *
8144 * PARAMETERS :
8145 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008146 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8147 * urgent metadata in a batch. Always true for
8148 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008149 *
8150 * RETURN : camera_metadata_t*
8151 * metadata in a format specified by fwk
8152 *==========================================================================*/
8153camera_metadata_t*
8154QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008155 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008156{
8157 CameraMetadata camMetadata;
8158 camera_metadata_t *resultMetadata;
8159
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008160 if (!lastUrgentMetadataInBatch) {
8161 /* In batch mode, use empty metadata if this is not the last in batch
8162 */
8163 resultMetadata = allocate_camera_metadata(0, 0);
8164 return resultMetadata;
8165 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008166
8167 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8168 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8169 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8170 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8171 }
8172
8173 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8174 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8175 &aecTrigger->trigger, 1);
8176 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8177 &aecTrigger->trigger_id, 1);
8178 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8179 aecTrigger->trigger);
8180 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8181 aecTrigger->trigger_id);
8182 }
8183
8184 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8185 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8186 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8187 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8188 }
8189
Thierry Strudel3d639192016-09-09 11:52:26 -07008190 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8191 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8192 &af_trigger->trigger, 1);
8193 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8194 af_trigger->trigger);
8195 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8196 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8197 af_trigger->trigger_id);
8198 }
8199
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008200 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8201 /*af regions*/
8202 int32_t afRegions[REGIONS_TUPLE_COUNT];
8203 // Adjust crop region from sensor output coordinate system to active
8204 // array coordinate system.
8205 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8206 hAfRegions->rect.width, hAfRegions->rect.height);
8207
8208 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8209 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8210 REGIONS_TUPLE_COUNT);
8211 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8212 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8213 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8214 hAfRegions->rect.height);
8215 }
8216
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008217 // AF region confidence
8218 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8219 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8220 }
8221
Thierry Strudel3d639192016-09-09 11:52:26 -07008222 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8223 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8224 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8225 if (NAME_NOT_FOUND != val) {
8226 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8227 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8228 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8229 } else {
8230 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8231 }
8232 }
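    // Deduce ANDROID_CONTROL_AE_MODE from the HAL redeye, flash and AE mode values.
    // Priority below: redeye reduction, then auto/on flash, then plain AE on/off,
    // then external flash.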
8233
8234 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8235 uint32_t aeMode = CAM_AE_MODE_MAX;
8236 int32_t flashMode = CAM_FLASH_MODE_MAX;
8237 int32_t redeye = -1;
8238 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8239 aeMode = *pAeMode;
8240 }
8241 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8242 flashMode = *pFlashMode;
8243 }
8244 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8245 redeye = *pRedeye;
8246 }
8247
8248 if (1 == redeye) {
8249 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8250 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8251 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8252 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8253 flashMode);
8254 if (NAME_NOT_FOUND != val) {
8255 fwk_aeMode = (uint8_t)val;
8256 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8257 } else {
8258 LOGE("Unsupported flash mode %d", flashMode);
8259 }
8260 } else if (aeMode == CAM_AE_MODE_ON) {
8261 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8262 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8263 } else if (aeMode == CAM_AE_MODE_OFF) {
8264 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8265 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008266 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8267 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8268 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008269 } else {
8270 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8271 "flashMode:%d, aeMode:%u!!!",
8272 redeye, flashMode, aeMode);
8273 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008274 if (mInstantAEC) {
8275 // Increment frame index count until a bound is reached for instant AEC.
8276 mInstantAecFrameIdxCount++;
8277 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8278 CAM_INTF_META_AEC_INFO, metadata) {
8279 LOGH("ae_params->settled = %d",ae_params->settled);
8280 // If AEC settled, or if number of frames reached bound value,
8281 // should reset instant AEC.
8282 if (ae_params->settled ||
8283 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8284 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8285 mInstantAEC = false;
8286 mResetInstantAEC = true;
8287 mInstantAecFrameIdxCount = 0;
8288 }
8289 }
8290 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008291 resultMetadata = camMetadata.release();
8292 return resultMetadata;
8293}
8294
8295/*===========================================================================
8296 * FUNCTION : dumpMetadataToFile
8297 *
8298 * DESCRIPTION: Dumps tuning metadata to file system
8299 *
8300 * PARAMETERS :
8301 * @meta : tuning metadata
8302 * @dumpFrameCount : current dump frame count
8303 * @enabled : Enable mask
8304 *
8305 *==========================================================================*/
8306void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8307 uint32_t &dumpFrameCount,
8308 bool enabled,
8309 const char *type,
8310 uint32_t frameNumber)
8311{
8312 //Some sanity checks
8313 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8314 LOGE("Tuning sensor data size bigger than expected %d: %d",
8315 meta.tuning_sensor_data_size,
8316 TUNING_SENSOR_DATA_MAX);
8317 return;
8318 }
8319
8320 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8321 LOGE("Tuning VFE data size bigger than expected %d: %d",
8322 meta.tuning_vfe_data_size,
8323 TUNING_VFE_DATA_MAX);
8324 return;
8325 }
8326
8327 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8328 LOGE("Tuning CPP data size bigger than expected %d: %d",
8329 meta.tuning_cpp_data_size,
8330 TUNING_CPP_DATA_MAX);
8331 return;
8332 }
8333
8334 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8335 LOGE("Tuning CAC data size bigger than expected %d: %d",
8336 meta.tuning_cac_data_size,
8337 TUNING_CAC_DATA_MAX);
8338 return;
8339 }
8340 //
8341
8342 if(enabled){
8343 char timeBuf[FILENAME_MAX];
8344 char buf[FILENAME_MAX];
8345 memset(buf, 0, sizeof(buf));
8346 memset(timeBuf, 0, sizeof(timeBuf));
8347 time_t current_time;
8348 struct tm * timeinfo;
8349 time (&current_time);
8350 timeinfo = localtime (&current_time);
8351 if (timeinfo != NULL) {
8352 strftime (timeBuf, sizeof(timeBuf),
8353 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8354 }
8355 String8 filePath(timeBuf);
8356 snprintf(buf,
8357 sizeof(buf),
8358 "%dm_%s_%d.bin",
8359 dumpFrameCount,
8360 type,
8361 frameNumber);
8362 filePath.append(buf);
8363 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8364 if (file_fd >= 0) {
8365 ssize_t written_len = 0;
8366 meta.tuning_data_version = TUNING_DATA_VERSION;
8367 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8368 written_len += write(file_fd, data, sizeof(uint32_t));
8369 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8370 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8371 written_len += write(file_fd, data, sizeof(uint32_t));
8372 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8373 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8374 written_len += write(file_fd, data, sizeof(uint32_t));
8375 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8376 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8377 written_len += write(file_fd, data, sizeof(uint32_t));
8378 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8379 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8380 written_len += write(file_fd, data, sizeof(uint32_t));
8381 meta.tuning_mod3_data_size = 0;
8382 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8383 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8384 written_len += write(file_fd, data, sizeof(uint32_t));
8385 size_t total_size = meta.tuning_sensor_data_size;
8386 data = (void *)((uint8_t *)&meta.data);
8387 written_len += write(file_fd, data, total_size);
8388 total_size = meta.tuning_vfe_data_size;
8389 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8390 written_len += write(file_fd, data, total_size);
8391 total_size = meta.tuning_cpp_data_size;
8392 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8393 written_len += write(file_fd, data, total_size);
8394 total_size = meta.tuning_cac_data_size;
8395 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8396 written_len += write(file_fd, data, total_size);
8397 close(file_fd);
8398 }else {
8399 LOGE("fail to open file for metadata dumping");
8400 }
8401 }
8402}
8403
8404/*===========================================================================
8405 * FUNCTION : cleanAndSortStreamInfo
8406 *
8407 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8408 * and sort them such that raw stream is at the end of the list
8409 * This is a workaround for camera daemon constraint.
8410 *
8411 * PARAMETERS : None
8412 *
8413 *==========================================================================*/
8414void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8415{
8416 List<stream_info_t *> newStreamInfo;
8417
8418 /*clean up invalid streams*/
8419 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8420 it != mStreamInfo.end();) {
8421 if(((*it)->status) == INVALID){
8422 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8423 delete channel;
8424 free(*it);
8425 it = mStreamInfo.erase(it);
8426 } else {
8427 it++;
8428 }
8429 }
8430
8431 // Move preview/video/callback/snapshot streams into newList
8432 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8433 it != mStreamInfo.end();) {
8434 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8435 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8436 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8437 newStreamInfo.push_back(*it);
8438 it = mStreamInfo.erase(it);
8439 } else
8440 it++;
8441 }
8442 // Move raw streams into newList
8443 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8444 it != mStreamInfo.end();) {
8445 newStreamInfo.push_back(*it);
8446 it = mStreamInfo.erase(it);
8447 }
8448
8449 mStreamInfo = newStreamInfo;
8450}
8451
8452/*===========================================================================
8453 * FUNCTION : extractJpegMetadata
8454 *
8455 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8456 * JPEG metadata is cached in HAL, and return as part of capture
8457 * result when metadata is returned from camera daemon.
8458 *
8459 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8460 * @request: capture request
8461 *
8462 *==========================================================================*/
8463void QCamera3HardwareInterface::extractJpegMetadata(
8464 CameraMetadata& jpegMetadata,
8465 const camera3_capture_request_t *request)
8466{
8467 CameraMetadata frame_settings;
8468 frame_settings = request->settings;
8469
8470 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8471 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8472 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8473 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8474
8475 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8476 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8477 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8478 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8479
8480 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8481 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8482 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8483 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8484
8485 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8486 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8487 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8488 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8489
8490 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8491 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8492 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8493 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8494
8495 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8496 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8497 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8498 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8499
8500 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8501 int32_t thumbnail_size[2];
8502 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8503 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8504 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8505 int32_t orientation =
8506 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008507 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008508 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8509 int32_t temp;
8510 temp = thumbnail_size[0];
8511 thumbnail_size[0] = thumbnail_size[1];
8512 thumbnail_size[1] = temp;
8513 }
8514 }
8515 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8516 thumbnail_size,
8517 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8518 }
8519
8520}
8521
8522/*===========================================================================
8523 * FUNCTION : convertToRegions
8524 *
8525 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8526 *
8527 * PARAMETERS :
8528 * @rect : cam_rect_t struct to convert
8529 * @region : int32_t destination array
8530 * @weight : if we are converting from cam_area_t, weight is valid
8531 * else weight = -1
8532 *
8533 *==========================================================================*/
8534void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8535 int32_t *region, int weight)
8536{
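    // The output tuple is (left, top, right, bottom[, weight]); right and bottom
    // are derived from the cam_rect_t width and height.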
Jason Lee8ce36fa2017-04-19 19:40:37 -07008537 region[FACE_LEFT] = rect.left;
8538 region[FACE_TOP] = rect.top;
8539 region[FACE_RIGHT] = rect.left + rect.width;
8540 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008541 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008542 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008543 }
8544}
8545
8546/*===========================================================================
8547 * FUNCTION : convertFromRegions
8548 *
8549 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8550 *
8551 * PARAMETERS :
8552 *   @roi            : cam_area_t destination struct
8553 *   @frame_settings : capture request settings containing the region tag
8554 *   @tag            : metadata tag whose data is [x_min, y_min, x_max, y_max, weight]
8556 *
8557 *==========================================================================*/
8558void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008559 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008560{
Thierry Strudel3d639192016-09-09 11:52:26 -07008561 int32_t x_min = frame_settings.find(tag).data.i32[0];
8562 int32_t y_min = frame_settings.find(tag).data.i32[1];
8563 int32_t x_max = frame_settings.find(tag).data.i32[2];
8564 int32_t y_max = frame_settings.find(tag).data.i32[3];
8565 roi.weight = frame_settings.find(tag).data.i32[4];
8566 roi.rect.left = x_min;
8567 roi.rect.top = y_min;
8568 roi.rect.width = x_max - x_min;
8569 roi.rect.height = y_max - y_min;
8570}
8571
8572/*===========================================================================
8573 * FUNCTION : resetIfNeededROI
8574 *
8575 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8576 * crop region
8577 *
8578 * PARAMETERS :
8579 * @roi : cam_area_t struct to resize
8580 * @scalerCropRegion : cam_crop_region_t region to compare against
8581 *
8582 *
8583 *==========================================================================*/
8584bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8585 const cam_crop_region_t* scalerCropRegion)
8586{
8587 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8588 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8589 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8590 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8591
8592 /* According to the spec, weight = 0 indicates the roi should be disabled.
8593 * Without this check, the validation below (whether the roi lies inside the
8594 * scaler crop region) would fail, leaving the roi unreset and causing the
8595 * algorithm to keep using a stale roi window.
8596 */
8597 if (roi->weight == 0) {
8598 return true;
8599 }
8600
8601 if ((roi_x_max < scalerCropRegion->left) ||
8602 // right edge of roi window is left of scalar crop's left edge
8603 (roi_y_max < scalerCropRegion->top) ||
8604 // bottom edge of roi window is above scalar crop's top edge
8605 (roi->rect.left > crop_x_max) ||
8606 // left edge of roi window is beyond (to the right of) scalar crop's right edge
8607 (roi->rect.top > crop_y_max)){
8608 // top edge of roi window is below scalar crop's bottom edge
8609 return false;
8610 }
8611 if (roi->rect.left < scalerCropRegion->left) {
8612 roi->rect.left = scalerCropRegion->left;
8613 }
8614 if (roi->rect.top < scalerCropRegion->top) {
8615 roi->rect.top = scalerCropRegion->top;
8616 }
8617 if (roi_x_max > crop_x_max) {
8618 roi_x_max = crop_x_max;
8619 }
8620 if (roi_y_max > crop_y_max) {
8621 roi_y_max = crop_y_max;
8622 }
8623 roi->rect.width = roi_x_max - roi->rect.left;
8624 roi->rect.height = roi_y_max - roi->rect.top;
8625 return true;
8626}
8627
8628/*===========================================================================
8629 * FUNCTION : convertLandmarks
8630 *
8631 * DESCRIPTION: helper method to extract the landmarks from face detection info
8632 *
8633 * PARAMETERS :
8634 * @landmark_data : input landmark data to be converted
8635 * @landmarks : int32_t destination array
8636 *
8637 *
8638 *==========================================================================*/
8639void QCamera3HardwareInterface::convertLandmarks(
8640 cam_face_landmarks_info_t landmark_data,
8641 int32_t *landmarks)
8642{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008643 if (landmark_data.is_left_eye_valid) {
8644 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8645 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8646 } else {
8647 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8648 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8649 }
8650
8651 if (landmark_data.is_right_eye_valid) {
8652 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8653 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8654 } else {
8655 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8656 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8657 }
8658
8659 if (landmark_data.is_mouth_valid) {
8660 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8661 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8662 } else {
8663 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8664 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8665 }
8666}
8667
8668/*===========================================================================
8669 * FUNCTION : setInvalidLandmarks
8670 *
8671 * DESCRIPTION: helper method to set invalid landmarks
8672 *
8673 * PARAMETERS :
8674 * @landmarks : int32_t destination array
8675 *
8676 *
8677 *==========================================================================*/
8678void QCamera3HardwareInterface::setInvalidLandmarks(
8679 int32_t *landmarks)
8680{
8681 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8682 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8683 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8684 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8685 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8686 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008687}
8688
8689#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008690
8691/*===========================================================================
8692 * FUNCTION : getCapabilities
8693 *
8694 * DESCRIPTION: query camera capability from back-end
8695 *
8696 * PARAMETERS :
8697 * @ops : mm-interface ops structure
8698 * @cam_handle : camera handle for which we need capability
8699 *
8700 * RETURN : ptr type of capability structure
8701 * capability for success
8702 * NULL for failure
8703 *==========================================================================*/
8704cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8705 uint32_t cam_handle)
8706{
8707 int rc = NO_ERROR;
8708 QCamera3HeapMemory *capabilityHeap = NULL;
8709 cam_capability_t *cap_ptr = NULL;
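    // Flow: allocate and map a heap buffer to the backend, let query_capability
    // fill it, copy the result into a malloc'd cam_capability_t, then unmap and
    // release the heap on all exit paths via the labels below.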
8710
8711 if (ops == NULL) {
8712 LOGE("Invalid arguments");
8713 return NULL;
8714 }
8715
8716 capabilityHeap = new QCamera3HeapMemory(1);
8717 if (capabilityHeap == NULL) {
8718 LOGE("creation of capabilityHeap failed");
8719 return NULL;
8720 }
8721
8722 /* Allocate memory for capability buffer */
8723 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8724 if(rc != OK) {
8725 LOGE("No memory for cappability");
8726 goto allocate_failed;
8727 }
8728
8729 /* Map memory for capability buffer */
8730 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8731
8732 rc = ops->map_buf(cam_handle,
8733 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8734 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8735 if(rc < 0) {
8736 LOGE("failed to map capability buffer");
8737 rc = FAILED_TRANSACTION;
8738 goto map_failed;
8739 }
8740
8741 /* Query Capability */
8742 rc = ops->query_capability(cam_handle);
8743 if(rc < 0) {
8744 LOGE("failed to query capability");
8745 rc = FAILED_TRANSACTION;
8746 goto query_failed;
8747 }
8748
8749 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8750 if (cap_ptr == NULL) {
8751 LOGE("out of memory");
8752 rc = NO_MEMORY;
8753 goto query_failed;
8754 }
8755
8756 memset(cap_ptr, 0, sizeof(cam_capability_t));
8757 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8758
8759 int index;
8760 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8761 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8762 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8763 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8764 }
8765
8766query_failed:
8767 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8768map_failed:
8769 capabilityHeap->deallocate();
8770allocate_failed:
8771 delete capabilityHeap;
8772
8773 if (rc != NO_ERROR) {
8774 return NULL;
8775 } else {
8776 return cap_ptr;
8777 }
8778}
8779
Thierry Strudel3d639192016-09-09 11:52:26 -07008780/*===========================================================================
8781 * FUNCTION : initCapabilities
8782 *
8783 * DESCRIPTION: initialize camera capabilities in static data struct
8784 *
8785 * PARAMETERS :
8786 * @cameraId : camera Id
8787 *
8788 * RETURN : int32_t type of status
8789 * NO_ERROR -- success
8790 * none-zero failure code
8791 *==========================================================================*/
8792int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8793{
8794 int rc = 0;
8795 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008796 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008797
8798 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8799 if (rc) {
8800 LOGE("camera_open failed. rc = %d", rc);
8801 goto open_failed;
8802 }
8803 if (!cameraHandle) {
8804 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8805 goto open_failed;
8806 }
8807
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008808 handle = get_main_camera_handle(cameraHandle->camera_handle);
8809 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8810 if (gCamCapability[cameraId] == NULL) {
8811 rc = FAILED_TRANSACTION;
8812 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008813 }
8814
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008815 gCamCapability[cameraId]->camera_index = cameraId;
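    // For dual-camera sensors, additionally query the aux camera capability and
    // keep a separate copy of the main camera capability in main_cam_cap.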
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008816 if (is_dual_camera_by_idx(cameraId)) {
8817 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8818 gCamCapability[cameraId]->aux_cam_cap =
8819 getCapabilities(cameraHandle->ops, handle);
8820 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8821 rc = FAILED_TRANSACTION;
8822 free(gCamCapability[cameraId]);
8823 goto failed_op;
8824 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008825
8826 // Copy the main camera capability to main_cam_cap struct
8827 gCamCapability[cameraId]->main_cam_cap =
8828 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8829 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8830 LOGE("out of memory");
8831 rc = NO_MEMORY;
8832 goto failed_op;
8833 }
8834 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8835 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008836 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008837failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008838 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8839 cameraHandle = NULL;
8840open_failed:
8841 return rc;
8842}
8843
8844/*==========================================================================
8845 * FUNCTION : get3AVersion
8846 *
8847 * DESCRIPTION: get the Q3A S/W version
8848 *
8849 * PARAMETERS :
8850 * @sw_version: Reference of Q3A structure which will hold version info upon
8851 * return
8852 *
8853 * RETURN : None
8854 *
8855 *==========================================================================*/
8856void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8857{
8858 if(gCamCapability[mCameraId])
8859 sw_version = gCamCapability[mCameraId]->q3a_version;
8860 else
8861 LOGE("Capability structure NULL!");
8862}
8863
8864
8865/*===========================================================================
8866 * FUNCTION : initParameters
8867 *
8868 * DESCRIPTION: initialize camera parameters
8869 *
8870 * PARAMETERS :
8871 *
8872 * RETURN : int32_t type of status
8873 * NO_ERROR -- success
8874 * none-zero failure code
8875 *==========================================================================*/
8876int QCamera3HardwareInterface::initParameters()
8877{
8878 int rc = 0;
8879
8880 //Allocate Set Param Buffer
8881 mParamHeap = new QCamera3HeapMemory(1);
8882 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8883 if(rc != OK) {
8884 rc = NO_MEMORY;
8885 LOGE("Failed to allocate SETPARM Heap memory");
8886 delete mParamHeap;
8887 mParamHeap = NULL;
8888 return rc;
8889 }
8890
8891 //Map memory for parameters buffer
8892 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8893 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8894 mParamHeap->getFd(0),
8895 sizeof(metadata_buffer_t),
8896 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8897 if(rc < 0) {
8898 LOGE("failed to map SETPARM buffer");
8899 rc = FAILED_TRANSACTION;
8900 mParamHeap->deallocate();
8901 delete mParamHeap;
8902 mParamHeap = NULL;
8903 return rc;
8904 }
8905
8906 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8907
8908 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8909 return rc;
8910}
8911
8912/*===========================================================================
8913 * FUNCTION : deinitParameters
8914 *
8915 * DESCRIPTION: de-initialize camera parameters
8916 *
8917 * PARAMETERS :
8918 *
8919 * RETURN : NONE
8920 *==========================================================================*/
8921void QCamera3HardwareInterface::deinitParameters()
8922{
8923 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8924 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8925
8926 mParamHeap->deallocate();
8927 delete mParamHeap;
8928 mParamHeap = NULL;
8929
8930 mParameters = NULL;
8931
8932 free(mPrevParameters);
8933 mPrevParameters = NULL;
8934}
8935
8936/*===========================================================================
8937 * FUNCTION : calcMaxJpegSize
8938 *
8939 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8940 *
8941 * PARAMETERS :
8942 *
8943 * RETURN : max_jpeg_size
8944 *==========================================================================*/
8945size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8946{
8947 size_t max_jpeg_size = 0;
8948 size_t temp_width, temp_height;
8949 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8950 MAX_SIZES_CNT);
8951 for (size_t i = 0; i < count; i++) {
8952 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8953 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8954 if (temp_width * temp_height > max_jpeg_size ) {
8955 max_jpeg_size = temp_width * temp_height;
8956 }
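    // Size the JPEG buffer as 1.5x the largest pixel count plus the trailing
    // camera3_jpeg_blob_t transport header (a conservative worst-case estimate).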
8957 }
8958 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8959 return max_jpeg_size;
8960}
8961
8962/*===========================================================================
8963 * FUNCTION : getMaxRawSize
8964 *
8965 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8966 *
8967 * PARAMETERS :
8968 *
8969 * RETURN : Largest supported Raw Dimension
8970 *==========================================================================*/
8971cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8972{
8973 int max_width = 0;
8974 cam_dimension_t maxRawSize;
8975
8976 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8977 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8978 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8979 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8980 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8981 }
8982 }
8983 return maxRawSize;
8984}
8985
8986
8987/*===========================================================================
8988 * FUNCTION : calcMaxJpegDim
8989 *
8990 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8991 *
8992 * PARAMETERS :
8993 *
8994 * RETURN : max_jpeg_dim
8995 *==========================================================================*/
8996cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8997{
8998 cam_dimension_t max_jpeg_dim;
8999 cam_dimension_t curr_jpeg_dim;
9000 max_jpeg_dim.width = 0;
9001 max_jpeg_dim.height = 0;
9002 curr_jpeg_dim.width = 0;
9003 curr_jpeg_dim.height = 0;
9004 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9005 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9006 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9007 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9008 max_jpeg_dim.width * max_jpeg_dim.height ) {
9009 max_jpeg_dim.width = curr_jpeg_dim.width;
9010 max_jpeg_dim.height = curr_jpeg_dim.height;
9011 }
9012 }
9013 return max_jpeg_dim;
9014}
9015
9016/*===========================================================================
9017 * FUNCTION : addStreamConfig
9018 *
9019 * DESCRIPTION: adds the stream configuration to the array
9020 *
9021 * PARAMETERS :
9022 * @available_stream_configs : pointer to stream configuration array
9023 * @scalar_format : scalar format
9024 * @dim : configuration dimension
9025 * @config_type : input or output configuration type
9026 *
9027 * RETURN : NONE
9028 *==========================================================================*/
9029void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9030 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9031{
9032 available_stream_configs.add(scalar_format);
9033 available_stream_configs.add(dim.width);
9034 available_stream_configs.add(dim.height);
9035 available_stream_configs.add(config_type);
9036}
9037
9038/*===========================================================================
9039 * FUNCTION : supportBurstCapture
9040 *
9041 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9042 *
9043 * PARAMETERS :
9044 * @cameraId : camera Id
9045 *
9046 * RETURN : true if camera supports BURST_CAPTURE
9047 * false otherwise
9048 *==========================================================================*/
9049bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9050{
9051 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9052 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9053 const int32_t highResWidth = 3264;
9054 const int32_t highResHeight = 2448;
9055
9056 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9057 // Maximum resolution images cannot be captured at >= 10fps
9058 // -> not supporting BURST_CAPTURE
9059 return false;
9060 }
9061
9062 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9063 // Maximum resolution images can be captured at >= 20fps
9064 // --> supporting BURST_CAPTURE
9065 return true;
9066 }
9067
9068 // Find the smallest highRes resolution, or largest resolution if there is none
9069 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9070 MAX_SIZES_CNT);
9071 size_t highRes = 0;
9072 while ((highRes + 1 < totalCnt) &&
9073 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9074 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9075 highResWidth * highResHeight)) {
9076 highRes++;
9077 }
9078 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9079 return true;
9080 } else {
9081 return false;
9082 }
9083}
9084
9085/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009086 * FUNCTION : getPDStatIndex
9087 *
9088 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9089 *
9090 * PARAMETERS :
9091 * @caps : camera capabilities
9092 *
9093 * RETURN : int32_t type
9094 * non-negative - on success
9095 * -1 - on failure
9096 *==========================================================================*/
9097int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9098 if (nullptr == caps) {
9099 return -1;
9100 }
9101
9102 uint32_t metaRawCount = caps->meta_raw_channel_count;
9103 int32_t ret = -1;
9104 for (size_t i = 0; i < metaRawCount; i++) {
9105 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9106 ret = i;
9107 break;
9108 }
9109 }
9110
9111 return ret;
9112}
9113
9114/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009115 * FUNCTION : initStaticMetadata
9116 *
9117 * DESCRIPTION: initialize the static metadata
9118 *
9119 * PARAMETERS :
9120 * @cameraId : camera Id
9121 *
9122 * RETURN : int32_t type of status
9123 * 0 -- success
9124 * non-zero failure code
9125 *==========================================================================*/
9126int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9127{
9128 int rc = 0;
9129 CameraMetadata staticInfo;
9130 size_t count = 0;
9131 bool limitedDevice = false;
9132 char prop[PROPERTY_VALUE_MAX];
9133 bool supportBurst = false;
9134
9135 supportBurst = supportBurstCapture(cameraId);
9136
9137 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9138     * guaranteed or if min fps of max resolution is less than 20 fps, it is
9139     * advertised as a limited device */
9140 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9141 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9142 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9143 !supportBurst;
9144
9145 uint8_t supportedHwLvl = limitedDevice ?
9146 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009147#ifndef USE_HAL_3_3
9148 // LEVEL_3 - This device will support level 3.
9149 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9150#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009151 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009152#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009153
9154 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9155 &supportedHwLvl, 1);
9156
9157 bool facingBack = false;
9158 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9159 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9160 facingBack = true;
9161 }
9162 /*HAL 3 only*/
9163 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9164 &gCamCapability[cameraId]->min_focus_distance, 1);
9165
9166 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9167 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9168
9169 /*should be using focal lengths but sensor doesn't provide that info now*/
9170 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9171 &gCamCapability[cameraId]->focal_length,
9172 1);
9173
9174 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9175 gCamCapability[cameraId]->apertures,
9176 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9177
9178 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9179 gCamCapability[cameraId]->filter_densities,
9180 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9181
9182
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009183 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9184 size_t mode_count =
9185 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9186 for (size_t i = 0; i < mode_count; i++) {
9187 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9188 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009189 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009190 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009191
9192 int32_t lens_shading_map_size[] = {
9193 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9194 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9195 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9196 lens_shading_map_size,
9197 sizeof(lens_shading_map_size)/sizeof(int32_t));
9198
9199 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9200 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9201
9202 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9203 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9204
9205 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9206 &gCamCapability[cameraId]->max_frame_duration, 1);
9207
9208 camera_metadata_rational baseGainFactor = {
9209 gCamCapability[cameraId]->base_gain_factor.numerator,
9210 gCamCapability[cameraId]->base_gain_factor.denominator};
9211 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9212 &baseGainFactor, 1);
9213
9214 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9215 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9216
9217 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9218 gCamCapability[cameraId]->pixel_array_size.height};
9219 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9220 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9221
9222 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9223 gCamCapability[cameraId]->active_array_size.top,
9224 gCamCapability[cameraId]->active_array_size.width,
9225 gCamCapability[cameraId]->active_array_size.height};
9226 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9227 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9228
9229 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9230 &gCamCapability[cameraId]->white_level, 1);
9231
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009232 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9233 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9234 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009235 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009236 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009237
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009238#ifndef USE_HAL_3_3
9239 bool hasBlackRegions = false;
9240 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9241 LOGW("black_region_count: %d is bounded to %d",
9242 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9243 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9244 }
9245 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9246 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9247 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9248 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9249 }
9250 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9251 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9252 hasBlackRegions = true;
9253 }
9254#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009255 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9256 &gCamCapability[cameraId]->flash_charge_duration, 1);
9257
9258 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9259 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9260
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009261 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9262 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9263 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009264 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9265 &timestampSource, 1);
9266
Thierry Strudel54dc9782017-02-15 12:12:10 -08009267 //update histogram vendor data
9268 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009269 &gCamCapability[cameraId]->histogram_size, 1);
9270
Thierry Strudel54dc9782017-02-15 12:12:10 -08009271 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009272 &gCamCapability[cameraId]->max_histogram_count, 1);
9273
Shuzhen Wang14415f52016-11-16 18:26:18 -08009274 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9275    //so that the app can request fewer bins than the maximum supported.
9276 std::vector<int32_t> histBins;
9277 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9278 histBins.push_back(maxHistBins);
9279 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9280 (maxHistBins & 0x1) == 0) {
9281 histBins.push_back(maxHistBins >> 1);
9282 maxHistBins >>= 1;
9283 }
9284 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9285 histBins.data(), histBins.size());
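    // Worked example (MIN_CAM_HISTOGRAM_STATS_SIZE of 64 is assumed here purely
    // for illustration): with max_histogram_count = 256 the loop above yields
    // {256, 128, 64}; with 512 it yields {512, 256, 128, 64}. Halving stops once
    // the current value is odd or the next value would drop below the minimum.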
9286
Thierry Strudel3d639192016-09-09 11:52:26 -07009287 int32_t sharpness_map_size[] = {
9288 gCamCapability[cameraId]->sharpness_map_size.width,
9289 gCamCapability[cameraId]->sharpness_map_size.height};
9290
9291 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9292 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9293
9294 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9295 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9296
Emilian Peev0f3c3162017-03-15 12:57:46 +00009297 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9298 if (0 <= indexPD) {
9299 // Advertise PD stats data as part of the Depth capabilities
9300 int32_t depthWidth =
9301 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9302 int32_t depthHeight =
9303 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009304 int32_t depthStride =
9305 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009306 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9307 assert(0 < depthSamplesCount);
9308 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9309 &depthSamplesCount, 1);
9310
9311 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9312 depthHeight,
9313 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9314 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9315 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9316 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9317 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9318
9319 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9320 depthHeight, 33333333,
9321 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9322 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9323 depthMinDuration,
9324 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9325
9326 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9327 depthHeight, 0,
9328 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9329 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9330 depthStallDuration,
9331 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9332
9333 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9334 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009335
9336 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9337 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9338 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009339 }
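    // Sizing sketch for the PDAF depth blob above (the 640x480 dimensions are
    // hypothetical): such a PD statistics buffer gives depthStride = 640 * 2 =
    // 1280 and depthSamplesCount = (640 * 480 * 2) / 16 = 38400, which is what
    // gets advertised in ANDROID_DEPTH_MAX_DEPTH_SAMPLES and the BLOB config.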
9340
Thierry Strudel3d639192016-09-09 11:52:26 -07009341 int32_t scalar_formats[] = {
9342 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9343 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9344 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9345 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9346 HAL_PIXEL_FORMAT_RAW10,
9347 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009348 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9349 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9350 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009351
9352 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9353 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9354 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9355 count, MAX_SIZES_CNT, available_processed_sizes);
9356 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9357 available_processed_sizes, count * 2);
9358
9359 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9360 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9361 makeTable(gCamCapability[cameraId]->raw_dim,
9362 count, MAX_SIZES_CNT, available_raw_sizes);
9363 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9364 available_raw_sizes, count * 2);
9365
9366 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9367 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9368 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9369 count, MAX_SIZES_CNT, available_fps_ranges);
9370 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9371 available_fps_ranges, count * 2);
9372
9373 camera_metadata_rational exposureCompensationStep = {
9374 gCamCapability[cameraId]->exp_compensation_step.numerator,
9375 gCamCapability[cameraId]->exp_compensation_step.denominator};
9376 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9377 &exposureCompensationStep, 1);
9378
9379 Vector<uint8_t> availableVstabModes;
9380 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9381 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009382 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009383 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009384 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009385 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009386 count = IS_TYPE_MAX;
9387 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9388 for (size_t i = 0; i < count; i++) {
9389 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9390 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9391 eisSupported = true;
9392 break;
9393 }
9394 }
9395 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009396 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9397 }
9398 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9399 availableVstabModes.array(), availableVstabModes.size());
9400
9401 /*HAL 1 and HAL 3 common*/
9402 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9403 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9404 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009405 // Cap the max zoom to the max preferred value
9406 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009407 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9408 &maxZoom, 1);
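    // For instance, a zoom ratio table ending at 800 (hypothetical) with
    // minZoomStep = 100 yields a raw ratio of 8x, which is then capped to
    // MAX_PREFERRED_ZOOM_RATIO before being advertised.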
9409
9410 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9411 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9412
9413 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9414 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9415 max3aRegions[2] = 0; /* AF not supported */
9416 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9417 max3aRegions, 3);
9418
9419 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9420 memset(prop, 0, sizeof(prop));
9421 property_get("persist.camera.facedetect", prop, "1");
9422 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9423 LOGD("Support face detection mode: %d",
9424 supportedFaceDetectMode);
9425
9426 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009427    /* supported face detect mode should be OFF if the max number of faces is 0 */
9428 if (maxFaces <= 0) {
9429 supportedFaceDetectMode = 0;
9430 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009431 Vector<uint8_t> availableFaceDetectModes;
9432 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9433 if (supportedFaceDetectMode == 1) {
9434 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9435 } else if (supportedFaceDetectMode == 2) {
9436 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9437 } else if (supportedFaceDetectMode == 3) {
9438 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9439 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9440 } else {
9441 maxFaces = 0;
9442 }
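    // Example: "adb shell setprop persist.camera.facedetect 3" advertises OFF,
    // SIMPLE and FULL; a value of 0 (or maxFaces <= 0) leaves only OFF and
    // forces the reported max face count to 0.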
9443 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9444 availableFaceDetectModes.array(),
9445 availableFaceDetectModes.size());
9446 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9447 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009448 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9449 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9450 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009451
9452 int32_t exposureCompensationRange[] = {
9453 gCamCapability[cameraId]->exposure_compensation_min,
9454 gCamCapability[cameraId]->exposure_compensation_max};
9455 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9456 exposureCompensationRange,
9457 sizeof(exposureCompensationRange)/sizeof(int32_t));
9458
9459 uint8_t lensFacing = (facingBack) ?
9460 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9461 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9462
9463 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9464 available_thumbnail_sizes,
9465 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9466
9467    /*all sizes will be combined into this tag*/
9468 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9469 /*android.scaler.availableStreamConfigurations*/
9470 Vector<int32_t> available_stream_configs;
9471 cam_dimension_t active_array_dim;
9472 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9473 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009474
9475    /* Advertise the list of supported input dimensions based on the property below.
9476    By default, only sizes of at least 5MP are advertised as input.
9477    Note that the setprop resolution format should be WxH,
9478    e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9479    To list all supported sizes, the property needs to be set to "0x0" */
9480 cam_dimension_t minInputSize = {2592,1944}; //5MP
9481 memset(prop, 0, sizeof(prop));
9482 property_get("persist.camera.input.minsize", prop, "2592x1944");
9483 if (strlen(prop) > 0) {
9484 char *saveptr = NULL;
9485 char *token = strtok_r(prop, "x", &saveptr);
9486 if (token != NULL) {
9487 minInputSize.width = atoi(token);
9488 }
9489 token = strtok_r(NULL, "x", &saveptr);
9490 if (token != NULL) {
9491 minInputSize.height = atoi(token);
9492 }
9493 }
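    // Parsing sketch: setting "persist.camera.input.minsize" to "1280x720"
    // splits on 'x' into minInputSize = {1280, 720}; the default "2592x1944"
    // keeps the 5MP floor, and "0x0" removes the size restriction entirely.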
9494
Thierry Strudel3d639192016-09-09 11:52:26 -07009495 /* Add input/output stream configurations for each scalar formats*/
9496 for (size_t j = 0; j < scalar_formats_count; j++) {
9497 switch (scalar_formats[j]) {
9498 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9499 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9500 case HAL_PIXEL_FORMAT_RAW10:
9501 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9502 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9503 addStreamConfig(available_stream_configs, scalar_formats[j],
9504 gCamCapability[cameraId]->raw_dim[i],
9505 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9506 }
9507 break;
9508 case HAL_PIXEL_FORMAT_BLOB:
9509 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9510 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9511 addStreamConfig(available_stream_configs, scalar_formats[j],
9512 gCamCapability[cameraId]->picture_sizes_tbl[i],
9513 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9514 }
9515 break;
9516 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9517 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9518 default:
9519 cam_dimension_t largest_picture_size;
9520 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9521 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9522 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9523 addStreamConfig(available_stream_configs, scalar_formats[j],
9524 gCamCapability[cameraId]->picture_sizes_tbl[i],
9525 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009526            /* For the below 2 formats we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009527 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9528 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009529 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9530 >= minInputSize.width) || (gCamCapability[cameraId]->
9531 picture_sizes_tbl[i].height >= minInputSize.height)) {
9532 addStreamConfig(available_stream_configs, scalar_formats[j],
9533 gCamCapability[cameraId]->picture_sizes_tbl[i],
9534 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9535 }
9536 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009537 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009538
Thierry Strudel3d639192016-09-09 11:52:26 -07009539 break;
9540 }
9541 }
9542
9543 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9544 available_stream_configs.array(), available_stream_configs.size());
9545 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9546 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9547
9548 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9549 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9550
9551 /* android.scaler.availableMinFrameDurations */
9552 Vector<int64_t> available_min_durations;
9553 for (size_t j = 0; j < scalar_formats_count; j++) {
9554 switch (scalar_formats[j]) {
9555 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9556 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9557 case HAL_PIXEL_FORMAT_RAW10:
9558 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9559 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9560 available_min_durations.add(scalar_formats[j]);
9561 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9562 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9563 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9564 }
9565 break;
9566 default:
9567 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9568 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9569 available_min_durations.add(scalar_formats[j]);
9570 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9571 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9572 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9573 }
9574 break;
9575 }
9576 }
9577 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9578 available_min_durations.array(), available_min_durations.size());
9579
9580 Vector<int32_t> available_hfr_configs;
9581 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9582 int32_t fps = 0;
9583 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9584 case CAM_HFR_MODE_60FPS:
9585 fps = 60;
9586 break;
9587 case CAM_HFR_MODE_90FPS:
9588 fps = 90;
9589 break;
9590 case CAM_HFR_MODE_120FPS:
9591 fps = 120;
9592 break;
9593 case CAM_HFR_MODE_150FPS:
9594 fps = 150;
9595 break;
9596 case CAM_HFR_MODE_180FPS:
9597 fps = 180;
9598 break;
9599 case CAM_HFR_MODE_210FPS:
9600 fps = 210;
9601 break;
9602 case CAM_HFR_MODE_240FPS:
9603 fps = 240;
9604 break;
9605 case CAM_HFR_MODE_480FPS:
9606 fps = 480;
9607 break;
9608 case CAM_HFR_MODE_OFF:
9609 case CAM_HFR_MODE_MAX:
9610 default:
9611 break;
9612 }
9613
9614 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9615 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9616 /* For each HFR frame rate, need to advertise one variable fps range
9617 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9618 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9619 * set by the app. When video recording is started, [120, 120] is
9620 * set. This way sensor configuration does not change when recording
9621 * is started */
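            /* Concrete example, assuming PREVIEW_FPS_FOR_HFR is 30 (an assumption
             * made only for illustration): a 1920x1080 entry in the 120 fps HFR
             * table produces (1920, 1080, 30, 120, 4) followed by
             * (1920, 1080, 120, 120, 4). */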
9622
9623 /* (width, height, fps_min, fps_max, batch_size_max) */
9624 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9625 j < MAX_SIZES_CNT; j++) {
9626 available_hfr_configs.add(
9627 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9628 available_hfr_configs.add(
9629 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9630 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9631 available_hfr_configs.add(fps);
9632 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9633
9634 /* (width, height, fps_min, fps_max, batch_size_max) */
9635 available_hfr_configs.add(
9636 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9637 available_hfr_configs.add(
9638 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9639 available_hfr_configs.add(fps);
9640 available_hfr_configs.add(fps);
9641 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9642 }
9643 }
9644 }
9645 //Advertise HFR capability only if the property is set
9646 memset(prop, 0, sizeof(prop));
9647 property_get("persist.camera.hal3hfr.enable", prop, "1");
9648 uint8_t hfrEnable = (uint8_t)atoi(prop);
9649
9650 if(hfrEnable && available_hfr_configs.array()) {
9651 staticInfo.update(
9652 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9653 available_hfr_configs.array(), available_hfr_configs.size());
9654 }
9655
9656 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9657 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9658 &max_jpeg_size, 1);
9659
9660 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9661 size_t size = 0;
9662 count = CAM_EFFECT_MODE_MAX;
9663 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9664 for (size_t i = 0; i < count; i++) {
9665 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9666 gCamCapability[cameraId]->supported_effects[i]);
9667 if (NAME_NOT_FOUND != val) {
9668 avail_effects[size] = (uint8_t)val;
9669 size++;
9670 }
9671 }
9672 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9673 avail_effects,
9674 size);
9675
9676 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9677 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9678 size_t supported_scene_modes_cnt = 0;
9679 count = CAM_SCENE_MODE_MAX;
9680 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9681 for (size_t i = 0; i < count; i++) {
9682 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9683 CAM_SCENE_MODE_OFF) {
9684 int val = lookupFwkName(SCENE_MODES_MAP,
9685 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9686 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009687
Thierry Strudel3d639192016-09-09 11:52:26 -07009688 if (NAME_NOT_FOUND != val) {
9689 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9690 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9691 supported_scene_modes_cnt++;
9692 }
9693 }
9694 }
9695 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9696 avail_scene_modes,
9697 supported_scene_modes_cnt);
9698
9699 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9700 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9701 supported_scene_modes_cnt,
9702 CAM_SCENE_MODE_MAX,
9703 scene_mode_overrides,
9704 supported_indexes,
9705 cameraId);
9706
9707 if (supported_scene_modes_cnt == 0) {
9708 supported_scene_modes_cnt = 1;
9709 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9710 }
9711
9712 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9713 scene_mode_overrides, supported_scene_modes_cnt * 3);
9714
9715 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9716 ANDROID_CONTROL_MODE_AUTO,
9717 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9718 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9719 available_control_modes,
9720 3);
9721
9722 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9723 size = 0;
9724 count = CAM_ANTIBANDING_MODE_MAX;
9725 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9726 for (size_t i = 0; i < count; i++) {
9727 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9728 gCamCapability[cameraId]->supported_antibandings[i]);
9729 if (NAME_NOT_FOUND != val) {
9730 avail_antibanding_modes[size] = (uint8_t)val;
9731 size++;
9732 }
9733
9734 }
9735 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9736 avail_antibanding_modes,
9737 size);
9738
9739 uint8_t avail_abberation_modes[] = {
9740 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9741 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9742 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9743 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9744 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9745 if (0 == count) {
9746        // If no aberration correction modes are available for a device, advertise only the OFF mode
9747 size = 1;
9748 } else {
9749        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9750        // So, advertise all 3 modes if at least one mode is supported, as per the
9751        // new M requirement.
9752 size = 3;
9753 }
9754 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9755 avail_abberation_modes,
9756 size);
9757
9758 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9759 size = 0;
9760 count = CAM_FOCUS_MODE_MAX;
9761 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9762 for (size_t i = 0; i < count; i++) {
9763 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9764 gCamCapability[cameraId]->supported_focus_modes[i]);
9765 if (NAME_NOT_FOUND != val) {
9766 avail_af_modes[size] = (uint8_t)val;
9767 size++;
9768 }
9769 }
9770 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9771 avail_af_modes,
9772 size);
9773
9774 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9775 size = 0;
9776 count = CAM_WB_MODE_MAX;
9777 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9778 for (size_t i = 0; i < count; i++) {
9779 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9780 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9781 gCamCapability[cameraId]->supported_white_balances[i]);
9782 if (NAME_NOT_FOUND != val) {
9783 avail_awb_modes[size] = (uint8_t)val;
9784 size++;
9785 }
9786 }
9787 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9788 avail_awb_modes,
9789 size);
9790
9791 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9792 count = CAM_FLASH_FIRING_LEVEL_MAX;
9793 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9794 count);
9795 for (size_t i = 0; i < count; i++) {
9796 available_flash_levels[i] =
9797 gCamCapability[cameraId]->supported_firing_levels[i];
9798 }
9799 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9800 available_flash_levels, count);
9801
9802 uint8_t flashAvailable;
9803 if (gCamCapability[cameraId]->flash_available)
9804 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9805 else
9806 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9807 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9808 &flashAvailable, 1);
9809
9810 Vector<uint8_t> avail_ae_modes;
9811 count = CAM_AE_MODE_MAX;
9812 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9813 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009814 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9815 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9816 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9817 }
9818 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009819 }
9820 if (flashAvailable) {
9821 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9822 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9823 }
9824 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9825 avail_ae_modes.array(),
9826 avail_ae_modes.size());
9827
9828 int32_t sensitivity_range[2];
9829 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9830 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9831 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9832 sensitivity_range,
9833 sizeof(sensitivity_range) / sizeof(int32_t));
9834
9835 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9836 &gCamCapability[cameraId]->max_analog_sensitivity,
9837 1);
9838
9839 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9840 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9841 &sensor_orientation,
9842 1);
9843
9844 int32_t max_output_streams[] = {
9845 MAX_STALLING_STREAMS,
9846 MAX_PROCESSED_STREAMS,
9847 MAX_RAW_STREAMS};
9848 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9849 max_output_streams,
9850 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9851
9852 uint8_t avail_leds = 0;
9853 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9854 &avail_leds, 0);
9855
9856 uint8_t focus_dist_calibrated;
9857 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9858 gCamCapability[cameraId]->focus_dist_calibrated);
9859 if (NAME_NOT_FOUND != val) {
9860 focus_dist_calibrated = (uint8_t)val;
9861 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9862 &focus_dist_calibrated, 1);
9863 }
9864
9865 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9866 size = 0;
9867 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9868 MAX_TEST_PATTERN_CNT);
9869 for (size_t i = 0; i < count; i++) {
9870 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9871 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9872 if (NAME_NOT_FOUND != testpatternMode) {
9873 avail_testpattern_modes[size] = testpatternMode;
9874 size++;
9875 }
9876 }
9877 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9878 avail_testpattern_modes,
9879 size);
9880
9881 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9882 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9883 &max_pipeline_depth,
9884 1);
9885
9886 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9887 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9888 &partial_result_count,
9889 1);
9890
9891 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9892 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9893
9894 Vector<uint8_t> available_capabilities;
9895 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9896 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9897 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9898 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9899 if (supportBurst) {
9900 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9901 }
9902 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9903 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9904 if (hfrEnable && available_hfr_configs.array()) {
9905 available_capabilities.add(
9906 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9907 }
9908
9909 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9910 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9911 }
9912 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9913 available_capabilities.array(),
9914 available_capabilities.size());
9915
9916    //aeLockAvailable is set to true if capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9917 //Assumption is that all bayer cameras support MANUAL_SENSOR.
9918 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9919 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9920
9921 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9922 &aeLockAvailable, 1);
9923
9924    //awbLockAvailable is set to true if capabilities include MANUAL_POST_PROCESSING or
9925 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
9926 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9927 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9928
9929 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9930 &awbLockAvailable, 1);
9931
9932 int32_t max_input_streams = 1;
9933 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9934 &max_input_streams,
9935 1);
9936
9937 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9938 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9939 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9940 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9941 HAL_PIXEL_FORMAT_YCbCr_420_888};
9942 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9943 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
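    // Reading the map above: an IMPLEMENTATION_DEFINED input can be reprocessed
    // into BLOB or YCbCr_420_888, and a YCbCr_420_888 input likewise into BLOB
    // or YCbCr_420_888; each entry has the form (input, num_outputs, outputs...).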
9944
9945 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9946 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9947 &max_latency,
9948 1);
9949
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009950#ifndef USE_HAL_3_3
9951 int32_t isp_sensitivity_range[2];
9952 isp_sensitivity_range[0] =
9953 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9954 isp_sensitivity_range[1] =
9955 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9956 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9957 isp_sensitivity_range,
9958 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9959#endif
9960
Thierry Strudel3d639192016-09-09 11:52:26 -07009961 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9962 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9963 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9964 available_hot_pixel_modes,
9965 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9966
9967 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9968 ANDROID_SHADING_MODE_FAST,
9969 ANDROID_SHADING_MODE_HIGH_QUALITY};
9970 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9971 available_shading_modes,
9972 3);
9973
9974 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9975 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9976 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9977 available_lens_shading_map_modes,
9978 2);
9979
9980 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9981 ANDROID_EDGE_MODE_FAST,
9982 ANDROID_EDGE_MODE_HIGH_QUALITY,
9983 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9984 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9985 available_edge_modes,
9986 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9987
9988 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9989 ANDROID_NOISE_REDUCTION_MODE_FAST,
9990 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9991 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9992 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9993 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9994 available_noise_red_modes,
9995 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9996
9997 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9998 ANDROID_TONEMAP_MODE_FAST,
9999 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10000 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10001 available_tonemap_modes,
10002 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10003
10004 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10005 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10006 available_hot_pixel_map_modes,
10007 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10008
10009 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10010 gCamCapability[cameraId]->reference_illuminant1);
10011 if (NAME_NOT_FOUND != val) {
10012 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10013 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10014 }
10015
10016 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10017 gCamCapability[cameraId]->reference_illuminant2);
10018 if (NAME_NOT_FOUND != val) {
10019 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10020 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10021 }
10022
10023 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10024 (void *)gCamCapability[cameraId]->forward_matrix1,
10025 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10026
10027 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10028 (void *)gCamCapability[cameraId]->forward_matrix2,
10029 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10030
10031 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10032 (void *)gCamCapability[cameraId]->color_transform1,
10033 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10034
10035 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10036 (void *)gCamCapability[cameraId]->color_transform2,
10037 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10038
10039 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10040 (void *)gCamCapability[cameraId]->calibration_transform1,
10041 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10042
10043 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10044 (void *)gCamCapability[cameraId]->calibration_transform2,
10045 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10046
10047 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10048 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10049 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10050 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10051 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10052 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10053 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10054 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10055 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10056 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10057 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10058 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10059 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10060 ANDROID_JPEG_GPS_COORDINATES,
10061 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10062 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10063 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10064 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10065 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10066 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10067 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10068 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10069 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10070 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010071#ifndef USE_HAL_3_3
10072 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10073#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010074 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010075 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010076 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10077 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010078 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010079 /* DevCamDebug metadata request_keys_basic */
10080 DEVCAMDEBUG_META_ENABLE,
10081 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010082 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010083 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010084 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010085 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010086 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010087 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010088
10089 size_t request_keys_cnt =
10090 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10091 Vector<int32_t> available_request_keys;
10092 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10093 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10094 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10095 }
10096
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010097 if (gExposeEnableZslKey) {
Chien-Yu Chen3b630e52017-06-02 15:39:47 -070010098 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || cameraId == 0) {
10099 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10100 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010101 }
10102
Thierry Strudel3d639192016-09-09 11:52:26 -070010103 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10104 available_request_keys.array(), available_request_keys.size());
10105
10106 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10107 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10108 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10109 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10110 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10111 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10112 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10113 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10114 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10115 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10116 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10117 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10118 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10119 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10120 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10121 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10122 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010123 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010124 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10125 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10126 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010127 ANDROID_STATISTICS_FACE_SCORES,
10128#ifndef USE_HAL_3_3
10129 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10130#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010131 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010132 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010133 // DevCamDebug metadata result_keys_basic
10134 DEVCAMDEBUG_META_ENABLE,
10135 // DevCamDebug metadata result_keys AF
10136 DEVCAMDEBUG_AF_LENS_POSITION,
10137 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10138 DEVCAMDEBUG_AF_TOF_DISTANCE,
10139 DEVCAMDEBUG_AF_LUMA,
10140 DEVCAMDEBUG_AF_HAF_STATE,
10141 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10142 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10143 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10144 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10145 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10146 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10147 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10148 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10149 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10150 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10151 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10152 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10153 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10154 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10155 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10156 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10157 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10158 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10159 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10160 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10161 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10162 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10163 // DevCamDebug metadata result_keys AEC
10164 DEVCAMDEBUG_AEC_TARGET_LUMA,
10165 DEVCAMDEBUG_AEC_COMP_LUMA,
10166 DEVCAMDEBUG_AEC_AVG_LUMA,
10167 DEVCAMDEBUG_AEC_CUR_LUMA,
10168 DEVCAMDEBUG_AEC_LINECOUNT,
10169 DEVCAMDEBUG_AEC_REAL_GAIN,
10170 DEVCAMDEBUG_AEC_EXP_INDEX,
10171 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010172 // DevCamDebug metadata result_keys zzHDR
10173 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10174 DEVCAMDEBUG_AEC_L_LINECOUNT,
10175 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10176 DEVCAMDEBUG_AEC_S_LINECOUNT,
10177 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10178 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10179 // DevCamDebug metadata result_keys ADRC
10180 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10181 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10182 DEVCAMDEBUG_AEC_GTM_RATIO,
10183 DEVCAMDEBUG_AEC_LTM_RATIO,
10184 DEVCAMDEBUG_AEC_LA_RATIO,
10185 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010186 // DevCamDebug metadata result_keys AWB
10187 DEVCAMDEBUG_AWB_R_GAIN,
10188 DEVCAMDEBUG_AWB_G_GAIN,
10189 DEVCAMDEBUG_AWB_B_GAIN,
10190 DEVCAMDEBUG_AWB_CCT,
10191 DEVCAMDEBUG_AWB_DECISION,
10192 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010193 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10194 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10195 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010196 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010197 };
10198
Thierry Strudel3d639192016-09-09 11:52:26 -070010199 size_t result_keys_cnt =
10200 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10201
10202 Vector<int32_t> available_result_keys;
10203 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10204 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10205 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10206 }
10207 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10208 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10209 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10210 }
10211 if (supportedFaceDetectMode == 1) {
10212 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10213 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10214 } else if ((supportedFaceDetectMode == 2) ||
10215 (supportedFaceDetectMode == 3)) {
10216 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10217 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10218 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010219#ifndef USE_HAL_3_3
10220 if (hasBlackRegions) {
10221 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10222 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10223 }
10224#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010225
10226 if (gExposeEnableZslKey) {
10227 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10228 }
10229
Thierry Strudel3d639192016-09-09 11:52:26 -070010230 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10231 available_result_keys.array(), available_result_keys.size());
10232
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010233 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010234 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10235 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10236 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10237 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10238 ANDROID_SCALER_CROPPING_TYPE,
10239 ANDROID_SYNC_MAX_LATENCY,
10240 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10241 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10242 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10243 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10244 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10245 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10246 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10247 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10248 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10249 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10250 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10251 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10252 ANDROID_LENS_FACING,
10253 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10254 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10255 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10256 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10257 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10258 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10259 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10260 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10261 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10262 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10263 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10264 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10265 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10266 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10267 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10268 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10269 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10270 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10271 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10272 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010273 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010274 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10275 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10276 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10277 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10278 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10279 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10280 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10281 ANDROID_CONTROL_AVAILABLE_MODES,
10282 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10283 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10284 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10285 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010286 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10287#ifndef USE_HAL_3_3
10288 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10289 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10290#endif
10291 };
10292
10293 Vector<int32_t> available_characteristics_keys;
10294 available_characteristics_keys.appendArray(characteristics_keys_basic,
10295 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10296#ifndef USE_HAL_3_3
10297 if (hasBlackRegions) {
10298 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10299 }
10300#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010301
10302 if (0 <= indexPD) {
10303 int32_t depthKeys[] = {
10304 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10305 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10306 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10307 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10308 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10309 };
10310 available_characteristics_keys.appendArray(depthKeys,
10311 sizeof(depthKeys) / sizeof(depthKeys[0]));
10312 }
10313
Thierry Strudel3d639192016-09-09 11:52:26 -070010314 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010315 available_characteristics_keys.array(),
10316 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010317
10318 /*available stall durations depend on the hw + sw and will be different for different devices */
10319 /*have to add for raw after implementation*/
10320 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10321 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10322
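    // ANDROID_SCALER_AVAILABLE_STALL_DURATIONS is a flat list of
    // (format, width, height, stall duration in ns) entries, one per stalling
    // output configuration (JPEG and RAW16 here).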
10323 Vector<int64_t> available_stall_durations;
10324 for (uint32_t j = 0; j < stall_formats_count; j++) {
10325 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10326 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10327 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10328 available_stall_durations.add(stall_formats[j]);
10329 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10330 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10331 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10332 }
10333 } else {
10334 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10335 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10336 available_stall_durations.add(stall_formats[j]);
10337 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10338 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10339 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10340 }
10341 }
10342 }
10343 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10344 available_stall_durations.array(),
10345 available_stall_durations.size());
10346
10347 //QCAMERA3_OPAQUE_RAW
10348 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10349 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10350 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10351 case LEGACY_RAW:
10352 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10353 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10354 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10355 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10356 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10357 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10358 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10359 break;
10360 case MIPI_RAW:
10361 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10362 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10363 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10364 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10365 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10366 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10367 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10368 break;
10369 default:
10370 LOGE("unknown opaque_raw_format %d",
10371 gCamCapability[cameraId]->opaque_raw_fmt);
10372 break;
10373 }
10374 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10375
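    // QCAMERA3_OPAQUE_RAW_STRIDES is a flat list of (width, height, stride)
    // triples, one per supported opaque RAW dimension, using the stride
    // computed for the opaque RAW format selected above.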
10376 Vector<int32_t> strides;
10377 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10378 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10379 cam_stream_buf_plane_info_t buf_planes;
10380 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10381 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10382 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10383 &gCamCapability[cameraId]->padding_info, &buf_planes);
10384 strides.add(buf_planes.plane_info.mp[0].stride);
10385 }
10386 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10387 strides.size());
10388
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010389 //TBD: remove the following line once backend advertises zzHDR in feature mask
10390 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010391 //Video HDR default
10392 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10393 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010394 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010395 int32_t vhdr_mode[] = {
10396 QCAMERA3_VIDEO_HDR_MODE_OFF,
10397 QCAMERA3_VIDEO_HDR_MODE_ON};
10398
10399 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10400 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10401 vhdr_mode, vhdr_mode_count);
10402 }
10403
Thierry Strudel3d639192016-09-09 11:52:26 -070010404 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10405 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10406 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10407
10408 uint8_t isMonoOnly =
10409 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10410 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10411 &isMonoOnly, 1);
10412
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010413#ifndef USE_HAL_3_3
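    // ANDROID_SENSOR_OPAQUE_RAW_SIZE is a flat list of
    // (width, height, frame length in bytes) triples, one per supported
    // opaque RAW dimension.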
10414 Vector<int32_t> opaque_size;
10415 for (size_t j = 0; j < scalar_formats_count; j++) {
10416 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10417 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10418 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10419 cam_stream_buf_plane_info_t buf_planes;
10420
10421 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10422 &gCamCapability[cameraId]->padding_info, &buf_planes);
10423
10424 if (rc == 0) {
10425 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10426 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10427 opaque_size.add(buf_planes.plane_info.frame_len);
10428 } else {
10429 LOGE("raw frame calculation failed!");
10430 }
10431 }
10432 }
10433 }
10434
10435 if ((opaque_size.size() > 0) &&
10436 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10437 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10438 else
10439 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10440#endif
10441
Thierry Strudel04e026f2016-10-10 11:27:36 -070010442 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10443 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10444 size = 0;
10445 count = CAM_IR_MODE_MAX;
10446 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10447 for (size_t i = 0; i < count; i++) {
10448 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10449 gCamCapability[cameraId]->supported_ir_modes[i]);
10450 if (NAME_NOT_FOUND != val) {
10451 avail_ir_modes[size] = (int32_t)val;
10452 size++;
10453 }
10454 }
10455 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10456 avail_ir_modes, size);
10457 }
10458
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010459 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10460 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10461 size = 0;
10462 count = CAM_AEC_CONVERGENCE_MAX;
10463 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10464 for (size_t i = 0; i < count; i++) {
10465 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10466 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10467 if (NAME_NOT_FOUND != val) {
10468 available_instant_aec_modes[size] = (int32_t)val;
10469 size++;
10470 }
10471 }
10472 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10473 available_instant_aec_modes, size);
10474 }
10475
Thierry Strudel54dc9782017-02-15 12:12:10 -080010476 int32_t sharpness_range[] = {
10477 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10478 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10479 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10480
10481 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10482 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10483 size = 0;
10484 count = CAM_BINNING_CORRECTION_MODE_MAX;
10485 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10486 for (size_t i = 0; i < count; i++) {
10487 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10488 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10489 gCamCapability[cameraId]->supported_binning_modes[i]);
10490 if (NAME_NOT_FOUND != val) {
10491 avail_binning_modes[size] = (int32_t)val;
10492 size++;
10493 }
10494 }
10495 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10496 avail_binning_modes, size);
10497 }
10498
10499 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10500 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10501 size = 0;
10502 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10503 for (size_t i = 0; i < count; i++) {
10504 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10505 gCamCapability[cameraId]->supported_aec_modes[i]);
10506 if (NAME_NOT_FOUND != val)
10507 available_aec_modes[size++] = val;
10508 }
10509 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10510 available_aec_modes, size);
10511 }
10512
10513 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10514 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10515 size = 0;
10516 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10517 for (size_t i = 0; i < count; i++) {
10518 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10519 gCamCapability[cameraId]->supported_iso_modes[i]);
10520 if (NAME_NOT_FOUND != val)
10521 available_iso_modes[size++] = val;
10522 }
10523 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10524 available_iso_modes, size);
10525 }
10526
10527 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010528 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010529 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10530 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10531 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10532
10533 int32_t available_saturation_range[4];
10534 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10535 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10536 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10537 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10538 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10539 available_saturation_range, 4);
10540
10541 uint8_t is_hdr_values[2];
10542 is_hdr_values[0] = 0;
10543 is_hdr_values[1] = 1;
10544 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10545 is_hdr_values, 2);
10546
10547 float is_hdr_confidence_range[2];
10548 is_hdr_confidence_range[0] = 0.0;
10549 is_hdr_confidence_range[1] = 1.0;
10550 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10551 is_hdr_confidence_range, 2);
10552
Emilian Peev0a972ef2017-03-16 10:25:53 +000010553 size_t eepromLength = strnlen(
10554 reinterpret_cast<const char *>(
10555 gCamCapability[cameraId]->eeprom_version_info),
10556 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10557 if (0 < eepromLength) {
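        // Append an Easel presence marker to the EEPROM version string:
        // ",E:Y" when an Easel device is present, ",E:N" otherwise.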
Zhijun Hea557c4c2017-03-16 18:37:53 -070010558 char easelInfo[] = ",E:N";
10559 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10560 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10561 eepromLength += sizeof(easelInfo);
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010562 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
10563 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E:Y" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010564 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010565 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010566 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10567 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10568 }
10569
Thierry Strudel3d639192016-09-09 11:52:26 -070010570 gStaticMetadata[cameraId] = staticInfo.release();
10571 return rc;
10572}
10573
10574/*===========================================================================
10575 * FUNCTION : makeTable
10576 *
10577 * DESCRIPTION: make a table of sizes
10578 *
10579 * PARAMETERS :
10580 *   @dimTable/@size/@max_size : input dimension table, its entry count, and the cap on entries used
10581 *   @sizeTable : output array filled with flattened (width, height) pairs
10582 *==========================================================================*/
10583void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10584 size_t max_size, int32_t *sizeTable)
10585{
10586 size_t j = 0;
10587 if (size > max_size) {
10588 size = max_size;
10589 }
10590 for (size_t i = 0; i < size; i++) {
10591 sizeTable[j] = dimTable[i].width;
10592 sizeTable[j+1] = dimTable[i].height;
10593 j+=2;
10594 }
10595}
10596
10597/*===========================================================================
10598 * FUNCTION : makeFPSTable
10599 *
10600 * DESCRIPTION: make a table of fps ranges
10601 *
10602 * PARAMETERS :
10603 *   @fpsTable/@size/@max_size : input fps-range table, entry count, and cap; @fpsRangesTable : output flattened (min_fps, max_fps) pairs
10604 *==========================================================================*/
10605void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10606 size_t max_size, int32_t *fpsRangesTable)
10607{
10608 size_t j = 0;
10609 if (size > max_size) {
10610 size = max_size;
10611 }
10612 for (size_t i = 0; i < size; i++) {
10613 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10614 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10615 j+=2;
10616 }
10617}
10618
10619/*===========================================================================
10620 * FUNCTION : makeOverridesList
10621 *
10622 * DESCRIPTION: make a list of scene mode overrides
10623 *
10624 * PARAMETERS :
10625 *   @overridesTable/@size/@max_size : backend scene-mode override table, entry count, and cap
10626 *   @overridesList : output (ae, awb, af) triplets; @supported_indexes : framework-supported scene modes; @camera_id : camera index
10627 *==========================================================================*/
10628void QCamera3HardwareInterface::makeOverridesList(
10629 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10630 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10631{
10632 /*The daemon will give a list of overrides for all scene modes.
10633 However, we should send the framework only the overrides for the
10634 scene modes it supports.*/
10635 size_t j = 0;
10636 if (size > max_size) {
10637 size = max_size;
10638 }
10639 size_t focus_count = CAM_FOCUS_MODE_MAX;
10640 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10641 focus_count);
10642 for (size_t i = 0; i < size; i++) {
10643 bool supt = false;
10644 size_t index = supported_indexes[i];
10645 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10646 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10647 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10648 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10649 overridesTable[index].awb_mode);
10650 if (NAME_NOT_FOUND != val) {
10651 overridesList[j+1] = (uint8_t)val;
10652 }
10653 uint8_t focus_override = overridesTable[index].af_mode;
10654 for (size_t k = 0; k < focus_count; k++) {
10655 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10656 supt = true;
10657 break;
10658 }
10659 }
10660 if (supt) {
10661 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10662 focus_override);
10663 if (NAME_NOT_FOUND != val) {
10664 overridesList[j+2] = (uint8_t)val;
10665 }
10666 } else {
10667 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10668 }
10669 j+=3;
10670 }
10671}
10672
10673/*===========================================================================
10674 * FUNCTION : filterJpegSizes
10675 *
10676 * DESCRIPTION: Returns the supported JPEG sizes, limited to those that the
10677 * maximum (active array) dimension can be downscaled to
10678 *
10679 * PARAMETERS :
10680 *
10681 * RETURN : length of jpegSizes array
10682 *==========================================================================*/
10683
10684size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10685 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10686 uint8_t downscale_factor)
10687{
10688 if (0 == downscale_factor) {
10689 downscale_factor = 1;
10690 }
10691
10692 int32_t min_width = active_array_size.width / downscale_factor;
10693 int32_t min_height = active_array_size.height / downscale_factor;
10694 size_t jpegSizesCnt = 0;
10695 if (processedSizesCnt > maxCount) {
10696 processedSizesCnt = maxCount;
10697 }
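    // Keep only processed sizes that are at least active_array/downscale_factor
    // in each dimension; smaller outputs would presumably need more downscaling
    // than the JPEG path supports.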
10698 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10699 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10700 jpegSizes[jpegSizesCnt] = processedSizes[i];
10701 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10702 jpegSizesCnt += 2;
10703 }
10704 }
10705 return jpegSizesCnt;
10706}
10707
10708/*===========================================================================
10709 * FUNCTION : computeNoiseModelEntryS
10710 *
10711 * DESCRIPTION: function to map a given sensitivity to the S noise
10712 * model parameters in the DNG noise model.
10713 *
10714 * PARAMETERS : sens : the sensor sensitivity
10715 *
10716 * RETURN : S (sensor amplification) noise
10717 *
10718 *==========================================================================*/
10719double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10720 double s = gCamCapability[mCameraId]->gradient_S * sens +
10721 gCamCapability[mCameraId]->offset_S;
10722 return ((s < 0.0) ? 0.0 : s);
10723}
10724
10725/*===========================================================================
10726 * FUNCTION : computeNoiseModelEntryO
10727 *
10728 * DESCRIPTION: function to map a given sensitivity to the O noise
10729 * model parameters in the DNG noise model.
10730 *
10731 * PARAMETERS : sens : the sensor sensitivity
10732 *
10733 * RETURN : O (sensor readout) noise
10734 *
10735 *==========================================================================*/
10736double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10737 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10738 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10739 1.0 : (1.0 * sens / max_analog_sens);
10740 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10741 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10742 return ((o < 0.0) ? 0.0 : o);
10743}
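// Together, computeNoiseModelEntryS/O map a sensitivity to the (S, O) pair of the
// DNG noise model, in which the noise standard deviation of a normalized pixel
// value x is modeled as sqrt(S * x + O); these values are presumably what gets
// reported through ANDROID_SENSOR_NOISE_PROFILE in capture results.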
10744
10745/*===========================================================================
10746 * FUNCTION : getSensorSensitivity
10747 *
10748 * DESCRIPTION: convert iso_mode to an integer value
10749 *
10750 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10751 *
10752 * RETURN : sensitivity supported by sensor
10753 *
10754 *==========================================================================*/
10755int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10756{
10757 int32_t sensitivity;
10758
10759 switch (iso_mode) {
10760 case CAM_ISO_MODE_100:
10761 sensitivity = 100;
10762 break;
10763 case CAM_ISO_MODE_200:
10764 sensitivity = 200;
10765 break;
10766 case CAM_ISO_MODE_400:
10767 sensitivity = 400;
10768 break;
10769 case CAM_ISO_MODE_800:
10770 sensitivity = 800;
10771 break;
10772 case CAM_ISO_MODE_1600:
10773 sensitivity = 1600;
10774 break;
10775 default:
10776 sensitivity = -1;
10777 break;
10778 }
10779 return sensitivity;
10780}
10781
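// Lazily creates the global Easel manager client and, when an Easel device is
// present (and not explicitly kept off via camera.hdrplus.donotpoweroneasel),
// opens it and immediately suspends it. Also latches the HDR+ bypass/profiling
// properties and decides whether the ANDROID_CONTROL_ENABLE_ZSL key is exposed.
// The caller must hold gHdrPlusClientLock, as the "Locked" suffix implies.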
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010782int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010783 if (gEaselManagerClient == nullptr) {
10784 gEaselManagerClient = EaselManagerClient::create();
10785 if (gEaselManagerClient == nullptr) {
10786 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10787 return -ENODEV;
10788 }
10789 }
10790
10791 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010792 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10793 // to connect to Easel.
10794 bool doNotpowerOnEasel =
10795 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10796
10797 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010798 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10799 return OK;
10800 }
10801
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010802 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010803 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010804 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010805 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010806 return res;
10807 }
10808
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010809 EaselManagerClientOpened = true;
10810
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010811 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010812 if (res != OK) {
10813 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10814 }
10815
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010816 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010817 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010818
10819 // Expose enableZsl key only when HDR+ mode is enabled.
10820 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010821 }
10822
10823 return OK;
10824}
10825
Thierry Strudel3d639192016-09-09 11:52:26 -070010826/*===========================================================================
10827 * FUNCTION : getCamInfo
10828 *
10829 * DESCRIPTION: query camera capabilities
10830 *
10831 * PARAMETERS :
10832 * @cameraId : camera Id
10833 * @info : camera info struct to be filled in with camera capabilities
10834 *
10835 * RETURN : int type of status
10836 * NO_ERROR -- success
10837 * non-zero failure code
10838 *==========================================================================*/
10839int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10840 struct camera_info *info)
10841{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010842 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010843 int rc = 0;
10844
10845 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010846
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010847 {
10848 Mutex::Autolock l(gHdrPlusClientLock);
10849 rc = initHdrPlusClientLocked();
10850 if (rc != OK) {
10851 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10852 pthread_mutex_unlock(&gCamLock);
10853 return rc;
10854 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010855 }
10856
Thierry Strudel3d639192016-09-09 11:52:26 -070010857 if (NULL == gCamCapability[cameraId]) {
10858 rc = initCapabilities(cameraId);
10859 if (rc < 0) {
10860 pthread_mutex_unlock(&gCamLock);
10861 return rc;
10862 }
10863 }
10864
10865 if (NULL == gStaticMetadata[cameraId]) {
10866 rc = initStaticMetadata(cameraId);
10867 if (rc < 0) {
10868 pthread_mutex_unlock(&gCamLock);
10869 return rc;
10870 }
10871 }
10872
10873 switch(gCamCapability[cameraId]->position) {
10874 case CAM_POSITION_BACK:
10875 case CAM_POSITION_BACK_AUX:
10876 info->facing = CAMERA_FACING_BACK;
10877 break;
10878
10879 case CAM_POSITION_FRONT:
10880 case CAM_POSITION_FRONT_AUX:
10881 info->facing = CAMERA_FACING_FRONT;
10882 break;
10883
10884 default:
10885 LOGE("Unknown position type %d for camera id:%d",
10886 gCamCapability[cameraId]->position, cameraId);
10887 rc = -1;
10888 break;
10889 }
10890
10891
10892 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010893#ifndef USE_HAL_3_3
10894 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10895#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010896 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010897#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010898 info->static_camera_characteristics = gStaticMetadata[cameraId];
10899
10900 //For now assume both cameras can operate independently.
10901 info->conflicting_devices = NULL;
10902 info->conflicting_devices_length = 0;
10903
10904 //resource cost is 100 * MIN(1.0, m/M),
10905 //where m is throughput requirement with maximum stream configuration
10906 //and M is CPP maximum throughput.
10907 float max_fps = 0.0;
10908 for (uint32_t i = 0;
10909 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10910 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10911 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10912 }
10913 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10914 gCamCapability[cameraId]->active_array_size.width *
10915 gCamCapability[cameraId]->active_array_size.height * max_fps /
10916 gCamCapability[cameraId]->max_pixel_bandwidth;
10917 info->resource_cost = 100 * MIN(1.0, ratio);
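    // The MIN() clamp keeps resource_cost within the [0, 100] range expected by
    // the camera framework, even when the worst-case throughput exceeds the CPP
    // maximum.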
10918 LOGI("camera %d resource cost is %d", cameraId,
10919 info->resource_cost);
10920
10921 pthread_mutex_unlock(&gCamLock);
10922 return rc;
10923}
10924
10925/*===========================================================================
10926 * FUNCTION : translateCapabilityToMetadata
10927 *
10928 * DESCRIPTION: translate the capability into camera_metadata_t
10929 *
10930 * PARAMETERS : type of the request
10931 *
10932 *
10933 * RETURN : success: camera_metadata_t*
10934 * failure: NULL
10935 *
10936 *==========================================================================*/
10937camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10938{
10939 if (mDefaultMetadata[type] != NULL) {
10940 return mDefaultMetadata[type];
10941 }
10942 //first time we are handling this request
10943 //fill up the metadata structure using the wrapper class
10944 CameraMetadata settings;
10945 //translate from cam_capability_t to camera_metadata_tag_t
10946 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10947 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10948 int32_t defaultRequestID = 0;
10949 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10950
10951 /* OIS disable */
10952 char ois_prop[PROPERTY_VALUE_MAX];
10953 memset(ois_prop, 0, sizeof(ois_prop));
10954 property_get("persist.camera.ois.disable", ois_prop, "0");
10955 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10956
10957 /* Force video to use OIS */
10958 char videoOisProp[PROPERTY_VALUE_MAX];
10959 memset(videoOisProp, 0, sizeof(videoOisProp));
10960 property_get("persist.camera.ois.video", videoOisProp, "1");
10961 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010962
10963 // Hybrid AE enable/disable
10964 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10965 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10966 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10967 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10968
Thierry Strudel3d639192016-09-09 11:52:26 -070010969 uint8_t controlIntent = 0;
10970 uint8_t focusMode;
10971 uint8_t vsMode;
10972 uint8_t optStabMode;
10973 uint8_t cacMode;
10974 uint8_t edge_mode;
10975 uint8_t noise_red_mode;
10976 uint8_t tonemap_mode;
10977 bool highQualityModeEntryAvailable = FALSE;
10978 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010979 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010980 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10981 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010982 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010983 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010984 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010985
Thierry Strudel3d639192016-09-09 11:52:26 -070010986 switch (type) {
10987 case CAMERA3_TEMPLATE_PREVIEW:
10988 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10989 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10990 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10991 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10992 edge_mode = ANDROID_EDGE_MODE_FAST;
10993 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10994 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10995 break;
10996 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10997 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10998 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10999 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11000 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11001 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11002 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11003 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11004 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11005 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11006 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11007 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11008 highQualityModeEntryAvailable = TRUE;
11009 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11010 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11011 fastModeEntryAvailable = TRUE;
11012 }
11013 }
11014 if (highQualityModeEntryAvailable) {
11015 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11016 } else if (fastModeEntryAvailable) {
11017 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11018 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011019 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11020 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11021 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011022 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011023 break;
11024 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11025 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11026 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11027 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011028 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11029 edge_mode = ANDROID_EDGE_MODE_FAST;
11030 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11031 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11032 if (forceVideoOis)
11033 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11034 break;
11035 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11036 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11037 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11038 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011039 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11040 edge_mode = ANDROID_EDGE_MODE_FAST;
11041 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11042 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11043 if (forceVideoOis)
11044 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11045 break;
11046 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11047 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11048 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11049 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11050 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11051 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11052 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11053 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11054 break;
11055 case CAMERA3_TEMPLATE_MANUAL:
11056 edge_mode = ANDROID_EDGE_MODE_FAST;
11057 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11058 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11059 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11060 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11061 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11062 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11063 break;
11064 default:
11065 edge_mode = ANDROID_EDGE_MODE_FAST;
11066 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11067 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11068 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11069 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11070 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11071 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11072 break;
11073 }
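    // Summary of the defaults chosen above: STILL_CAPTURE prefers HIGH_QUALITY
    // post-processing and enables ZSL, the video templates use FAST modes and
    // force OIS on only when persist.camera.ois.video is set, and MANUAL turns
    // AF off (the remaining manual-control overrides are applied further below).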
Thierry Strudel04e026f2016-10-10 11:27:36 -070011074 // Set CAC to OFF if the underlying device doesn't support it
11075 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11076 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11077 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011078 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11079 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11080 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11081 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11082 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11083 }
11084 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011085 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011086 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011087
11088 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11089 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11090 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11091 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11092 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11093 || ois_disable)
11094 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11095 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011096 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011097
11098 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11099 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11100
11101 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11102 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11103
11104 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11105 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11106
11107 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11108 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11109
11110 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11111 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11112
11113 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11114 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11115
11116 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11117 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11118
11119 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11120 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11121
11122 /*flash*/
11123 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11124 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11125
11126 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11127 settings.update(ANDROID_FLASH_FIRING_POWER,
11128 &flashFiringLevel, 1);
11129
11130 /* lens */
11131 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11132 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11133
11134 if (gCamCapability[mCameraId]->filter_densities_count) {
11135 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11136 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11137 gCamCapability[mCameraId]->filter_densities_count);
11138 }
11139
11140 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11141 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11142
Thierry Strudel3d639192016-09-09 11:52:26 -070011143 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11144 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11145
11146 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11147 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11148
11149 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11150 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11151
11152 /* face detection (default to OFF) */
11153 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11154 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11155
Thierry Strudel54dc9782017-02-15 12:12:10 -080011156 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11157 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011158
11159 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11160 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11161
11162 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11163 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11164
Thierry Strudel3d639192016-09-09 11:52:26 -070011165
11166 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11167 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11168
11169 /* Exposure time(Update the Min Exposure Time)*/
11170 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11171 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11172
11173 /* frame duration */
11174 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11175 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11176
11177 /* sensitivity */
11178 static const int32_t default_sensitivity = 100;
11179 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011180#ifndef USE_HAL_3_3
11181 static const int32_t default_isp_sensitivity =
11182 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11183 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11184#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011185
11186 /*edge mode*/
11187 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11188
11189 /*noise reduction mode*/
11190 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11191
11192 /*color correction mode*/
11193 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11194 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11195
11196 /*tonemap mode*/
11197 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11198
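    // Default crop region: the full active pixel array, i.e. no digital zoom.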
11199 int32_t scaler_crop_region[4];
11200 scaler_crop_region[0] = 0;
11201 scaler_crop_region[1] = 0;
11202 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11203 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11204 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11205
11206 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11207 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11208
11209 /*focus distance*/
11210 float focus_distance = 0.0;
11211 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11212
11213 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011214 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011215 float max_range = 0.0;
11216 float max_fixed_fps = 0.0;
11217 int32_t fps_range[2] = {0, 0};
11218 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11219 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011220 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11221 TEMPLATE_MAX_PREVIEW_FPS) {
11222 continue;
11223 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011224 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11225 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11226 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11227 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11228 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11229 if (range > max_range) {
11230 fps_range[0] =
11231 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11232 fps_range[1] =
11233 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11234 max_range = range;
11235 }
11236 } else {
11237 if (range < 0.01 && max_fixed_fps <
11238 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11239 fps_range[0] =
11240 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11241 fps_range[1] =
11242 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11243 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11244 }
11245 }
11246 }
11247 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11248
11249 /*precapture trigger*/
11250 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11251 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11252
11253 /*af trigger*/
11254 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11255 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11256
11257 /* ae & af regions */
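    // Both regions span the full active array with a trailing weight of 0,
    // which the framework treats as "no explicit metering/focus region set".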
11258 int32_t active_region[] = {
11259 gCamCapability[mCameraId]->active_array_size.left,
11260 gCamCapability[mCameraId]->active_array_size.top,
11261 gCamCapability[mCameraId]->active_array_size.left +
11262 gCamCapability[mCameraId]->active_array_size.width,
11263 gCamCapability[mCameraId]->active_array_size.top +
11264 gCamCapability[mCameraId]->active_array_size.height,
11265 0};
11266 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11267 sizeof(active_region) / sizeof(active_region[0]));
11268 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11269 sizeof(active_region) / sizeof(active_region[0]));
11270
11271 /* black level lock */
11272 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11273 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11274
Thierry Strudel3d639192016-09-09 11:52:26 -070011275 //special defaults for manual template
11276 if (type == CAMERA3_TEMPLATE_MANUAL) {
11277 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11278 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11279
11280 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11281 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11282
11283 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11284 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11285
11286 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11287 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11288
11289 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11290 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11291
11292 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11293 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11294 }
11295
11296
11297 /* TNR
11298 * We'll use this location to determine for which templates TNR will be set.
11299 * We will enable TNR if either the Preview or the Video stream requires TNR.
11300 * This is not to be confused with linking on a per-stream basis; that decision
11301 * is still made per session and is handled as part of stream configuration.
11302 */
11303 uint8_t tnr_enable = 0;
11304
11305 if (m_bTnrPreview || m_bTnrVideo) {
11306
11307 switch (type) {
11308 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11309 tnr_enable = 1;
11310 break;
11311
11312 default:
11313 tnr_enable = 0;
11314 break;
11315 }
11316
11317 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11318 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11319 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11320
11321 LOGD("TNR:%d with process plate %d for template:%d",
11322 tnr_enable, tnr_process_type, type);
11323 }
11324
11325 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011326 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011327 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11328
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011329 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011330 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11331
Shuzhen Wang920ea402017-05-03 08:49:39 -070011332 uint8_t related_camera_id = mCameraId;
11333 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011334
11335 /* CDS default */
11336 char prop[PROPERTY_VALUE_MAX];
11337 memset(prop, 0, sizeof(prop));
11338 property_get("persist.camera.CDS", prop, "Auto");
11339 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11340 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11341 if (CAM_CDS_MODE_MAX == cds_mode) {
11342 cds_mode = CAM_CDS_MODE_AUTO;
11343 }
11344
11345 /* Disabling CDS in templates which have TNR enabled*/
11346 if (tnr_enable)
11347 cds_mode = CAM_CDS_MODE_OFF;
11348
11349 int32_t mode = cds_mode;
11350 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011351
Thierry Strudel269c81a2016-10-12 12:13:59 -070011352 /* Manual Convergence AEC Speed is disabled by default*/
11353 float default_aec_speed = 0;
11354 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11355
11356 /* Manual Convergence AWB Speed is disabled by default*/
11357 float default_awb_speed = 0;
11358 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11359
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011360 // Set instant AEC to normal convergence by default
11361 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11362 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11363
Shuzhen Wang19463d72016-03-08 11:09:52 -080011364 /* hybrid ae */
11365 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11366
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011367 if (gExposeEnableZslKey) {
11368 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11369 }
11370
Thierry Strudel3d639192016-09-09 11:52:26 -070011371 mDefaultMetadata[type] = settings.release();
11372
11373 return mDefaultMetadata[type];
11374}
11375
11376/*===========================================================================
11377 * FUNCTION : setFrameParameters
11378 *
11379 * DESCRIPTION: set parameters per frame as requested in the metadata from
11380 * framework
11381 *
11382 * PARAMETERS :
11383 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011384 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011385 * @blob_request: Whether this request is a blob request or not
11386 *
11387 * RETURN : success: NO_ERROR
11388 * failure:
11389 *==========================================================================*/
11390int QCamera3HardwareInterface::setFrameParameters(
11391 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011392 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011393 int blob_request,
11394 uint32_t snapshotStreamId)
11395{
11396 /*translate from camera_metadata_t type to parm_type_t*/
11397 int rc = 0;
11398 int32_t hal_version = CAM_HAL_V3;
11399
11400 clear_metadata_buffer(mParameters);
11401 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11402 LOGE("Failed to set hal version in the parameters");
11403 return BAD_VALUE;
11404 }
11405
11406 /*we need to update the frame number in the parameters*/
11407 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11408 request->frame_number)) {
11409 LOGE("Failed to set the frame number in the parameters");
11410 return BAD_VALUE;
11411 }
11412
11413 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011414 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011415 LOGE("Failed to set stream type mask in the parameters");
11416 return BAD_VALUE;
11417 }
11418
11419 if (mUpdateDebugLevel) {
11420 uint32_t dummyDebugLevel = 0;
11421 /* The value of dummyDebugLevel is irrelevant. On
11422 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11423 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11424 dummyDebugLevel)) {
11425 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11426 return BAD_VALUE;
11427 }
11428 mUpdateDebugLevel = false;
11429 }
11430
11431 if(request->settings != NULL){
11432 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11433 if (blob_request)
11434 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11435 }
11436
11437 return rc;
11438}
11439
11440/*===========================================================================
11441 * FUNCTION : setReprocParameters
11442 *
11443 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11444 * return it.
11445 *
11446 * PARAMETERS :
11447 * @request : request that needs to be serviced
11448 *
11449 * RETURN : success: NO_ERROR
11450 * failure:
11451 *==========================================================================*/
11452int32_t QCamera3HardwareInterface::setReprocParameters(
11453 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11454 uint32_t snapshotStreamId)
11455{
11456 /*translate from camera_metadata_t type to parm_type_t*/
11457 int rc = 0;
11458
11459 if (NULL == request->settings){
11460 LOGE("Reprocess settings cannot be NULL");
11461 return BAD_VALUE;
11462 }
11463
11464 if (NULL == reprocParam) {
11465 LOGE("Invalid reprocessing metadata buffer");
11466 return BAD_VALUE;
11467 }
11468 clear_metadata_buffer(reprocParam);
11469
11470 /*we need to update the frame number in the parameters*/
11471 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11472 request->frame_number)) {
11473 LOGE("Failed to set the frame number in the parameters");
11474 return BAD_VALUE;
11475 }
11476
11477 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11478 if (rc < 0) {
11479 LOGE("Failed to translate reproc request");
11480 return rc;
11481 }
11482
11483 CameraMetadata frame_settings;
11484 frame_settings = request->settings;
11485 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11486 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11487 int32_t *crop_count =
11488 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11489 int32_t *crop_data =
11490 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11491 int32_t *roi_map =
11492 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11493 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11494 cam_crop_data_t crop_meta;
11495 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11496 crop_meta.num_of_streams = 1;
11497 crop_meta.crop_info[0].crop.left = crop_data[0];
11498 crop_meta.crop_info[0].crop.top = crop_data[1];
11499 crop_meta.crop_info[0].crop.width = crop_data[2];
11500 crop_meta.crop_info[0].crop.height = crop_data[3];
11501
11502 crop_meta.crop_info[0].roi_map.left =
11503 roi_map[0];
11504 crop_meta.crop_info[0].roi_map.top =
11505 roi_map[1];
11506 crop_meta.crop_info[0].roi_map.width =
11507 roi_map[2];
11508 crop_meta.crop_info[0].roi_map.height =
11509 roi_map[3];
11510
11511 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11512 rc = BAD_VALUE;
11513 }
11514 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11515 request->input_buffer->stream,
11516 crop_meta.crop_info[0].crop.left,
11517 crop_meta.crop_info[0].crop.top,
11518 crop_meta.crop_info[0].crop.width,
11519 crop_meta.crop_info[0].crop.height);
11520 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11521 request->input_buffer->stream,
11522 crop_meta.crop_info[0].roi_map.left,
11523 crop_meta.crop_info[0].roi_map.top,
11524 crop_meta.crop_info[0].roi_map.width,
11525 crop_meta.crop_info[0].roi_map.height);
11526 } else {
11527 LOGE("Invalid reprocess crop count %d!", *crop_count);
11528 }
11529 } else {
11530 LOGE("No crop data from matching output stream");
11531 }
11532
11533 /* These settings are not needed for regular requests, so handle them specially for
11534 reprocess requests; this information is needed for EXIF tags */
11535 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11536 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11537 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11538 if (NAME_NOT_FOUND != val) {
11539 uint32_t flashMode = (uint32_t)val;
11540 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11541 rc = BAD_VALUE;
11542 }
11543 } else {
11544 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11545 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11546 }
11547 } else {
11548 LOGH("No flash mode in reprocess settings");
11549 }
11550
11551 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11552 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11553 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11554 rc = BAD_VALUE;
11555 }
11556 } else {
11557 LOGH("No flash state in reprocess settings");
11558 }
11559
11560 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11561 uint8_t *reprocessFlags =
11562 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11563 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11564 *reprocessFlags)) {
11565 rc = BAD_VALUE;
11566 }
11567 }
11568
Thierry Strudel54dc9782017-02-15 12:12:10 -080011569 // Add exif debug data to internal metadata
11570 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11571 mm_jpeg_debug_exif_params_t *debug_params =
11572 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11573 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11574 // AE
11575 if (debug_params->ae_debug_params_valid == TRUE) {
11576 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11577 debug_params->ae_debug_params);
11578 }
11579 // AWB
11580 if (debug_params->awb_debug_params_valid == TRUE) {
11581 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11582 debug_params->awb_debug_params);
11583 }
11584 // AF
11585 if (debug_params->af_debug_params_valid == TRUE) {
11586 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11587 debug_params->af_debug_params);
11588 }
11589 // ASD
11590 if (debug_params->asd_debug_params_valid == TRUE) {
11591 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11592 debug_params->asd_debug_params);
11593 }
11594 // Stats
11595 if (debug_params->stats_debug_params_valid == TRUE) {
11596 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11597 debug_params->stats_debug_params);
11598 }
11599 // BE Stats
11600 if (debug_params->bestats_debug_params_valid == TRUE) {
11601 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11602 debug_params->bestats_debug_params);
11603 }
11604 // BHIST
11605 if (debug_params->bhist_debug_params_valid == TRUE) {
11606 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11607 debug_params->bhist_debug_params);
11608 }
11609 // 3A Tuning
11610 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11611 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11612 debug_params->q3a_tuning_debug_params);
11613 }
11614 }
11615
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011616 // Add metadata which reprocess needs
11617 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11618 cam_reprocess_info_t *repro_info =
11619 (cam_reprocess_info_t *)frame_settings.find
11620 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011621 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011622 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011623 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011624 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011625 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011626 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011627 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011628 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011629 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011630 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011631 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011632 repro_info->pipeline_flip);
11633 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11634 repro_info->af_roi);
11635 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11636 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011637        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings, then
 11638           CAM_INTF_PARM_ROTATION metadata has already been added in
 11639           translateToHalMetadata and the HAL needs to keep this new rotation
 11640           metadata. Otherwise, the old rotation info saved in the vendor tag
 11641           is used. */
11642 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11643 CAM_INTF_PARM_ROTATION, reprocParam) {
11644 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11645 } else {
11646 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011647 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011648 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011649 }
11650
 11651     /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
 11652        to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
 11653        roi.width and roi.height are the final JPEG size.
 11654        For now, the HAL only checks this for reprocess requests. */
11655 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11656 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11657 uint8_t *enable =
11658 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11659 if (*enable == TRUE) {
11660 int32_t *crop_data =
11661 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11662 cam_stream_crop_info_t crop_meta;
11663 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11664 crop_meta.stream_id = 0;
11665 crop_meta.crop.left = crop_data[0];
11666 crop_meta.crop.top = crop_data[1];
11667 crop_meta.crop.width = crop_data[2];
11668 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011669 // The JPEG crop roi should match cpp output size
11670 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11671 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11672 crop_meta.roi_map.left = 0;
11673 crop_meta.roi_map.top = 0;
11674 crop_meta.roi_map.width = cpp_crop->crop.width;
11675 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011676 }
11677 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11678 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011679 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011680 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011681 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11682 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011683 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011684 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11685
11686 // Add JPEG scale information
11687 cam_dimension_t scale_dim;
11688 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11689 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11690 int32_t *roi =
11691 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11692 scale_dim.width = roi[2];
11693 scale_dim.height = roi[3];
11694 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11695 scale_dim);
11696 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11697 scale_dim.width, scale_dim.height, mCameraId);
11698 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011699 }
11700 }
11701
11702 return rc;
11703}
11704
11705/*===========================================================================
11706 * FUNCTION : saveRequestSettings
11707 *
11708 * DESCRIPTION: Add any settings that might have changed to the request settings
11709 * and save the settings to be applied on the frame
11710 *
11711 * PARAMETERS :
11712 * @jpegMetadata : the extracted and/or modified jpeg metadata
11713 * @request : request with initial settings
11714 *
11715 * RETURN :
11716 * camera_metadata_t* : pointer to the saved request settings
11717 *==========================================================================*/
11718camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11719 const CameraMetadata &jpegMetadata,
11720 camera3_capture_request_t *request)
11721{
11722 camera_metadata_t *resultMetadata;
11723 CameraMetadata camMetadata;
11724 camMetadata = request->settings;
11725
11726 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11727 int32_t thumbnail_size[2];
11728 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11729 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11730 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11731 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11732 }
11733
11734 if (request->input_buffer != NULL) {
11735 uint8_t reprocessFlags = 1;
11736 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11737 (uint8_t*)&reprocessFlags,
11738 sizeof(reprocessFlags));
11739 }
11740
11741 resultMetadata = camMetadata.release();
11742 return resultMetadata;
11743}
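/* Usage sketch for saveRequestSettings (illustrative only; the call site and
 * variable names below are hypothetical, not part of this HAL's flow). Since
 * CameraMetadata::release() hands ownership of the underlying camera_metadata_t
 * to the caller, whoever stores the returned pointer is responsible for freeing
 * it once the request completes:
 *
 *   camera_metadata_t *saved = saveRequestSettings(mJpegMetadata, request);
 *   // ... keep 'saved' with the pending request while it is in flight ...
 *   free_camera_metadata(saved);
 */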
11744
11745/*===========================================================================
11746 * FUNCTION : setHalFpsRange
11747 *
11748 * DESCRIPTION: set FPS range parameter
11749 *
11750 *
11751 * PARAMETERS :
11752 * @settings : Metadata from framework
11753 * @hal_metadata: Metadata buffer
11754 *
11755 *
11756 * RETURN : success: NO_ERROR
 11757 *              failure: BAD_VALUE
11758 *==========================================================================*/
11759int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11760 metadata_buffer_t *hal_metadata)
11761{
11762 int32_t rc = NO_ERROR;
11763 cam_fps_range_t fps_range;
11764 fps_range.min_fps = (float)
11765 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11766 fps_range.max_fps = (float)
11767 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11768 fps_range.video_min_fps = fps_range.min_fps;
11769 fps_range.video_max_fps = fps_range.max_fps;
11770
11771 LOGD("aeTargetFpsRange fps: [%f %f]",
11772 fps_range.min_fps, fps_range.max_fps);
11773 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11774 * follows:
11775 * ---------------------------------------------------------------|
11776 * Video stream is absent in configure_streams |
11777 * (Camcorder preview before the first video record |
11778 * ---------------------------------------------------------------|
11779 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11780 * | | | vid_min/max_fps|
11781 * ---------------------------------------------------------------|
11782 * NO | [ 30, 240] | 240 | [240, 240] |
11783 * |-------------|-------------|----------------|
11784 * | [240, 240] | 240 | [240, 240] |
11785 * ---------------------------------------------------------------|
11786 * Video stream is present in configure_streams |
11787 * ---------------------------------------------------------------|
11788 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11789 * | | | vid_min/max_fps|
11790 * ---------------------------------------------------------------|
11791 * NO | [ 30, 240] | 240 | [240, 240] |
11792 * (camcorder prev |-------------|-------------|----------------|
11793 * after video rec | [240, 240] | 240 | [240, 240] |
11794 * is stopped) | | | |
11795 * ---------------------------------------------------------------|
11796 * YES | [ 30, 240] | 240 | [240, 240] |
11797 * |-------------|-------------|----------------|
11798 * | [240, 240] | 240 | [240, 240] |
11799 * ---------------------------------------------------------------|
11800 * When Video stream is absent in configure_streams,
11801 * preview fps = sensor_fps / batchsize
11802 * Eg: for 240fps at batchSize 4, preview = 60fps
11803 * for 120fps at batchSize 4, preview = 30fps
11804 *
11805 * When video stream is present in configure_streams, preview fps is as per
11806 * the ratio of preview buffers to video buffers requested in process
11807 * capture request
11808 */
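    /* Worked example (illustrative; PREVIEW_FPS_FOR_HFR and MAX_HFR_BATCH_SIZE
     * are constants defined elsewhere in this HAL):
     *   aeTargetFpsRange = [240, 240] in constrained HFR mode
     *   => mHFRVideoFps = 240
     *   => mBatchSize   = 240 / PREVIEW_FPS_FOR_HFR, capped at MAX_HFR_BATCH_SIZE
     *   => with no video stream configured, preview fps = 240 / mBatchSize
     *      (e.g. a batch size of 4 gives the 60 fps preview shown in the table above)
     */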
11809 mBatchSize = 0;
11810 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11811 fps_range.min_fps = fps_range.video_max_fps;
11812 fps_range.video_min_fps = fps_range.video_max_fps;
11813 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11814 fps_range.max_fps);
11815 if (NAME_NOT_FOUND != val) {
11816 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11817 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11818 return BAD_VALUE;
11819 }
11820
11821 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11822 /* If batchmode is currently in progress and the fps changes,
11823 * set the flag to restart the sensor */
11824 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11825 (mHFRVideoFps != fps_range.max_fps)) {
11826 mNeedSensorRestart = true;
11827 }
11828 mHFRVideoFps = fps_range.max_fps;
11829 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11830 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11831 mBatchSize = MAX_HFR_BATCH_SIZE;
11832 }
11833 }
11834 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11835
11836 }
11837 } else {
 11838         /* HFR mode is a session param in the backend/ISP. It should be reset when
 11839          * not in HFR mode */
11840 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11841 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11842 return BAD_VALUE;
11843 }
11844 }
11845 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11846 return BAD_VALUE;
11847 }
11848 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11849 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11850 return rc;
11851}
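/* Example (illustrative): outside of constrained HFR mode, a request carrying
 * ANDROID_CONTROL_AE_TARGET_FPS_RANGE = {30, 30} simply becomes
 * CAM_INTF_PARM_FPS_RANGE with min/max (and video min/max) of 30.0, and
 * CAM_INTF_PARM_HFR is reset to CAM_HFR_MODE_OFF as shown above.
 */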
11852
11853/*===========================================================================
11854 * FUNCTION : translateToHalMetadata
11855 *
 11856 * DESCRIPTION: read settings from the framework camera_metadata_t and
 11857 *              translate them into HAL metadata_buffer_t entries
 11858 *
 11859 * PARAMETERS :
 11860 *   @request          : request sent from framework
 11861 *   @hal_metadata     : HAL metadata buffer to populate
 11862 *   @snapshotStreamId : stream id of the snapshot stream
 11863 * RETURN     : success: NO_ERROR
 11864 *              failure: BAD_VALUE
11865 *==========================================================================*/
11866int QCamera3HardwareInterface::translateToHalMetadata
11867 (const camera3_capture_request_t *request,
11868 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011869 uint32_t snapshotStreamId) {
11870 if (request == nullptr || hal_metadata == nullptr) {
11871 return BAD_VALUE;
11872 }
11873
11874 int64_t minFrameDuration = getMinFrameDuration(request);
11875
11876 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11877 minFrameDuration);
11878}
11879
11880int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11881 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11882 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11883
Thierry Strudel3d639192016-09-09 11:52:26 -070011884 int rc = 0;
11885 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011886 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011887
11888 /* Do not change the order of the following list unless you know what you are
11889 * doing.
11890 * The order is laid out in such a way that parameters in the front of the table
11891 * may be used to override the parameters later in the table. Examples are:
11892 * 1. META_MODE should precede AEC/AWB/AF MODE
 11893 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11894 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
 11895 * 4. Any mode should precede its corresponding settings
11896 */
11897 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11898 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11899 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11900 rc = BAD_VALUE;
11901 }
11902 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11903 if (rc != NO_ERROR) {
11904 LOGE("extractSceneMode failed");
11905 }
11906 }
11907
11908 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11909 uint8_t fwk_aeMode =
11910 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11911 uint8_t aeMode;
11912 int32_t redeye;
11913
11914 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11915 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011916 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11917 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011918 } else {
11919 aeMode = CAM_AE_MODE_ON;
11920 }
11921 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11922 redeye = 1;
11923 } else {
11924 redeye = 0;
11925 }
11926
11927 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11928 fwk_aeMode);
11929 if (NAME_NOT_FOUND != val) {
11930 int32_t flashMode = (int32_t)val;
11931 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11932 }
11933
11934 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11935 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11936 rc = BAD_VALUE;
11937 }
11938 }
11939
11940 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11941 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11942 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11943 fwk_whiteLevel);
11944 if (NAME_NOT_FOUND != val) {
11945 uint8_t whiteLevel = (uint8_t)val;
11946 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11947 rc = BAD_VALUE;
11948 }
11949 }
11950 }
11951
11952 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11953 uint8_t fwk_cacMode =
11954 frame_settings.find(
11955 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11956 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11957 fwk_cacMode);
11958 if (NAME_NOT_FOUND != val) {
11959 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11960 bool entryAvailable = FALSE;
 11961             // Check whether the framework-requested CAC mode is supported by the device
11962 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11963 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11964 entryAvailable = TRUE;
11965 break;
11966 }
11967 }
11968 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
 11969             // If the entry is not found, set a device-supported mode instead of the framework's mode, i.e.,
 11970             // Only HW ISP CAC + no SW CAC : advertise all 3, with High doing the same as Fast in the ISP
 11971             // No HW ISP CAC + only SW CAC : advertise all 3, with Fast doing the same as OFF
11972 if (entryAvailable == FALSE) {
11973 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11974 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11975 } else {
11976 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
 11977                         // High is not supported, so set FAST, as the spec says the underlying
 11978                         // device implementation can be the same for both modes.
11979 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11980 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11981 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
11982 // in order to avoid the fps drop due to high quality
11983 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11984 } else {
11985 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11986 }
11987 }
11988 }
11989 LOGD("Final cacMode is %d", cacMode);
11990 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11991 rc = BAD_VALUE;
11992 }
11993 } else {
11994 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11995 }
11996 }
11997
Thierry Strudel2896d122017-02-23 19:18:03 -080011998 char af_value[PROPERTY_VALUE_MAX];
11999 property_get("persist.camera.af.infinity", af_value, "0");
12000
Jason Lee84ae9972017-02-24 13:24:24 -080012001 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080012002 if (atoi(af_value) == 0) {
12003 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012004 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012005 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12006 fwk_focusMode);
12007 if (NAME_NOT_FOUND != val) {
12008 uint8_t focusMode = (uint8_t)val;
12009 LOGD("set focus mode %d", focusMode);
12010 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12011 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12012 rc = BAD_VALUE;
12013 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012014 }
12015 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012016 } else {
12017 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12018 LOGE("Focus forced to infinity %d", focusMode);
12019 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12020 rc = BAD_VALUE;
12021 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012022 }
12023
Jason Lee84ae9972017-02-24 13:24:24 -080012024 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12025 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012026 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12027 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12028 focalDistance)) {
12029 rc = BAD_VALUE;
12030 }
12031 }
12032
12033 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12034 uint8_t fwk_antibandingMode =
12035 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12036 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12037 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12038 if (NAME_NOT_FOUND != val) {
12039 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012040 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12041 if (m60HzZone) {
12042 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12043 } else {
12044 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12045 }
12046 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012047 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12048 hal_antibandingMode)) {
12049 rc = BAD_VALUE;
12050 }
12051 }
12052 }
12053
12054 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12055 int32_t expCompensation = frame_settings.find(
12056 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12057 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12058 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12059 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12060 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012061 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012062 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12063 expCompensation)) {
12064 rc = BAD_VALUE;
12065 }
12066 }
12067
12068 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12069 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12070 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12071 rc = BAD_VALUE;
12072 }
12073 }
12074 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12075 rc = setHalFpsRange(frame_settings, hal_metadata);
12076 if (rc != NO_ERROR) {
12077 LOGE("setHalFpsRange failed");
12078 }
12079 }
12080
12081 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12082 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12083 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12084 rc = BAD_VALUE;
12085 }
12086 }
12087
12088 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12089 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12090 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12091 fwk_effectMode);
12092 if (NAME_NOT_FOUND != val) {
12093 uint8_t effectMode = (uint8_t)val;
12094 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12095 rc = BAD_VALUE;
12096 }
12097 }
12098 }
12099
12100 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12101 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12102 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12103 colorCorrectMode)) {
12104 rc = BAD_VALUE;
12105 }
12106 }
12107
12108 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12109 cam_color_correct_gains_t colorCorrectGains;
12110 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12111 colorCorrectGains.gains[i] =
12112 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12113 }
12114 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12115 colorCorrectGains)) {
12116 rc = BAD_VALUE;
12117 }
12118 }
12119
12120 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12121 cam_color_correct_matrix_t colorCorrectTransform;
12122 cam_rational_type_t transform_elem;
12123 size_t num = 0;
12124 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12125 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12126 transform_elem.numerator =
12127 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12128 transform_elem.denominator =
12129 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12130 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12131 num++;
12132 }
12133 }
12134 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12135 colorCorrectTransform)) {
12136 rc = BAD_VALUE;
12137 }
12138 }
12139
12140 cam_trigger_t aecTrigger;
12141 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12142 aecTrigger.trigger_id = -1;
12143 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12144 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12145 aecTrigger.trigger =
12146 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12147 aecTrigger.trigger_id =
12148 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12149 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12150 aecTrigger)) {
12151 rc = BAD_VALUE;
12152 }
12153 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12154 aecTrigger.trigger, aecTrigger.trigger_id);
12155 }
12156
12157 /*af_trigger must come with a trigger id*/
12158 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12159 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12160 cam_trigger_t af_trigger;
12161 af_trigger.trigger =
12162 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12163 af_trigger.trigger_id =
12164 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12165 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12166 rc = BAD_VALUE;
12167 }
12168 LOGD("AfTrigger: %d AfTriggerID: %d",
12169 af_trigger.trigger, af_trigger.trigger_id);
12170 }
12171
12172 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12173 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12174 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12175 rc = BAD_VALUE;
12176 }
12177 }
12178 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12179 cam_edge_application_t edge_application;
12180 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012181
Thierry Strudel3d639192016-09-09 11:52:26 -070012182 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12183 edge_application.sharpness = 0;
12184 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012185 edge_application.sharpness =
12186 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12187 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12188 int32_t sharpness =
12189 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12190 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12191 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12192 LOGD("Setting edge mode sharpness %d", sharpness);
12193 edge_application.sharpness = sharpness;
12194 }
12195 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012196 }
12197 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12198 rc = BAD_VALUE;
12199 }
12200 }
12201
12202 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12203 int32_t respectFlashMode = 1;
12204 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12205 uint8_t fwk_aeMode =
12206 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012207 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12208 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12209 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012210 respectFlashMode = 0;
12211 LOGH("AE Mode controls flash, ignore android.flash.mode");
12212 }
12213 }
12214 if (respectFlashMode) {
12215 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12216 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12217 LOGH("flash mode after mapping %d", val);
12218 // To check: CAM_INTF_META_FLASH_MODE usage
12219 if (NAME_NOT_FOUND != val) {
12220 uint8_t flashMode = (uint8_t)val;
12221 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12222 rc = BAD_VALUE;
12223 }
12224 }
12225 }
12226 }
12227
12228 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12229 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12230 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12231 rc = BAD_VALUE;
12232 }
12233 }
12234
12235 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12236 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12237 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12238 flashFiringTime)) {
12239 rc = BAD_VALUE;
12240 }
12241 }
12242
12243 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12244 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12245 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12246 hotPixelMode)) {
12247 rc = BAD_VALUE;
12248 }
12249 }
12250
12251 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12252 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12253 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12254 lensAperture)) {
12255 rc = BAD_VALUE;
12256 }
12257 }
12258
12259 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12260 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12261 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12262 filterDensity)) {
12263 rc = BAD_VALUE;
12264 }
12265 }
12266
12267 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12268 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12269 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12270 focalLength)) {
12271 rc = BAD_VALUE;
12272 }
12273 }
12274
12275 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12276 uint8_t optStabMode =
12277 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12278 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12279 optStabMode)) {
12280 rc = BAD_VALUE;
12281 }
12282 }
12283
12284 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12285 uint8_t videoStabMode =
12286 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12287 LOGD("videoStabMode from APP = %d", videoStabMode);
12288 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12289 videoStabMode)) {
12290 rc = BAD_VALUE;
12291 }
12292 }
12293
12294
12295 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12296 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12297 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12298 noiseRedMode)) {
12299 rc = BAD_VALUE;
12300 }
12301 }
12302
12303 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12304 float reprocessEffectiveExposureFactor =
12305 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12306 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12307 reprocessEffectiveExposureFactor)) {
12308 rc = BAD_VALUE;
12309 }
12310 }
12311
12312 cam_crop_region_t scalerCropRegion;
12313 bool scalerCropSet = false;
12314 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12315 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12316 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12317 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12318 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12319
12320 // Map coordinate system from active array to sensor output.
12321 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12322 scalerCropRegion.width, scalerCropRegion.height);
12323
12324 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12325 scalerCropRegion)) {
12326 rc = BAD_VALUE;
12327 }
12328 scalerCropSet = true;
12329 }
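    /* Note (illustrative): ANDROID_SCALER_CROP_REGION arrives in active-array
     * coordinates; mCropRegionMapper.toSensor() rescales the rectangle in place
     * into sensor-output coordinates before it is sent to the backend. The AE
     * and AF regions handled further below go through the same mapping and are
     * then validated against this scalerCropRegion via resetIfNeededROI(). */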
12330
12331 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12332 int64_t sensorExpTime =
12333 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12334 LOGD("setting sensorExpTime %lld", sensorExpTime);
12335 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12336 sensorExpTime)) {
12337 rc = BAD_VALUE;
12338 }
12339 }
12340
12341 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12342 int64_t sensorFrameDuration =
12343 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012344 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12345 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12346 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12347 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12348 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12349 sensorFrameDuration)) {
12350 rc = BAD_VALUE;
12351 }
12352 }
12353
12354 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12355 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12356 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12357 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12358 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12359 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12360 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12361 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12362 sensorSensitivity)) {
12363 rc = BAD_VALUE;
12364 }
12365 }
12366
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012367#ifndef USE_HAL_3_3
12368 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12369 int32_t ispSensitivity =
12370 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12371 if (ispSensitivity <
12372 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12373 ispSensitivity =
12374 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12375 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12376 }
12377 if (ispSensitivity >
12378 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12379 ispSensitivity =
12380 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12381 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12382 }
12383 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12384 ispSensitivity)) {
12385 rc = BAD_VALUE;
12386 }
12387 }
12388#endif
12389
Thierry Strudel3d639192016-09-09 11:52:26 -070012390 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12391 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12392 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12393 rc = BAD_VALUE;
12394 }
12395 }
12396
12397 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12398 uint8_t fwk_facedetectMode =
12399 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12400
12401 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12402 fwk_facedetectMode);
12403
12404 if (NAME_NOT_FOUND != val) {
12405 uint8_t facedetectMode = (uint8_t)val;
12406 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12407 facedetectMode)) {
12408 rc = BAD_VALUE;
12409 }
12410 }
12411 }
12412
Thierry Strudel54dc9782017-02-15 12:12:10 -080012413 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012414 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012415 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012416 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12417 histogramMode)) {
12418 rc = BAD_VALUE;
12419 }
12420 }
12421
12422 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12423 uint8_t sharpnessMapMode =
12424 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12425 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12426 sharpnessMapMode)) {
12427 rc = BAD_VALUE;
12428 }
12429 }
12430
12431 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12432 uint8_t tonemapMode =
12433 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12434 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12435 rc = BAD_VALUE;
12436 }
12437 }
12438 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12439 /*All tonemap channels will have the same number of points*/
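    /* Layout note (illustrative): each ANDROID_TONEMAP_CURVE_* entry is a flat
     * array of interleaved (Pin, Pout) pairs, which is why count/2 gives the
     * number of points below. For example, a hypothetical 3-point identity
     * curve for one channel would arrive as
     *   { 0.0f, 0.0f,  0.5f, 0.5f,  1.0f, 1.0f }
     * and be copied into tonemap_points[i][0..1] one point at a time. */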
12440 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12441 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12442 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12443 cam_rgb_tonemap_curves tonemapCurves;
12444 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12445 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12446 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12447 tonemapCurves.tonemap_points_cnt,
12448 CAM_MAX_TONEMAP_CURVE_SIZE);
12449 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12450 }
12451
12452 /* ch0 = G*/
12453 size_t point = 0;
12454 cam_tonemap_curve_t tonemapCurveGreen;
12455 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12456 for (size_t j = 0; j < 2; j++) {
12457 tonemapCurveGreen.tonemap_points[i][j] =
12458 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12459 point++;
12460 }
12461 }
12462 tonemapCurves.curves[0] = tonemapCurveGreen;
12463
12464 /* ch 1 = B */
12465 point = 0;
12466 cam_tonemap_curve_t tonemapCurveBlue;
12467 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12468 for (size_t j = 0; j < 2; j++) {
12469 tonemapCurveBlue.tonemap_points[i][j] =
12470 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12471 point++;
12472 }
12473 }
12474 tonemapCurves.curves[1] = tonemapCurveBlue;
12475
12476 /* ch 2 = R */
12477 point = 0;
12478 cam_tonemap_curve_t tonemapCurveRed;
12479 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12480 for (size_t j = 0; j < 2; j++) {
12481 tonemapCurveRed.tonemap_points[i][j] =
12482 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12483 point++;
12484 }
12485 }
12486 tonemapCurves.curves[2] = tonemapCurveRed;
12487
12488 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12489 tonemapCurves)) {
12490 rc = BAD_VALUE;
12491 }
12492 }
12493
12494 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12495 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12496 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12497 captureIntent)) {
12498 rc = BAD_VALUE;
12499 }
12500 }
12501
12502 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12503 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12504 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12505 blackLevelLock)) {
12506 rc = BAD_VALUE;
12507 }
12508 }
12509
12510 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12511 uint8_t lensShadingMapMode =
12512 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12513 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12514 lensShadingMapMode)) {
12515 rc = BAD_VALUE;
12516 }
12517 }
12518
12519 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12520 cam_area_t roi;
12521 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012522 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012523
12524 // Map coordinate system from active array to sensor output.
12525 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12526 roi.rect.height);
12527
12528 if (scalerCropSet) {
12529 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12530 }
12531 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12532 rc = BAD_VALUE;
12533 }
12534 }
12535
12536 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12537 cam_area_t roi;
12538 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012539 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012540
12541 // Map coordinate system from active array to sensor output.
12542 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12543 roi.rect.height);
12544
12545 if (scalerCropSet) {
12546 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12547 }
12548 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12549 rc = BAD_VALUE;
12550 }
12551 }
12552
12553 // CDS for non-HFR non-video mode
12554 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12555 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12556 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12557 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12558 LOGE("Invalid CDS mode %d!", *fwk_cds);
12559 } else {
12560 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12561 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12562 rc = BAD_VALUE;
12563 }
12564 }
12565 }
12566
Thierry Strudel04e026f2016-10-10 11:27:36 -070012567 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012568 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012569 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012570 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12571 }
12572 if (m_bVideoHdrEnabled)
12573 vhdr = CAM_VIDEO_HDR_MODE_ON;
12574
Thierry Strudel54dc9782017-02-15 12:12:10 -080012575 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12576
12577 if(vhdr != curr_hdr_state)
12578 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12579
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012580 rc = setVideoHdrMode(mParameters, vhdr);
12581 if (rc != NO_ERROR) {
12582 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012583 }
12584
12585 //IR
12586 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12587 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12588 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012589 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12590 uint8_t isIRon = 0;
12591
 12592         isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012593 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12594 LOGE("Invalid IR mode %d!", fwk_ir);
12595 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012596 if(isIRon != curr_ir_state )
12597 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12598
Thierry Strudel04e026f2016-10-10 11:27:36 -070012599 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12600 CAM_INTF_META_IR_MODE, fwk_ir)) {
12601 rc = BAD_VALUE;
12602 }
12603 }
12604 }
12605
Thierry Strudel54dc9782017-02-15 12:12:10 -080012606 //Binning Correction Mode
12607 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12608 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12609 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12610 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12611 || (0 > fwk_binning_correction)) {
12612 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12613 } else {
12614 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12615 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12616 rc = BAD_VALUE;
12617 }
12618 }
12619 }
12620
Thierry Strudel269c81a2016-10-12 12:13:59 -070012621 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12622 float aec_speed;
12623 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12624 LOGD("AEC Speed :%f", aec_speed);
12625 if ( aec_speed < 0 ) {
12626 LOGE("Invalid AEC mode %f!", aec_speed);
12627 } else {
12628 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12629 aec_speed)) {
12630 rc = BAD_VALUE;
12631 }
12632 }
12633 }
12634
12635 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12636 float awb_speed;
12637 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12638 LOGD("AWB Speed :%f", awb_speed);
12639 if ( awb_speed < 0 ) {
12640 LOGE("Invalid AWB mode %f!", awb_speed);
12641 } else {
12642 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12643 awb_speed)) {
12644 rc = BAD_VALUE;
12645 }
12646 }
12647 }
12648
Thierry Strudel3d639192016-09-09 11:52:26 -070012649 // TNR
12650 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12651 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12652 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012653 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012654 cam_denoise_param_t tnr;
12655 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12656 tnr.process_plates =
12657 (cam_denoise_process_type_t)frame_settings.find(
12658 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12659 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012660
12661 if(b_TnrRequested != curr_tnr_state)
12662 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12663
Thierry Strudel3d639192016-09-09 11:52:26 -070012664 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12665 rc = BAD_VALUE;
12666 }
12667 }
12668
Thierry Strudel54dc9782017-02-15 12:12:10 -080012669 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012670 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012671 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012672 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12673 *exposure_metering_mode)) {
12674 rc = BAD_VALUE;
12675 }
12676 }
12677
Thierry Strudel3d639192016-09-09 11:52:26 -070012678 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12679 int32_t fwk_testPatternMode =
12680 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12681 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12682 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12683
12684 if (NAME_NOT_FOUND != testPatternMode) {
12685 cam_test_pattern_data_t testPatternData;
12686 memset(&testPatternData, 0, sizeof(testPatternData));
12687 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12688 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12689 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12690 int32_t *fwk_testPatternData =
12691 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12692 testPatternData.r = fwk_testPatternData[0];
12693 testPatternData.b = fwk_testPatternData[3];
12694 switch (gCamCapability[mCameraId]->color_arrangement) {
12695 case CAM_FILTER_ARRANGEMENT_RGGB:
12696 case CAM_FILTER_ARRANGEMENT_GRBG:
12697 testPatternData.gr = fwk_testPatternData[1];
12698 testPatternData.gb = fwk_testPatternData[2];
12699 break;
12700 case CAM_FILTER_ARRANGEMENT_GBRG:
12701 case CAM_FILTER_ARRANGEMENT_BGGR:
12702 testPatternData.gr = fwk_testPatternData[2];
12703 testPatternData.gb = fwk_testPatternData[1];
12704 break;
12705 default:
12706 LOGE("color arrangement %d is not supported",
12707 gCamCapability[mCameraId]->color_arrangement);
12708 break;
12709 }
12710 }
12711 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12712 testPatternData)) {
12713 rc = BAD_VALUE;
12714 }
12715 } else {
12716 LOGE("Invalid framework sensor test pattern mode %d",
12717 fwk_testPatternMode);
12718 }
12719 }
12720
12721 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12722 size_t count = 0;
12723 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12724 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12725 gps_coords.data.d, gps_coords.count, count);
12726 if (gps_coords.count != count) {
12727 rc = BAD_VALUE;
12728 }
12729 }
12730
12731 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12732 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12733 size_t count = 0;
12734 const char *gps_methods_src = (const char *)
12735 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12736 memset(gps_methods, '\0', sizeof(gps_methods));
12737 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12738 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12739 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12740 if (GPS_PROCESSING_METHOD_SIZE != count) {
12741 rc = BAD_VALUE;
12742 }
12743 }
12744
12745 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12746 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12747 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12748 gps_timestamp)) {
12749 rc = BAD_VALUE;
12750 }
12751 }
12752
12753 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12754 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12755 cam_rotation_info_t rotation_info;
12756 if (orientation == 0) {
12757 rotation_info.rotation = ROTATE_0;
12758 } else if (orientation == 90) {
12759 rotation_info.rotation = ROTATE_90;
12760 } else if (orientation == 180) {
12761 rotation_info.rotation = ROTATE_180;
12762 } else if (orientation == 270) {
12763 rotation_info.rotation = ROTATE_270;
12764 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012765 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012766 rotation_info.streamId = snapshotStreamId;
12767 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12768 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12769 rc = BAD_VALUE;
12770 }
12771 }
12772
12773 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12774 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12775 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12776 rc = BAD_VALUE;
12777 }
12778 }
12779
12780 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12781 uint32_t thumb_quality = (uint32_t)
12782 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12783 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12784 thumb_quality)) {
12785 rc = BAD_VALUE;
12786 }
12787 }
12788
12789 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12790 cam_dimension_t dim;
12791 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12792 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12793 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12794 rc = BAD_VALUE;
12795 }
12796 }
12797
12798 // Internal metadata
12799 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12800 size_t count = 0;
12801 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12802 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12803 privatedata.data.i32, privatedata.count, count);
12804 if (privatedata.count != count) {
12805 rc = BAD_VALUE;
12806 }
12807 }
12808
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012809 // ISO/Exposure Priority
12810 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12811 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12812 cam_priority_mode_t mode =
12813 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12814 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12815 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12816 use_iso_exp_pty.previewOnly = FALSE;
12817 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12818 use_iso_exp_pty.value = *ptr;
12819
12820 if(CAM_ISO_PRIORITY == mode) {
12821 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12822 use_iso_exp_pty)) {
12823 rc = BAD_VALUE;
12824 }
12825 }
12826 else {
12827 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12828 use_iso_exp_pty)) {
12829 rc = BAD_VALUE;
12830 }
12831 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012832
12833 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12834 rc = BAD_VALUE;
12835 }
12836 }
12837 } else {
12838 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12839 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012840 }
12841 }
12842
12843 // Saturation
12844 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12845 int32_t* use_saturation =
12846 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12847 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12848 rc = BAD_VALUE;
12849 }
12850 }
12851
Thierry Strudel3d639192016-09-09 11:52:26 -070012852 // EV step
12853 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12854 gCamCapability[mCameraId]->exp_compensation_step)) {
12855 rc = BAD_VALUE;
12856 }
12857
12858 // CDS info
12859 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12860 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12861 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12862
12863 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12864 CAM_INTF_META_CDS_DATA, *cdsData)) {
12865 rc = BAD_VALUE;
12866 }
12867 }
12868
Shuzhen Wang19463d72016-03-08 11:09:52 -080012869 // Hybrid AE
12870 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12871 uint8_t *hybrid_ae = (uint8_t *)
12872 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12873
12874 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12875 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12876 rc = BAD_VALUE;
12877 }
12878 }
12879
Shuzhen Wang14415f52016-11-16 18:26:18 -080012880 // Histogram
12881 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12882 uint8_t histogramMode =
12883 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12884 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12885 histogramMode)) {
12886 rc = BAD_VALUE;
12887 }
12888 }
12889
12890 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12891 int32_t histogramBins =
12892 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12893 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12894 histogramBins)) {
12895 rc = BAD_VALUE;
12896 }
12897 }
12898
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012899 // Tracking AF
12900 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12901 uint8_t trackingAfTrigger =
12902 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12903 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12904 trackingAfTrigger)) {
12905 rc = BAD_VALUE;
12906 }
12907 }
12908
Thierry Strudel3d639192016-09-09 11:52:26 -070012909 return rc;
12910}
12911
12912/*===========================================================================
12913 * FUNCTION : captureResultCb
12914 *
12915 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12916 *
12917 * PARAMETERS :
12918 * @frame : frame information from mm-camera-interface
12919 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12920 * @userdata: userdata
12921 *
12922 * RETURN : NONE
12923 *==========================================================================*/
12924void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12925 camera3_stream_buffer_t *buffer,
12926 uint32_t frame_number, bool isInputBuffer, void *userdata)
12927{
12928 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12929 if (hw == NULL) {
12930 LOGE("Invalid hw %p", hw);
12931 return;
12932 }
12933
12934 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12935 return;
12936}
12937
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012938/*===========================================================================
12939 * FUNCTION : setBufferErrorStatus
12940 *
12941 * DESCRIPTION: Callback handler for channels to report any buffer errors
12942 *
12943 * PARAMETERS :
12944 * @ch : Channel on which buffer error is reported from
12945 * @frame_number : frame number on which buffer error is reported on
12946 * @buffer_status : buffer error status
12947 * @userdata: userdata
12948 *
12949 * RETURN : NONE
12950 *==========================================================================*/
12951void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12952 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12953{
12954 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12955 if (hw == NULL) {
12956 LOGE("Invalid hw %p", hw);
12957 return;
12958 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012959
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012960 hw->setBufferErrorStatus(ch, frame_number, err);
12961 return;
12962}
12963
12964void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12965 uint32_t frameNumber, camera3_buffer_status_t err)
12966{
12967 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12968 pthread_mutex_lock(&mMutex);
12969
12970 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12971 if (req.frame_number != frameNumber)
12972 continue;
12973 for (auto& k : req.mPendingBufferList) {
12974 if(k.stream->priv == ch) {
12975 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12976 }
12977 }
12978 }
12979
12980 pthread_mutex_unlock(&mMutex);
12981 return;
12982}
Thierry Strudel3d639192016-09-09 11:52:26 -070012983/*===========================================================================
12984 * FUNCTION : initialize
12985 *
12986 * DESCRIPTION: Pass framework callback pointers to HAL
12987 *
12988 * PARAMETERS :
12989 * @device : camera3 device handle
12990 * @callback_ops : framework callback function pointers
12991 * RETURN : Success : 0
12992 * Failure: -ENODEV
12993 *==========================================================================*/
12994
12995int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12996 const camera3_callback_ops_t *callback_ops)
12997{
12998 LOGD("E");
12999 QCamera3HardwareInterface *hw =
13000 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13001 if (!hw) {
13002 LOGE("NULL camera device");
13003 return -ENODEV;
13004 }
13005
13006 int rc = hw->initialize(callback_ops);
13007 LOGD("X");
13008 return rc;
13009}
13010
13011/*===========================================================================
13012 * FUNCTION : configure_streams
13013 *
13014 * DESCRIPTION: Validate and apply a stream configuration from the framework
13015 *
13016 * PARAMETERS :
13017 * @device : camera3 device handle
13018 * @stream_list : set of output streams to be configured
13019 * RETURN : Success: 0
13020 * Failure: -EINVAL (if stream configuration is invalid)
13021 * -ENODEV (fatal error)
13022 *==========================================================================*/
13023
13024int QCamera3HardwareInterface::configure_streams(
13025 const struct camera3_device *device,
13026 camera3_stream_configuration_t *stream_list)
13027{
13028 LOGD("E");
13029 QCamera3HardwareInterface *hw =
13030 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13031 if (!hw) {
13032 LOGE("NULL camera device");
13033 return -ENODEV;
13034 }
13035 int rc = hw->configureStreams(stream_list);
13036 LOGD("X");
13037 return rc;
13038}
13039
13040/*===========================================================================
13041 * FUNCTION : construct_default_request_settings
13042 *
13043 * DESCRIPTION: Configure a settings buffer to meet the required use case
13044 *
13045 * PARAMETERS :
13046 * @device : camera3 device handle
13047 * @type : request template type (e.g. CAMERA3_TEMPLATE_PREVIEW)
13048 * RETURN : Success: Return valid metadata
13049 * Failure: Return NULL
13050 *==========================================================================*/
13051const camera_metadata_t* QCamera3HardwareInterface::
13052 construct_default_request_settings(const struct camera3_device *device,
13053 int type)
13054{
13055
13056 LOGD("E");
13057 camera_metadata_t* fwk_metadata = NULL;
13058 QCamera3HardwareInterface *hw =
13059 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13060 if (!hw) {
13061 LOGE("NULL camera device");
13062 return NULL;
13063 }
13064
13065 fwk_metadata = hw->translateCapabilityToMetadata(type);
13066
13067 LOGD("X");
13068 return fwk_metadata;
13069}
13070
13071/*===========================================================================
13072 * FUNCTION : process_capture_request
13073 *
13074 * DESCRIPTION:
13075 *
13076 * PARAMETERS :
13077 *
13078 *
13079 * RETURN :
13080 *==========================================================================*/
13081int QCamera3HardwareInterface::process_capture_request(
13082 const struct camera3_device *device,
13083 camera3_capture_request_t *request)
13084{
13085 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013086 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013087 QCamera3HardwareInterface *hw =
13088 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13089 if (!hw) {
13090 LOGE("NULL camera device");
13091 return -EINVAL;
13092 }
13093
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013094 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013095 LOGD("X");
13096 return rc;
13097}
13098
13099/*===========================================================================
13100 * FUNCTION : dump
13101 *
13102 * DESCRIPTION:
13103 *
13104 * PARAMETERS :
13105 *
13106 *
13107 * RETURN :
13108 *==========================================================================*/
13109
13110void QCamera3HardwareInterface::dump(
13111 const struct camera3_device *device, int fd)
13112{
13113 /* Log level property is read when "adb shell dumpsys media.camera" is
13114 called so that the log level can be controlled without restarting
13115 the media server */
13116 getLogLevel();
13117
13118 LOGD("E");
13119 QCamera3HardwareInterface *hw =
13120 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13121 if (!hw) {
13122 LOGE("NULL camera device");
13123 return;
13124 }
13125
13126 hw->dump(fd);
13127 LOGD("X");
13128 return;
13129}
13130
13131/*===========================================================================
13132 * FUNCTION : flush
13133 *
13134 * DESCRIPTION: Flush all in-flight requests and return the device to an idle state
13135 *
13136 * PARAMETERS :
13137 * @device : camera3 device handle
13138 *
13139 * RETURN : 0 on success; -ENODEV on fatal error
13140 *==========================================================================*/
13141
13142int QCamera3HardwareInterface::flush(
13143 const struct camera3_device *device)
13144{
13145 int rc;
13146 LOGD("E");
13147 QCamera3HardwareInterface *hw =
13148 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13149 if (!hw) {
13150 LOGE("NULL camera device");
13151 return -EINVAL;
13152 }
13153
13154 pthread_mutex_lock(&hw->mMutex);
13155 // Validate current state
13156 switch (hw->mState) {
13157 case STARTED:
13158 /* valid state */
13159 break;
13160
13161 case ERROR:
13162 pthread_mutex_unlock(&hw->mMutex);
13163 hw->handleCameraDeviceError();
13164 return -ENODEV;
13165
13166 default:
13167 LOGI("Flush returned during state %d", hw->mState);
13168 pthread_mutex_unlock(&hw->mMutex);
13169 return 0;
13170 }
13171 pthread_mutex_unlock(&hw->mMutex);
13172
13173 rc = hw->flush(true /* restart channels */ );
13174 LOGD("X");
13175 return rc;
13176}
13177
13178/*===========================================================================
13179 * FUNCTION : close_camera_device
13180 *
13181 * DESCRIPTION: Close the camera device and release all associated resources
13182 *
13183 * PARAMETERS :
13184 * @device : hardware device handle to be closed
13185 *
13186 * RETURN : NO_ERROR on success; BAD_VALUE if device is NULL
13187 *==========================================================================*/
13188int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13189{
13190 int ret = NO_ERROR;
13191 QCamera3HardwareInterface *hw =
13192 reinterpret_cast<QCamera3HardwareInterface *>(
13193 reinterpret_cast<camera3_device_t *>(device)->priv);
13194 if (!hw) {
13195 LOGE("NULL camera device");
13196 return BAD_VALUE;
13197 }
13198
13199 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13200 delete hw;
13201 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013202 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013203 return ret;
13204}
13205
13206/*===========================================================================
13207 * FUNCTION : getWaveletDenoiseProcessPlate
13208 *
13209 * DESCRIPTION: query wavelet denoise process plate
13210 *
13211 * PARAMETERS : None
13212 *
13213 * RETURN : WNR prcocess plate value
13214 *==========================================================================*/
13215cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13216{
13217 char prop[PROPERTY_VALUE_MAX];
13218 memset(prop, 0, sizeof(prop));
13219 property_get("persist.denoise.process.plates", prop, "0");
13220 int processPlate = atoi(prop);
13221 switch(processPlate) {
13222 case 0:
13223 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13224 case 1:
13225 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13226 case 2:
13227 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13228 case 3:
13229 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13230 default:
13231 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13232 }
13233}
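/*
 * Example (illustrative, assuming a debuggable build where setprop is
 * permitted): the plate returned above is selected purely by the
 * "persist.denoise.process.plates" property, e.g.
 *
 *   adb shell setprop persist.denoise.process.plates 1   // CAM_WAVELET_DENOISE_CBCR_ONLY
 *   adb shell setprop persist.denoise.process.plates 2   // CAM_WAVELET_DENOISE_STREAMLINE_YCBCR
 *
 * Any value outside 0-3 falls back to CAM_WAVELET_DENOISE_STREAMLINE_YCBCR.
 */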
13234
13235
13236/*===========================================================================
13237 * FUNCTION : getTemporalDenoiseProcessPlate
13238 *
13239 * DESCRIPTION: query temporal denoise process plate
13240 *
13241 * PARAMETERS : None
13242 *
13243 * RETURN : TNR process plate value
13244 *==========================================================================*/
13245cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13246{
13247 char prop[PROPERTY_VALUE_MAX];
13248 memset(prop, 0, sizeof(prop));
13249 property_get("persist.tnr.process.plates", prop, "0");
13250 int processPlate = atoi(prop);
13251 switch(processPlate) {
13252 case 0:
13253 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13254 case 1:
13255 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13256 case 2:
13257 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13258 case 3:
13259 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13260 default:
13261 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13262 }
13263}
13264
13265
13266/*===========================================================================
13267 * FUNCTION : extractSceneMode
13268 *
13269 * DESCRIPTION: Extract scene mode from frameworks set metadata
13270 *
13271 * PARAMETERS :
13272 * @frame_settings: CameraMetadata reference
13273 * @metaMode: ANDROID_CONTROL_MODE value set by the framework
13274 * @hal_metadata: hal metadata structure
13275 *
13276 * RETURN : NO_ERROR on success; error code otherwise
13277 *==========================================================================*/
13278int32_t QCamera3HardwareInterface::extractSceneMode(
13279 const CameraMetadata &frame_settings, uint8_t metaMode,
13280 metadata_buffer_t *hal_metadata)
13281{
13282 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013283 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13284
13285 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13286 LOGD("Ignoring control mode OFF_KEEP_STATE");
13287 return NO_ERROR;
13288 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013289
13290 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13291 camera_metadata_ro_entry entry =
13292 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13293 if (0 == entry.count)
13294 return rc;
13295
13296 uint8_t fwk_sceneMode = entry.data.u8[0];
13297
13298 int val = lookupHalName(SCENE_MODES_MAP,
13299 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13300 fwk_sceneMode);
13301 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013302 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013303 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013304 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013305 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013306
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013307 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13308 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13309 }
13310
13311 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13312 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013313 cam_hdr_param_t hdr_params;
13314 hdr_params.hdr_enable = 1;
13315 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13316 hdr_params.hdr_need_1x = false;
13317 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13318 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13319 rc = BAD_VALUE;
13320 }
13321 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013322
Thierry Strudel3d639192016-09-09 11:52:26 -070013323 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13324 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13325 rc = BAD_VALUE;
13326 }
13327 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013328
13329 if (mForceHdrSnapshot) {
13330 cam_hdr_param_t hdr_params;
13331 hdr_params.hdr_enable = 1;
13332 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13333 hdr_params.hdr_need_1x = false;
13334 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13335 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13336 rc = BAD_VALUE;
13337 }
13338 }
13339
Thierry Strudel3d639192016-09-09 11:52:26 -070013340 return rc;
13341}
13342
13343/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013344 * FUNCTION : setVideoHdrMode
13345 *
13346 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13347 *
13348 * PARAMETERS :
13349 * @hal_metadata: hal metadata structure
13350 * @vhdr: requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE)
13351 *
13352 * RETURN : NO_ERROR on success; BAD_VALUE for an invalid mode
13353 *==========================================================================*/
13354int32_t QCamera3HardwareInterface::setVideoHdrMode(
13355 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13356{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013357 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13358 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13359 }
13360
13361 LOGE("Invalid Video HDR mode %d!", vhdr);
13362 return BAD_VALUE;
13363}
13364
13365/*===========================================================================
13366 * FUNCTION : setSensorHDR
13367 *
13368 * DESCRIPTION: Enable/disable sensor HDR.
13369 *
13370 * PARAMETERS :
13371 * @hal_metadata: hal metadata structure
13372 * @enable: whether to enable or disable sensor HDR
13373 * @isVideoHdrEnable: true when called for video HDR rather than scene-mode HDR
13374 * RETURN : NO_ERROR on success; BAD_VALUE otherwise
13375 *==========================================================================*/
13376int32_t QCamera3HardwareInterface::setSensorHDR(
13377 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13378{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013379 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013380 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13381
13382 if (enable) {
13383 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13384 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13385 #ifdef _LE_CAMERA_
13386 //Default to staggered HDR for IOT
13387 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13388 #else
13389 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13390 #endif
13391 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13392 }
13393
13394 bool isSupported = false;
13395 switch (sensor_hdr) {
13396 case CAM_SENSOR_HDR_IN_SENSOR:
13397 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13398 CAM_QCOM_FEATURE_SENSOR_HDR) {
13399 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013400 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013401 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013402 break;
13403 case CAM_SENSOR_HDR_ZIGZAG:
13404 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13405 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13406 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013407 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013408 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013409 break;
13410 case CAM_SENSOR_HDR_STAGGERED:
13411 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13412 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13413 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013414 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013415 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013416 break;
13417 case CAM_SENSOR_HDR_OFF:
13418 isSupported = true;
13419 LOGD("Turning off sensor HDR");
13420 break;
13421 default:
13422 LOGE("HDR mode %d not supported", sensor_hdr);
13423 rc = BAD_VALUE;
13424 break;
13425 }
13426
13427 if(isSupported) {
13428 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13429 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13430 rc = BAD_VALUE;
13431 } else {
13432 if(!isVideoHdrEnable)
13433 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013434 }
13435 }
13436 return rc;
13437}
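/*
 * Example (illustrative, assuming a debuggable build and a sensor that
 * advertises the matching CAM_QCOM_FEATURE_* capability bit): the HDR flavor
 * applied above comes from the "persist.camera.sensor.hdr" property, where 0
 * turns sensor HDR off and 3 selects staggered HDR (the IOT default), e.g.
 *
 *   adb shell setprop persist.camera.sensor.hdr 3
 */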
13438
13439/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013440 * FUNCTION : needRotationReprocess
13441 *
13442 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13443 *
13444 * PARAMETERS : none
13445 *
13446 * RETURN : true: needed
13447 * false: no need
13448 *==========================================================================*/
13449bool QCamera3HardwareInterface::needRotationReprocess()
13450{
13451 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13452 // current rotation is not zero, and pp has the capability to process rotation
13453 LOGH("need do reprocess for rotation");
13454 return true;
13455 }
13456
13457 return false;
13458}
13459
13460/*===========================================================================
13461 * FUNCTION : needReprocess
13462 *
13463 * DESCRIPTION: whether reprocess is needed
13464 *
13465 * PARAMETERS : none
13466 *
13467 * RETURN : true: needed
13468 * false: no need
13469 *==========================================================================*/
13470bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13471{
13472 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13473 // TODO: add for ZSL HDR later
13474 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13475 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13476 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13477 return true;
13478 } else {
13479 LOGH("already post processed frame");
13480 return false;
13481 }
13482 }
13483 return needRotationReprocess();
13484}
13485
13486/*===========================================================================
13487 * FUNCTION : needJpegExifRotation
13488 *
13489 * DESCRIPTION: if rotation from jpeg is needed
13490 *
13491 * PARAMETERS : none
13492 *
13493 * RETURN : true: needed
13494 * false: no need
13495 *==========================================================================*/
13496bool QCamera3HardwareInterface::needJpegExifRotation()
13497{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013498 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013499 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13500 LOGD("Need use Jpeg EXIF Rotation");
13501 return true;
13502 }
13503 return false;
13504}
13505
13506/*===========================================================================
13507 * FUNCTION : addOfflineReprocChannel
13508 *
13509 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13510 * coming from input channel
13511 *
13512 * PARAMETERS :
13513 * @config : reprocess configuration
13514 * @inputChHandle : pointer to the input (source) channel
13515 *
13516 *
13517 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13518 *==========================================================================*/
13519QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13520 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13521{
13522 int32_t rc = NO_ERROR;
13523 QCamera3ReprocessChannel *pChannel = NULL;
13524
13525 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013526 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13527 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013528 if (NULL == pChannel) {
13529 LOGE("no mem for reprocess channel");
13530 return NULL;
13531 }
13532
13533 rc = pChannel->initialize(IS_TYPE_NONE);
13534 if (rc != NO_ERROR) {
13535 LOGE("init reprocess channel failed, ret = %d", rc);
13536 delete pChannel;
13537 return NULL;
13538 }
13539
13540 // pp feature config
13541 cam_pp_feature_config_t pp_config;
13542 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13543
13544 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13545 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13546 & CAM_QCOM_FEATURE_DSDN) {
13547        //Use CPP CDS in case h/w supports it.
13548 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13549 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13550 }
13551 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13552 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13553 }
13554
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013555 if (config.hdr_param.hdr_enable) {
13556 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13557 pp_config.hdr_param = config.hdr_param;
13558 }
13559
13560 if (mForceHdrSnapshot) {
13561 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13562 pp_config.hdr_param.hdr_enable = 1;
13563 pp_config.hdr_param.hdr_need_1x = 0;
13564 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13565 }
13566
Thierry Strudel3d639192016-09-09 11:52:26 -070013567 rc = pChannel->addReprocStreamsFromSource(pp_config,
13568 config,
13569 IS_TYPE_NONE,
13570 mMetadataChannel);
13571
13572 if (rc != NO_ERROR) {
13573 delete pChannel;
13574 return NULL;
13575 }
13576 return pChannel;
13577}
13578
13579/*===========================================================================
13580 * FUNCTION : getMobicatMask
13581 *
13582 * DESCRIPTION: returns mobicat mask
13583 *
13584 * PARAMETERS : none
13585 *
13586 * RETURN : mobicat mask
13587 *
13588 *==========================================================================*/
13589uint8_t QCamera3HardwareInterface::getMobicatMask()
13590{
13591 return m_MobicatMask;
13592}
13593
13594/*===========================================================================
13595 * FUNCTION : setMobicat
13596 *
13597 * DESCRIPTION: set Mobicat on/off.
13598 *
13599 * PARAMETERS :
13600 * @params : none
13601 *
13602 * RETURN : int32_t type of status
13603 * NO_ERROR -- success
13604 * none-zero failure code
13605 *==========================================================================*/
13606int32_t QCamera3HardwareInterface::setMobicat()
13607{
13608 char value [PROPERTY_VALUE_MAX];
13609 property_get("persist.camera.mobicat", value, "0");
13610 int32_t ret = NO_ERROR;
13611 uint8_t enableMobi = (uint8_t)atoi(value);
13612
13613 if (enableMobi) {
13614 tune_cmd_t tune_cmd;
13615 tune_cmd.type = SET_RELOAD_CHROMATIX;
13616 tune_cmd.module = MODULE_ALL;
13617 tune_cmd.value = TRUE;
13618 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13619 CAM_INTF_PARM_SET_VFE_COMMAND,
13620 tune_cmd);
13621
13622 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13623 CAM_INTF_PARM_SET_PP_COMMAND,
13624 tune_cmd);
13625 }
13626 m_MobicatMask = enableMobi;
13627
13628 return ret;
13629}
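/*
 * Example (illustrative, assuming a debuggable build): Mobicat metadata is
 * gated only by the "persist.camera.mobicat" property checked above; enabling
 * it asks the VFE and PP to reload chromatix, and the mask is later exposed
 * to channels through getMobicatMask().
 *
 *   adb shell setprop persist.camera.mobicat 1
 */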
13630
13631/*===========================================================================
13632* FUNCTION : getLogLevel
13633*
13634* DESCRIPTION: Reads the log level property into a variable
13635*
13636* PARAMETERS :
13637* None
13638*
13639* RETURN :
13640* None
13641*==========================================================================*/
13642void QCamera3HardwareInterface::getLogLevel()
13643{
13644 char prop[PROPERTY_VALUE_MAX];
13645 uint32_t globalLogLevel = 0;
13646
13647 property_get("persist.camera.hal.debug", prop, "0");
13648 int val = atoi(prop);
13649 if (0 <= val) {
13650 gCamHal3LogLevel = (uint32_t)val;
13651 }
13652
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013653 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013654 gKpiDebugLevel = atoi(prop);
13655
13656 property_get("persist.camera.global.debug", prop, "0");
13657 val = atoi(prop);
13658 if (0 <= val) {
13659 globalLogLevel = (uint32_t)val;
13660 }
13661
13662 /* Highest log level among hal.logs and global.logs is selected */
13663 if (gCamHal3LogLevel < globalLogLevel)
13664 gCamHal3LogLevel = globalLogLevel;
13665
13666 return;
13667}
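/*
 * Example (illustrative, assuming a debuggable build): since getLogLevel() is
 * re-read from dump(), the HAL log level can be raised at runtime without
 * restarting the media server; the effective level is the higher of
 * persist.camera.hal.debug and persist.camera.global.debug.
 *
 *   adb shell setprop persist.camera.hal.debug 3
 *   adb shell dumpsys media.camera
 */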
13668
13669/*===========================================================================
13670 * FUNCTION : validateStreamRotations
13671 *
13672 * DESCRIPTION: Check if the rotations requested are supported
13673 *
13674 * PARAMETERS :
13675 * @stream_list : streams to be configured
13676 *
13677 * RETURN : NO_ERROR on success
13678 * -EINVAL on failure
13679 *
13680 *==========================================================================*/
13681int QCamera3HardwareInterface::validateStreamRotations(
13682 camera3_stream_configuration_t *streamList)
13683{
13684 int rc = NO_ERROR;
13685
13686 /*
13687 * Loop through all streams requested in configuration
13688 * Check if unsupported rotations have been requested on any of them
13689 */
13690 for (size_t j = 0; j < streamList->num_streams; j++){
13691 camera3_stream_t *newStream = streamList->streams[j];
13692
Emilian Peev35ceeed2017-06-29 11:58:56 -070013693 switch(newStream->rotation) {
13694 case CAMERA3_STREAM_ROTATION_0:
13695 case CAMERA3_STREAM_ROTATION_90:
13696 case CAMERA3_STREAM_ROTATION_180:
13697 case CAMERA3_STREAM_ROTATION_270:
13698 //Expected values
13699 break;
13700 default:
13701 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
13702 "type:%d and stream format:%d", __func__,
13703 newStream->rotation, newStream->stream_type,
13704 newStream->format);
13705 return -EINVAL;
13706 }
13707
Thierry Strudel3d639192016-09-09 11:52:26 -070013708 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13709 bool isImplDef = (newStream->format ==
13710 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13711 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13712 isImplDef);
13713
13714 if (isRotated && (!isImplDef || isZsl)) {
13715 LOGE("Error: Unsupported rotation of %d requested for stream"
13716 "type:%d and stream format:%d",
13717 newStream->rotation, newStream->stream_type,
13718 newStream->format);
13719 rc = -EINVAL;
13720 break;
13721 }
13722 }
13723
13724 return rc;
13725}
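/*
 * Illustrative summary of the checks above: a non-zero rotation is accepted
 * only on an IMPLEMENTATION_DEFINED output stream that is not the ZSL
 * bidirectional stream; e.g. a 90-degree rotation on a YUV_420_888 or BLOB
 * stream, or on the bidirectional ZSL stream, is rejected with -EINVAL, while
 * CAMERA3_STREAM_ROTATION_0 is always accepted.
 */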
13726
13727/*===========================================================================
13728* FUNCTION : getFlashInfo
13729*
13730* DESCRIPTION: Retrieve information about whether the device has a flash.
13731*
13732* PARAMETERS :
13733* @cameraId : Camera id to query
13734* @hasFlash : Boolean indicating whether there is a flash device
13735* associated with given camera
13736* @flashNode : If a flash device exists, this will be its device node.
13737*
13738* RETURN :
13739* None
13740*==========================================================================*/
13741void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13742 bool& hasFlash,
13743 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13744{
13745 cam_capability_t* camCapability = gCamCapability[cameraId];
13746 if (NULL == camCapability) {
13747 hasFlash = false;
13748 flashNode[0] = '\0';
13749 } else {
13750 hasFlash = camCapability->flash_available;
13751 strlcpy(flashNode,
13752 (char*)camCapability->flash_dev_name,
13753 QCAMERA_MAX_FILEPATH_LENGTH);
13754 }
13755}
13756
13757/*===========================================================================
13758* FUNCTION : getEepromVersionInfo
13759*
13760* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13761*
13762* PARAMETERS : None
13763*
13764* RETURN : string describing EEPROM version
13765* "\0" if no such info available
13766*==========================================================================*/
13767const char *QCamera3HardwareInterface::getEepromVersionInfo()
13768{
13769 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13770}
13771
13772/*===========================================================================
13773* FUNCTION : getLdafCalib
13774*
13775* DESCRIPTION: Retrieve Laser AF calibration data
13776*
13777* PARAMETERS : None
13778*
13779* RETURN : Two uint32_t describing laser AF calibration data
13780* NULL if none is available.
13781*==========================================================================*/
13782const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13783{
13784 if (mLdafCalibExist) {
13785 return &mLdafCalib[0];
13786 } else {
13787 return NULL;
13788 }
13789}
13790
13791/*===========================================================================
13792 * FUNCTION : dynamicUpdateMetaStreamInfo
13793 *
13794 * DESCRIPTION: This function:
13795 * (1) stops all the channels
13796 * (2) returns error on pending requests and buffers
13797 * (3) sends metastream_info in setparams
13798 * (4) starts all channels
13799 * This is useful when sensor has to be restarted to apply any
13800 * settings such as frame rate from a different sensor mode
13801 *
13802 * PARAMETERS : None
13803 *
13804 * RETURN : NO_ERROR on success
13805 * Error codes on failure
13806 *
13807 *==========================================================================*/
13808int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13809{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013810 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013811 int rc = NO_ERROR;
13812
13813 LOGD("E");
13814
13815 rc = stopAllChannels();
13816 if (rc < 0) {
13817 LOGE("stopAllChannels failed");
13818 return rc;
13819 }
13820
13821 rc = notifyErrorForPendingRequests();
13822 if (rc < 0) {
13823 LOGE("notifyErrorForPendingRequests failed");
13824 return rc;
13825 }
13826
13827 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13828 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13829 "Format:%d",
13830 mStreamConfigInfo.type[i],
13831 mStreamConfigInfo.stream_sizes[i].width,
13832 mStreamConfigInfo.stream_sizes[i].height,
13833 mStreamConfigInfo.postprocess_mask[i],
13834 mStreamConfigInfo.format[i]);
13835 }
13836
13837 /* Send meta stream info once again so that ISP can start */
13838 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13839 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13840 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13841 mParameters);
13842 if (rc < 0) {
13843 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13844 }
13845
13846 rc = startAllChannels();
13847 if (rc < 0) {
13848 LOGE("startAllChannels failed");
13849 return rc;
13850 }
13851
13852 LOGD("X");
13853 return rc;
13854}
13855
13856/*===========================================================================
13857 * FUNCTION : stopAllChannels
13858 *
13859 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13860 *
13861 * PARAMETERS : None
13862 *
13863 * RETURN : NO_ERROR on success
13864 * Error codes on failure
13865 *
13866 *==========================================================================*/
13867int32_t QCamera3HardwareInterface::stopAllChannels()
13868{
13869 int32_t rc = NO_ERROR;
13870
13871 LOGD("Stopping all channels");
13872 // Stop the Streams/Channels
13873 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13874 it != mStreamInfo.end(); it++) {
13875 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13876 if (channel) {
13877 channel->stop();
13878 }
13879 (*it)->status = INVALID;
13880 }
13881
13882 if (mSupportChannel) {
13883 mSupportChannel->stop();
13884 }
13885 if (mAnalysisChannel) {
13886 mAnalysisChannel->stop();
13887 }
13888 if (mRawDumpChannel) {
13889 mRawDumpChannel->stop();
13890 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013891 if (mHdrPlusRawSrcChannel) {
13892 mHdrPlusRawSrcChannel->stop();
13893 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013894 if (mMetadataChannel) {
13895 /* If content of mStreamInfo is not 0, there is metadata stream */
13896 mMetadataChannel->stop();
13897 }
13898
13899 LOGD("All channels stopped");
13900 return rc;
13901}
13902
13903/*===========================================================================
13904 * FUNCTION : startAllChannels
13905 *
13906 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13907 *
13908 * PARAMETERS : None
13909 *
13910 * RETURN : NO_ERROR on success
13911 * Error codes on failure
13912 *
13913 *==========================================================================*/
13914int32_t QCamera3HardwareInterface::startAllChannels()
13915{
13916 int32_t rc = NO_ERROR;
13917
13918 LOGD("Start all channels ");
13919 // Start the Streams/Channels
13920 if (mMetadataChannel) {
13921 /* If content of mStreamInfo is not 0, there is metadata stream */
13922 rc = mMetadataChannel->start();
13923 if (rc < 0) {
13924 LOGE("META channel start failed");
13925 return rc;
13926 }
13927 }
13928 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13929 it != mStreamInfo.end(); it++) {
13930 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13931 if (channel) {
13932 rc = channel->start();
13933 if (rc < 0) {
13934 LOGE("channel start failed");
13935 return rc;
13936 }
13937 }
13938 }
13939 if (mAnalysisChannel) {
13940 mAnalysisChannel->start();
13941 }
13942 if (mSupportChannel) {
13943 rc = mSupportChannel->start();
13944 if (rc < 0) {
13945 LOGE("Support channel start failed");
13946 return rc;
13947 }
13948 }
13949 if (mRawDumpChannel) {
13950 rc = mRawDumpChannel->start();
13951 if (rc < 0) {
13952 LOGE("RAW dump channel start failed");
13953 return rc;
13954 }
13955 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013956 if (mHdrPlusRawSrcChannel) {
13957 rc = mHdrPlusRawSrcChannel->start();
13958 if (rc < 0) {
13959 LOGE("HDR+ RAW channel start failed");
13960 return rc;
13961 }
13962 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013963
13964 LOGD("All channels started");
13965 return rc;
13966}
13967
13968/*===========================================================================
13969 * FUNCTION : notifyErrorForPendingRequests
13970 *
13971 * DESCRIPTION: This function sends error for all the pending requests/buffers
13972 *
13973 * PARAMETERS : None
13974 *
13975 * RETURN : Error codes
13976 * NO_ERROR on success
13977 *
13978 *==========================================================================*/
13979int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13980{
Emilian Peev7650c122017-01-19 08:24:33 -080013981 notifyErrorFoPendingDepthData(mDepthChannel);
13982
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013983 auto pendingRequest = mPendingRequestsList.begin();
13984 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070013985
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013986 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
13987 // buffers (for which buffers aren't sent yet).
13988 while (pendingRequest != mPendingRequestsList.end() ||
13989 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
13990 if (pendingRequest == mPendingRequestsList.end() ||
13991 pendingBuffer->frame_number < pendingRequest->frame_number) {
13992 // If metadata for this frame was sent, notify about a buffer error and returns buffers
13993 // with error.
13994 for (auto &info : pendingBuffer->mPendingBufferList) {
13995 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070013996 camera3_notify_msg_t notify_msg;
13997 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13998 notify_msg.type = CAMERA3_MSG_ERROR;
13999 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014000 notify_msg.message.error.error_stream = info.stream;
14001 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014002 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014003
14004 camera3_stream_buffer_t buffer = {};
14005 buffer.acquire_fence = -1;
14006 buffer.release_fence = -1;
14007 buffer.buffer = info.buffer;
14008 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14009 buffer.stream = info.stream;
14010 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014011 }
14012
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014013 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14014 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14015 pendingBuffer->frame_number > pendingRequest->frame_number) {
14016 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014017 camera3_notify_msg_t notify_msg;
14018 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14019 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014020 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14021 notify_msg.message.error.error_stream = nullptr;
14022 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014023 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014024
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014025 if (pendingRequest->input_buffer != nullptr) {
14026 camera3_capture_result result = {};
14027 result.frame_number = pendingRequest->frame_number;
14028 result.result = nullptr;
14029 result.input_buffer = pendingRequest->input_buffer;
14030 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014031 }
14032
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014033 mShutterDispatcher.clear(pendingRequest->frame_number);
14034 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14035 } else {
14036 // If both buffers and result metadata weren't sent yet, notify about a request error
14037 // and return buffers with error.
14038 for (auto &info : pendingBuffer->mPendingBufferList) {
14039 camera3_notify_msg_t notify_msg;
14040 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14041 notify_msg.type = CAMERA3_MSG_ERROR;
14042 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14043 notify_msg.message.error.error_stream = info.stream;
14044 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14045 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014046
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014047 camera3_stream_buffer_t buffer = {};
14048 buffer.acquire_fence = -1;
14049 buffer.release_fence = -1;
14050 buffer.buffer = info.buffer;
14051 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14052 buffer.stream = info.stream;
14053 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14054 }
14055
14056 if (pendingRequest->input_buffer != nullptr) {
14057 camera3_capture_result result = {};
14058 result.frame_number = pendingRequest->frame_number;
14059 result.result = nullptr;
14060 result.input_buffer = pendingRequest->input_buffer;
14061 orchestrateResult(&result);
14062 }
14063
14064 mShutterDispatcher.clear(pendingRequest->frame_number);
14065 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14066 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014067 }
14068 }
14069
14070 /* Reset pending frame Drop list and requests list */
14071 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014072 mShutterDispatcher.clear();
14073 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014074 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014075 LOGH("Cleared all the pending buffers ");
14076
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014077 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014078}
14079
14080bool QCamera3HardwareInterface::isOnEncoder(
14081 const cam_dimension_t max_viewfinder_size,
14082 uint32_t width, uint32_t height)
14083{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014084 return ((width > (uint32_t)max_viewfinder_size.width) ||
14085 (height > (uint32_t)max_viewfinder_size.height) ||
14086 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14087 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014088}
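/*
 * Worked example of the check above, assuming a hypothetical
 * max_viewfinder_size of 1920x1080: a 1280x720 stream is not "on encoder"
 * (false), a 3840x2160 stream is (true, it exceeds the viewfinder bound), and
 * any stream larger than VIDEO_4K_WIDTH x VIDEO_4K_HEIGHT is "on encoder"
 * regardless of the viewfinder size.
 */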
14089
14090/*===========================================================================
14091 * FUNCTION : setBundleInfo
14092 *
14093 * DESCRIPTION: Set bundle info for all streams that are bundle.
14094 *
14095 * PARAMETERS : None
14096 *
14097 * RETURN : NO_ERROR on success
14098 * Error codes on failure
14099 *==========================================================================*/
14100int32_t QCamera3HardwareInterface::setBundleInfo()
14101{
14102 int32_t rc = NO_ERROR;
14103
14104 if (mChannelHandle) {
14105 cam_bundle_config_t bundleInfo;
14106 memset(&bundleInfo, 0, sizeof(bundleInfo));
14107 rc = mCameraHandle->ops->get_bundle_info(
14108 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14109 if (rc != NO_ERROR) {
14110 LOGE("get_bundle_info failed");
14111 return rc;
14112 }
14113 if (mAnalysisChannel) {
14114 mAnalysisChannel->setBundleInfo(bundleInfo);
14115 }
14116 if (mSupportChannel) {
14117 mSupportChannel->setBundleInfo(bundleInfo);
14118 }
14119 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14120 it != mStreamInfo.end(); it++) {
14121 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14122 channel->setBundleInfo(bundleInfo);
14123 }
14124 if (mRawDumpChannel) {
14125 mRawDumpChannel->setBundleInfo(bundleInfo);
14126 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014127 if (mHdrPlusRawSrcChannel) {
14128 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14129 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014130 }
14131
14132 return rc;
14133}
14134
14135/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014136 * FUNCTION : setInstantAEC
14137 *
14138 * DESCRIPTION: Set Instant AEC related params.
14139 *
14140 * PARAMETERS :
14141 * @meta: CameraMetadata reference
14142 *
14143 * RETURN : NO_ERROR on success
14144 * Error codes on failure
14145 *==========================================================================*/
14146int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14147{
14148 int32_t rc = NO_ERROR;
14149 uint8_t val = 0;
14150 char prop[PROPERTY_VALUE_MAX];
14151
14152 // First try to configure instant AEC from framework metadata
14153 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14154 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14155 }
14156
14157 // If framework did not set this value, try to read from set prop.
14158 if (val == 0) {
14159 memset(prop, 0, sizeof(prop));
14160 property_get("persist.camera.instant.aec", prop, "0");
14161 val = (uint8_t)atoi(prop);
14162 }
14163
14164 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14165 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14166 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14167 mInstantAEC = val;
14168 mInstantAECSettledFrameNumber = 0;
14169 mInstantAecFrameIdxCount = 0;
14170 LOGH("instantAEC value set %d",val);
14171 if (mInstantAEC) {
14172 memset(prop, 0, sizeof(prop));
14173 property_get("persist.camera.ae.instant.bound", prop, "10");
14174 int32_t aec_frame_skip_cnt = atoi(prop);
14175 if (aec_frame_skip_cnt >= 0) {
14176 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14177 } else {
14178 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14179 rc = BAD_VALUE;
14180 }
14181 }
14182 } else {
14183 LOGE("Bad instant aec value set %d", val);
14184 rc = BAD_VALUE;
14185 }
14186 return rc;
14187}
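/*
 * Example (illustrative, assuming a debuggable build): instant AEC is taken
 * from the QCAMERA3_INSTANT_AEC_MODE vendor tag when set, otherwise from the
 * "persist.camera.instant.aec" property read above; the value must lie in
 * [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX) or BAD_VALUE is
 * returned. The optional frame-skip bound can be tuned the same way, e.g.
 *
 *   adb shell setprop persist.camera.instant.aec 1
 *   adb shell setprop persist.camera.ae.instant.bound 10
 */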
14188
14189/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014190 * FUNCTION : get_num_overall_buffers
14191 *
14192 * DESCRIPTION: Return the total number of pending buffers across all requests.
14193 *
14194 * PARAMETERS : None
14195 *
14196 * RETURN : Number of overall pending buffers
14197 *
14198 *==========================================================================*/
14199uint32_t PendingBuffersMap::get_num_overall_buffers()
14200{
14201 uint32_t sum_buffers = 0;
14202 for (auto &req : mPendingBuffersInRequest) {
14203 sum_buffers += req.mPendingBufferList.size();
14204 }
14205 return sum_buffers;
14206}
14207
14208/*===========================================================================
14209 * FUNCTION : removeBuf
14210 *
14211 * DESCRIPTION: Remove a matching buffer from tracker.
14212 *
14213 * PARAMETERS : @buffer: image buffer for the callback
14214 *
14215 * RETURN : None
14216 *
14217 *==========================================================================*/
14218void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14219{
14220 bool buffer_found = false;
14221 for (auto req = mPendingBuffersInRequest.begin();
14222 req != mPendingBuffersInRequest.end(); req++) {
14223 for (auto k = req->mPendingBufferList.begin();
14224 k != req->mPendingBufferList.end(); k++ ) {
14225 if (k->buffer == buffer) {
14226 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14227 req->frame_number, buffer);
14228 k = req->mPendingBufferList.erase(k);
14229 if (req->mPendingBufferList.empty()) {
14230 // Remove this request from Map
14231 req = mPendingBuffersInRequest.erase(req);
14232 }
14233 buffer_found = true;
14234 break;
14235 }
14236 }
14237 if (buffer_found) {
14238 break;
14239 }
14240 }
14241 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14242 get_num_overall_buffers());
14243}
14244
14245/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014246 * FUNCTION : getBufErrStatus
14247 *
14248 * DESCRIPTION: get buffer error status
14249 *
14250 * PARAMETERS : @buffer: buffer handle
14251 *
14252 * RETURN : Error status
14253 *
14254 *==========================================================================*/
14255int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14256{
14257 for (auto& req : mPendingBuffersInRequest) {
14258 for (auto& k : req.mPendingBufferList) {
14259 if (k.buffer == buffer)
14260 return k.bufStatus;
14261 }
14262 }
14263 return CAMERA3_BUFFER_STATUS_OK;
14264}
14265
14266/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014267 * FUNCTION : setPAAFSupport
14268 *
14269 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14270 * feature mask according to stream type and filter
14271 * arrangement
14272 *
14273 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14274 * @stream_type: stream type
14275 * @filter_arrangement: filter arrangement
14276 *
14277 * RETURN : None
14278 *==========================================================================*/
14279void QCamera3HardwareInterface::setPAAFSupport(
14280 cam_feature_mask_t& feature_mask,
14281 cam_stream_type_t stream_type,
14282 cam_color_filter_arrangement_t filter_arrangement)
14283{
Thierry Strudel3d639192016-09-09 11:52:26 -070014284 switch (filter_arrangement) {
14285 case CAM_FILTER_ARRANGEMENT_RGGB:
14286 case CAM_FILTER_ARRANGEMENT_GRBG:
14287 case CAM_FILTER_ARRANGEMENT_GBRG:
14288 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014289 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14290 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014291 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014292 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14293 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014294 }
14295 break;
14296 case CAM_FILTER_ARRANGEMENT_Y:
14297 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14298 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14299 }
14300 break;
14301 default:
14302 break;
14303 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014304 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14305 feature_mask, stream_type, filter_arrangement);
14306
14307
Thierry Strudel3d639192016-09-09 11:52:26 -070014308}
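/*
 * Worked example of the rule above: for a Bayer sensor (e.g.
 * CAM_FILTER_ARRANGEMENT_RGGB), PAAF is added to preview, analysis and video
 * streams unless the mask already carries CAM_QTI_FEATURE_PPEISCORE; for a
 * mono sensor (CAM_FILTER_ARRANGEMENT_Y) only the analysis stream gets PAAF;
 * other stream types are left untouched.
 */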
14309
14310/*===========================================================================
14311* FUNCTION : getSensorMountAngle
14312*
14313* DESCRIPTION: Retrieve sensor mount angle
14314*
14315* PARAMETERS : None
14316*
14317* RETURN : sensor mount angle in uint32_t
14318*==========================================================================*/
14319uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14320{
14321 return gCamCapability[mCameraId]->sensor_mount_angle;
14322}
14323
14324/*===========================================================================
14325* FUNCTION : getRelatedCalibrationData
14326*
14327* DESCRIPTION: Retrieve related system calibration data
14328*
14329* PARAMETERS : None
14330*
14331* RETURN : Pointer of related system calibration data
14332*==========================================================================*/
14333const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14334{
14335 return (const cam_related_system_calibration_data_t *)
14336 &(gCamCapability[mCameraId]->related_cam_calibration);
14337}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014338
14339/*===========================================================================
14340 * FUNCTION : is60HzZone
14341 *
14342 * DESCRIPTION: Whether the device is in a zone with 60 Hz mains frequency
14343 *
14344 * PARAMETERS : None
14345 *
14346 * RETURN : True if in 60Hz zone, False otherwise
14347 *==========================================================================*/
14348bool QCamera3HardwareInterface::is60HzZone()
14349{
14350 time_t t = time(NULL);
14351 struct tm lt;
14352
14353 struct tm* r = localtime_r(&t, &lt);
14354
14355 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14356 return true;
14357 else
14358 return false;
14359}
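/*
 * Worked example of the heuristic above: any UTC offset at or below -02:00,
 * or at or above +08:00, is treated as a 60 Hz region. New York (UTC-05:00)
 * and Tokyo (UTC+09:00) return true, Berlin (UTC+01:00) and Mumbai (UTC+05:30)
 * return false, and a failed localtime_r() defaults to true.
 */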
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014360
14361/*===========================================================================
14362 * FUNCTION : adjustBlackLevelForCFA
14363 *
14364 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14365 * of bayer CFA (Color Filter Array).
14366 *
14367 * PARAMETERS : @input: black level pattern in the order of RGGB
14368 * @output: black level pattern in the order of CFA
14369 * @color_arrangement: CFA color arrangement
14370 *
14371 * RETURN : None
14372 *==========================================================================*/
14373template<typename T>
14374void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14375 T input[BLACK_LEVEL_PATTERN_CNT],
14376 T output[BLACK_LEVEL_PATTERN_CNT],
14377 cam_color_filter_arrangement_t color_arrangement)
14378{
14379 switch (color_arrangement) {
14380 case CAM_FILTER_ARRANGEMENT_GRBG:
14381 output[0] = input[1];
14382 output[1] = input[0];
14383 output[2] = input[3];
14384 output[3] = input[2];
14385 break;
14386 case CAM_FILTER_ARRANGEMENT_GBRG:
14387 output[0] = input[2];
14388 output[1] = input[3];
14389 output[2] = input[0];
14390 output[3] = input[1];
14391 break;
14392 case CAM_FILTER_ARRANGEMENT_BGGR:
14393 output[0] = input[3];
14394 output[1] = input[2];
14395 output[2] = input[1];
14396 output[3] = input[0];
14397 break;
14398 case CAM_FILTER_ARRANGEMENT_RGGB:
14399 output[0] = input[0];
14400 output[1] = input[1];
14401 output[2] = input[2];
14402 output[3] = input[3];
14403 break;
14404 default:
14405 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14406 break;
14407 }
14408}
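/*
 * Worked example of the remapping above, for an RGGB-ordered input
 * {R, Gr, Gb, B} = {64, 65, 66, 67}:
 *   GRBG sensor -> {65, 64, 67, 66}
 *   GBRG sensor -> {66, 67, 64, 65}
 *   BGGR sensor -> {67, 66, 65, 64}
 *   RGGB sensor -> {64, 65, 66, 67} (unchanged)
 */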
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014409
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014410void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14411 CameraMetadata &resultMetadata,
14412 std::shared_ptr<metadata_buffer_t> settings)
14413{
14414 if (settings == nullptr) {
14415 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14416 return;
14417 }
14418
14419 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14420 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14421 }
14422
14423 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14424 String8 str((const char *)gps_methods);
14425 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14426 }
14427
14428 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14429 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14430 }
14431
14432 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14433 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14434 }
14435
14436 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14437 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14438 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14439 }
14440
14441 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14442 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14443 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14444 }
14445
14446 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14447 int32_t fwk_thumb_size[2];
14448 fwk_thumb_size[0] = thumb_size->width;
14449 fwk_thumb_size[1] = thumb_size->height;
14450 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14451 }
14452
14453 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14454 uint8_t fwk_intent = intent[0];
14455 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14456 }
14457}
14458
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014459bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14460 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14461 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014462{
14463 if (hdrPlusRequest == nullptr) return false;
14464
14465 // Check noise reduction mode is high quality.
14466 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14467 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14468 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014469 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14470 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014471 return false;
14472 }
14473
14474 // Check that the edge mode is high quality.
14475 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14476 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14477 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14478 return false;
14479 }
14480
14481 if (request.num_output_buffers != 1 ||
14482 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14483 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014484 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14485 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14486 request.output_buffers[i].stream->width,
14487 request.output_buffers[i].stream->height,
14488 request.output_buffers[i].stream->format);
14489 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014490 return false;
14491 }
14492
14493 // Get a YUV buffer from pic channel.
14494 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14495 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14496 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14497 if (res != OK) {
14498 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14499 __FUNCTION__, strerror(-res), res);
14500 return false;
14501 }
14502
14503 pbcamera::StreamBuffer buffer;
14504 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014505 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014506 buffer.data = yuvBuffer->buffer;
14507 buffer.dataSize = yuvBuffer->frame_len;
14508
14509 pbcamera::CaptureRequest pbRequest;
14510 pbRequest.id = request.frame_number;
14511 pbRequest.outputBuffers.push_back(buffer);
14512
14513 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014514 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014515 if (res != OK) {
14516 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14517 strerror(-res), res);
14518 return false;
14519 }
14520
14521 hdrPlusRequest->yuvBuffer = yuvBuffer;
14522 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14523
14524 return true;
14525}
14526
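// Asks the Easel manager client to open an HDR+ client asynchronously, unless one is already
// open or being opened. onOpened() or onOpenFailed() will be invoked when the open completes.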
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014527status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14528{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014529 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14530 return OK;
14531 }
14532
Chien-Yu Chen44abb642017-06-02 18:00:38 -070014533 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014534 if (res != OK) {
14535 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14536 strerror(-res), res);
14537 return res;
14538 }
14539 gHdrPlusClientOpening = true;
14540
14541 return OK;
14542}
14543
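// Enables HDR+ mode: ensures the HDR+ client is open (opening it asynchronously if needed),
// configures the HDR+ streams, and enables ZSL HDR+ mode so Easel starts capturing ZSL raw
// buffers.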
Chien-Yu Chenee335912017-02-09 17:53:20 -080014544status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14545{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014546 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014547
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014548 if (mHdrPlusModeEnabled) {
14549 return OK;
14550 }
14551
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014552 // Check if gHdrPlusClient is opened or being opened.
14553 if (gHdrPlusClient == nullptr) {
14554 if (gHdrPlusClientOpening) {
14555 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14556 return OK;
14557 }
14558
14559 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014560 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014561 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14562 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014563 return res;
14564 }
14565
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014566 // When opening HDR+ client completes, HDR+ mode will be enabled.
14567 return OK;
14568
Chien-Yu Chenee335912017-02-09 17:53:20 -080014569 }
14570
14571 // Configure streams for HDR+.
14572 res = configureHdrPlusStreamsLocked();
14573 if (res != OK) {
14574 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014575 return res;
14576 }
14577
14578 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14579 res = gHdrPlusClient->setZslHdrPlusMode(true);
14580 if (res != OK) {
14581 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014582 return res;
14583 }
14584
14585 mHdrPlusModeEnabled = true;
14586 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14587
14588 return OK;
14589}
14590
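// Disables ZSL HDR+ mode and closes the HDR+ client so Easel can enter low power mode.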
14591void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14592{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014593 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014594 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014595 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14596 if (res != OK) {
14597 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14598 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014599
14600 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chen44abb642017-06-02 18:00:38 -070014601 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014602 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014603 }
14604
14605 mHdrPlusModeEnabled = false;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014606 gHdrPlusClientOpening = false;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014607 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14608}
14609
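// Configures the HDR+ client streams: the input is either the HAL-provided RAW10 source
// stream or direct sensor MIPI input, and the output is the YUV stream backed by the pic
// channel.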
14610status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014611{
14612 pbcamera::InputConfiguration inputConfig;
14613 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14614 status_t res = OK;
14615
14616 // Configure HDR+ client streams.
14617 // Get input config.
14618 if (mHdrPlusRawSrcChannel) {
14619 // HDR+ input buffers will be provided by HAL.
14620 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14621 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14622 if (res != OK) {
14623 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14624 __FUNCTION__, strerror(-res), res);
14625 return res;
14626 }
14627
14628 inputConfig.isSensorInput = false;
14629 } else {
14630 // Sensor MIPI will send data to Easel.
14631 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014632 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014633 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14634 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14635 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14636 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14637 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
Yin-Chia Yeheeb10422017-05-23 11:37:46 -070014638 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014639 if (mSensorModeInfo.num_raw_bits != 10) {
14640 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14641 mSensorModeInfo.num_raw_bits);
14642 return BAD_VALUE;
14643 }
14644
14645 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014646 }
14647
14648 // Get output configurations.
14649 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014650 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014651
14652 // Easel may need to output YUV output buffers if mPictureChannel was created.
14653 pbcamera::StreamConfiguration yuvOutputConfig;
14654 if (mPictureChannel != nullptr) {
14655 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14656 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14657 if (res != OK) {
14658 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14659 __FUNCTION__, strerror(-res), res);
14660
14661 return res;
14662 }
14663
14664 outputStreamConfigs.push_back(yuvOutputConfig);
14665 }
14666
14667 // TODO: consider other channels for YUV output buffers.
14668
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014669 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014670 if (res != OK) {
14671 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14672 strerror(-res), res);
14673 return res;
14674 }
14675
14676 return OK;
14677}
14678
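// Callback invoked when the HDR+ client has been opened asynchronously: stores the client,
// sets its static metadata, and enables HDR+ mode.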
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014679void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
14680{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014681 if (client == nullptr) {
14682 ALOGE("%s: Opened client is null.", __FUNCTION__);
14683 return;
14684 }
14685
Chien-Yu Chene96475e2017-04-11 11:53:26 -070014686 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014687 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14688
14689 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014690 if (!gHdrPlusClientOpening) {
14691 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
14692 return;
14693 }
14694
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014695 gHdrPlusClient = std::move(client);
14696 gHdrPlusClientOpening = false;
14697
14698 // Set static metadata.
14699 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14700 if (res != OK) {
14701 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14702 __FUNCTION__, strerror(-res), res);
Chien-Yu Chen44abb642017-06-02 18:00:38 -070014703 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014704 gHdrPlusClient = nullptr;
14705 return;
14706 }
14707
14708 // Enable HDR+ mode.
14709 res = enableHdrPlusModeLocked();
14710 if (res != OK) {
14711 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
14712 }
14713}
14714
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014715void QCamera3HardwareInterface::onOpenFailed(status_t err)
14716{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014717 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14718 Mutex::Autolock l(gHdrPlusClientLock);
14719 gHdrPlusClientOpening = false;
14720}
14721
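// Callback invoked when the HDR+ client hits a fatal error: moves the HAL to the ERROR state
// and reports a device error.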
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014722void QCamera3HardwareInterface::onFatalError()
14723{
14724 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
14725
14726 // Set HAL state to error.
14727 pthread_mutex_lock(&mMutex);
14728 mState = ERROR;
14729 pthread_mutex_unlock(&mMutex);
14730
14731 handleCameraDeviceError();
14732}
14733
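// Callback invoked when an HDR+ capture result is ready: updates the result metadata with the
// original request settings, returns the YUV buffer to the pic channel for JPEG encoding,
// dispatches the shutter, and sends the result metadata to the framework.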
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014734void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014735 const camera_metadata_t &resultMetadata)
14736{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014737 if (result != nullptr) {
14738 if (result->outputBuffers.size() != 1) {
14739 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
14740 result->outputBuffers.size());
14741 return;
14742 }
14743
14744 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14745 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14746 result->outputBuffers[0].streamId);
14747 return;
14748 }
14749
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014750 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014751 HdrPlusPendingRequest pendingRequest;
14752 {
14753 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14754 auto req = mHdrPlusPendingRequests.find(result->requestId);
14755 pendingRequest = req->second;
14756 }
14757
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014758 // Update the result metadata with the settings of the HDR+ still capture request because
14759 // the result metadata belongs to a ZSL buffer.
14760 CameraMetadata metadata;
14761 metadata = &resultMetadata;
14762 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14763 camera_metadata_t* updatedResultMetadata = metadata.release();
14764
14765 QCamera3PicChannel *picChannel =
14766 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14767
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014768 // Check if dumping HDR+ YUV output is enabled.
14769 char prop[PROPERTY_VALUE_MAX];
14770 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14771 bool dumpYuvOutput = atoi(prop);
14772
14773 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014774 // Dump yuv buffer to a ppm file.
14775 pbcamera::StreamConfiguration outputConfig;
14776 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14777 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14778 if (rc == OK) {
14779 char buf[FILENAME_MAX] = {};
14780 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14781 result->requestId, result->outputBuffers[0].streamId,
14782 outputConfig.image.width, outputConfig.image.height);
14783
14784 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14785 } else {
14786 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14787 __FUNCTION__, strerror(-rc), rc);
14788 }
14789 }
14790
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014791 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14792 auto halMetadata = std::make_shared<metadata_buffer_t>();
14793 clear_metadata_buffer(halMetadata.get());
14794
14795 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14796 // encoding.
14797 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14798 halStreamId, /*minFrameDuration*/0);
14799 if (res == OK) {
14800 // Return the buffer to pic channel for encoding.
14801 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14802 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14803 halMetadata);
14804 } else {
14805 // Return the buffer without encoding.
14806 // TODO: This should not happen but we may want to report an error buffer to camera
14807 // service.
14808 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14809 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14810 strerror(-res), res);
14811 }
14812
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014813 // Find the timestamp
14814 camera_metadata_ro_entry_t entry;
14815 res = find_camera_metadata_ro_entry(updatedResultMetadata,
14816 ANDROID_SENSOR_TIMESTAMP, &entry);
14817 if (res != OK) {
14818 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
14819 __FUNCTION__, result->requestId, strerror(-res), res);
14820 } else {
14821 mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
14822 }
14823
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014824 // Send HDR+ metadata to framework.
14825 {
14826 pthread_mutex_lock(&mMutex);
14827
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014828 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
14829 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014830 pthread_mutex_unlock(&mMutex);
14831 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014832
14833 // Remove the HDR+ pending request.
14834 {
14835 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14836 auto req = mHdrPlusPendingRequests.find(result->requestId);
14837 mHdrPlusPendingRequests.erase(req);
14838 }
14839 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014840}
14841
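// Callback invoked when an HDR+ capture failed: returns the YUV buffer to the pic channel and
// reports buffer errors for the pending buffers of the failed frame number.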
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014842void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
14843{
14844 if (failedResult == nullptr) {
14845 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
14846 return;
14847 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014848
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014849 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014850
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014851 // Remove the pending HDR+ request.
14852 {
14853 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14854 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14855
14856 // Return the buffer to pic channel.
14857 QCamera3PicChannel *picChannel =
14858 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14859 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14860
14861 mHdrPlusPendingRequests.erase(pendingRequest);
14862 }
14863
14864 pthread_mutex_lock(&mMutex);
14865
14866 // Find the pending buffers.
14867 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
14868 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14869 if (pendingBuffers->frame_number == failedResult->requestId) {
14870 break;
14871 }
14872 pendingBuffers++;
14873 }
14874
14875 // Send out buffer errors for the pending buffers.
14876 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14877 std::vector<camera3_stream_buffer_t> streamBuffers;
14878 for (auto &buffer : pendingBuffers->mPendingBufferList) {
14879 // Prepare a stream buffer.
14880 camera3_stream_buffer_t streamBuffer = {};
14881 streamBuffer.stream = buffer.stream;
14882 streamBuffer.buffer = buffer.buffer;
14883 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14884 streamBuffer.acquire_fence = -1;
14885 streamBuffer.release_fence = -1;
14886
14887 streamBuffers.push_back(streamBuffer);
14888
14889 // Send out error buffer event.
14890 camera3_notify_msg_t notify_msg = {};
14891 notify_msg.type = CAMERA3_MSG_ERROR;
14892 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
14893 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
14894 notify_msg.message.error.error_stream = buffer.stream;
14895
14896 orchestrateNotify(&notify_msg);
14897 }
14898
14899 camera3_capture_result_t result = {};
14900 result.frame_number = pendingBuffers->frame_number;
14901 result.num_output_buffers = streamBuffers.size();
14902 result.output_buffers = &streamBuffers[0];
14903
14904 // Send out result with buffer errors.
14905 orchestrateResult(&result);
14906
14907 // Remove pending buffers.
14908 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
14909 }
14910
14911 // Remove pending request.
14912 auto halRequest = mPendingRequestsList.begin();
14913 while (halRequest != mPendingRequestsList.end()) {
14914 if (halRequest->frame_number == failedResult->requestId) {
14915 mPendingRequestsList.erase(halRequest);
14916 break;
14917 }
14918 halRequest++;
14919 }
14920
14921 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014922}
14923
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014924
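// ShutterDispatcher tracks expected shutters per frame number and delivers shutter
// notifications to the framework in frame-number order, holding back later shutters until all
// earlier ones are ready. Regular and reprocess requests are tracked in separate queues.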
14925ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
14926 mParent(parent) {}
14927
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014928void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014929{
14930 std::lock_guard<std::mutex> lock(mLock);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014931
14932 if (isReprocess) {
14933 mReprocessShutters.emplace(frameNumber, Shutter());
14934 } else {
14935 mShutters.emplace(frameNumber, Shutter());
14936 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014937}
14938
14939void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
14940{
14941 std::lock_guard<std::mutex> lock(mLock);
14942
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014943 std::map<uint32_t, Shutter> *shutters = nullptr;
14944
14945 // Find the shutter entry.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014946 auto shutter = mShutters.find(frameNumber);
14947 if (shutter == mShutters.end()) {
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014948 shutter = mReprocessShutters.find(frameNumber);
14949 if (shutter == mReprocessShutters.end()) {
14950 // Shutter was already sent.
14951 return;
14952 }
14953 shutters = &mReprocessShutters;
14954 } else {
14955 shutters = &mShutters;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014956 }
14957
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014958 // Make this frame's shutter ready.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014959 shutter->second.ready = true;
14960 shutter->second.timestamp = timestamp;
14961
14962 // Iterate through the shutters and send them out until the first one that's not ready yet.
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014963 shutter = shutters->begin();
14964 while (shutter != shutters->end()) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014965 if (!shutter->second.ready) {
14966 // If this shutter is not ready, the following shutters can't be sent.
14967 break;
14968 }
14969
14970 camera3_notify_msg_t msg = {};
14971 msg.type = CAMERA3_MSG_SHUTTER;
14972 msg.message.shutter.frame_number = shutter->first;
14973 msg.message.shutter.timestamp = shutter->second.timestamp;
14974 mParent->orchestrateNotify(&msg);
14975
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014976 shutter = shutters->erase(shutter);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014977 }
14978}
14979
14980void ShutterDispatcher::clear(uint32_t frameNumber)
14981{
14982 std::lock_guard<std::mutex> lock(mLock);
14983 mShutters.erase(frameNumber);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014984 mReprocessShutters.erase(frameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014985}
14986
14987void ShutterDispatcher::clear()
14988{
14989 std::lock_guard<std::mutex> lock(mLock);
14990
14991 // Log errors for stale shutters.
14992 for (auto &shutter : mShutters) {
14993 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
14994 __FUNCTION__, shutter.first, shutter.second.ready,
14995 shutter.second.timestamp);
14996 }
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014997
14998 // Log errors for stale reprocess shutters.
14999 for (auto &shutter : mReprocessShutters) {
15000 ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15001 __FUNCTION__, shutter.first, shutter.second.ready,
15002 shutter.second.timestamp);
15003 }
15004
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015005 mShutters.clear();
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015006 mReprocessShutters.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015007}
15008
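// OutputBufferDispatcher tracks expected output buffers per stream and returns them to the
// framework in frame-number order, holding back later buffers on a stream until all earlier
// buffers on that stream are ready.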
15009OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
15010 mParent(parent) {}
15011
15012status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
15013{
15014 std::lock_guard<std::mutex> lock(mLock);
15015 mStreamBuffers.clear();
15016 if (!streamList) {
15017 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15018 return -EINVAL;
15019 }
15020
15021 // Create a "frame-number -> buffer" map for each stream.
15022 for (uint32_t i = 0; i < streamList->num_streams; i++) {
15023 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15024 }
15025
15026 return OK;
15027}
15028
15029status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15030{
15031 std::lock_guard<std::mutex> lock(mLock);
15032
15033 // Find the "frame-number -> buffer" map for the stream.
15034 auto buffers = mStreamBuffers.find(stream);
15035 if (buffers == mStreamBuffers.end()) {
15036 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15037 return -EINVAL;
15038 }
15039
15040 // Create an unready buffer for this frame number.
15041 buffers->second.emplace(frameNumber, Buffer());
15042 return OK;
15043}
15044
15045void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15046 const camera3_stream_buffer_t &buffer)
15047{
15048 std::lock_guard<std::mutex> lock(mLock);
15049
15050 // Find the frame number -> buffer map for the stream.
15051 auto buffers = mStreamBuffers.find(buffer.stream);
15052 if (buffers == mStreamBuffers.end()) {
15053 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15054 return;
15055 }
15056
15057 // Find the unready buffer for this frame number and mark it ready.
15058 auto pendingBuffer = buffers->second.find(frameNumber);
15059 if (pendingBuffer == buffers->second.end()) {
15060 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15061 return;
15062 }
15063
15064 pendingBuffer->second.ready = true;
15065 pendingBuffer->second.buffer = buffer;
15066
15067 // Iterate through the buffers and send them out until the first one that's not ready yet.
15068 pendingBuffer = buffers->second.begin();
15069 while (pendingBuffer != buffers->second.end()) {
15070 if (!pendingBuffer->second.ready) {
15071 // If this buffer is not ready, the following buffers can't be sent.
15072 break;
15073 }
15074
15075 camera3_capture_result_t result = {};
15076 result.frame_number = pendingBuffer->first;
15077 result.num_output_buffers = 1;
15078 result.output_buffers = &pendingBuffer->second.buffer;
15079
15080 // Send out the result with the ready buffer.
15081 mParent->orchestrateResult(&result);
15082
15083 pendingBuffer = buffers->second.erase(pendingBuffer);
15084 }
15085}
15086
15087void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15088{
15089 std::lock_guard<std::mutex> lock(mLock);
15090
15091 // Log errors for stale buffers.
15092 for (auto &buffers : mStreamBuffers) {
15093 for (auto &buffer : buffers.second) {
15094 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15095 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15096 }
15097 buffers.second.clear();
15098 }
15099
15100 if (clearConfiguredStreams) {
15101 mStreamBuffers.clear();
15102 }
15103}
15104
Thierry Strudel3d639192016-09-09 11:52:26 -070015105}; //end namespace qcamera