/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
// Set a threshold for detection of missing buffers (in seconds)
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
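// Example: METADATA_MAP_SIZE(EFFECT_MODES_MAP) evaluates, at compile time, to the
// number of entries in the EFFECT_MODES_MAP table defined later in this file.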

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT   0
#define FACE_TOP    1
#define FACE_RIGHT  2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X             0
#define LEFT_EYE_Y             1
#define RIGHT_EYE_X            2
#define RIGHT_EYE_Y            3
#define MOUTH_X                4
#define MOUTH_Y                5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// TODO: Enable HDR+ for front camera after it's supported. b/37100623.
#define ENABLE_HDRPLUS_FOR_FRONT_CAMERA 0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT
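// When CHECK_GPU_PIXEL_ALIGNMENT is defined, the constructor below queries
// libadreno_utils.so (get_gpu_pixel_alignment) for the stride padding; otherwise
// mSurfaceStridePadding keeps its default of CAM_PAD_TO_64.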

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,                      CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,              CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,               CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY,  CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,                      CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,                  CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};
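// Example of the ordering rule described above: CAM_AWB_D50 appears for D50,
// DAYLIGHT and FINE_WEATHER. Because the HAL-to-Android lookup scans this table
// from index 0, a HAL value of CAM_AWB_D50 is always reported back as
// ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, the first match.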

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
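// 0xDEADBEEF marks an unused slot: openCamera() stores the real session id returned
// by the backend in sessionId[mCameraId], and closeCamera() resets it to 0xDEADBEEF.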

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
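// Usage (as seen later in this file): logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
// Events are logged with a CLOCK_BOOTTIME timestamp in milliseconds, and only when
// gEaselProfilingEnabled is set.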

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            Mutex::Autolock l(gHdrPlusClientLock);
            if (gEaselManagerClient.isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient.suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configurations requested are among those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
            case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
            case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW10:
                if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                        (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                        mPDSupported) {
                    if ((depthWidth == newStream->width) &&
                            (depthHeight == newStream->height)) {
                        sizeFound = true;
                    }
                    break;
                }
                count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
                for (size_t i = 0; i < count; i++) {
                    if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                            (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                        mPDSupported) {
                    //As per spec, depth cloud should be sample count / 16
                    uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                    if ((depthSamplesCount == newStream->width) &&
                            (1 == newStream->height)) {
                        sizeFound = true;
                    }
                    break;
                }
                count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
                /* Verify set size against generated sizes table */
                for (size_t i = 0; i < count; i++) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
            default:
                if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                        || newStream->stream_type == CAMERA3_STREAM_INPUT
                        || IS_USAGE_ZSL(newStream->usage)) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->active_array_size.width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->active_array_size.height)) {
                        sizeFound = true;
                        break;
                    }
                    /* We could potentially break here to enforce ZSL stream
                     * set from frameworks always is full active array size
                     * but it is not clear from the spec if framework will always
                     * follow that, also we have logic to override to full array
                     * size, so keeping the logic lenient at the moment
                     */
                }
                count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                        MAX_SIZES_CNT);
                for (size_t i = 0; i < count; i++) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
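// Note on the depth BLOB case above: for a PD-stat capable sensor the point-cloud
// stream is expected to be exposed as (depthWidth * depthHeight * 2) / 16 samples
// wide and exactly 1 pixel high, which is the shape validateStreamDimensions() accepts.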

/*===========================================================================
 * FUNCTION   : validateUsageFlags
 *
 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *   NO_ERROR if the usage flags are supported
 *   error code if usage flags are not supported
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateUsageFlags(
        const camera3_stream_configuration_t* streamList)
{
    for (size_t j = 0; j < streamList->num_streams; j++) {
        const camera3_stream_t *newStream = streamList->streams[j];

        if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
                (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
                 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
            continue;
        }

        // Here we only care whether it's EIS3 or not
        char is_type_value[PROPERTY_VALUE_MAX];
        property_get("persist.camera.is_type", is_type_value, "4");
        cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
        if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
                mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
            isType = IS_TYPE_NONE;

        bool isVideo = IS_USAGE_VIDEO(newStream->usage);
        bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
        bool isZSL = IS_USAGE_ZSL(newStream->usage);
        bool forcePreviewUBWC = true;
        if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
            forcePreviewUBWC = false;
        }
        cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
        cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
        cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);

        // Color space for this camera device is guaranteed to be ITU_R_601_FR.
        // So color spaces will always match.

        // Check whether underlying formats of shared streams match.
        if (isVideo && isPreview && videoFormat != previewFormat) {
            LOGE("Combined video and preview usage flag is not supported");
            return -EINVAL;
        }
        if (isPreview && isZSL && previewFormat != zslFormat) {
            LOGE("Combined preview and zsl usage flag is not supported");
            return -EINVAL;
        }
        if (isVideo && isZSL && videoFormat != zslFormat) {
            LOGE("Combined video and zsl usage flag is not supported");
            return -EINVAL;
        }
    }
    return NO_ERROR;
}
1391/*===========================================================================
1392 * FUNCTION : validateUsageFlagsForEis
1393 *
1394 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1395 *
1396 * PARAMETERS :
1397 * @stream_list : streams to be configured
1398 *
1399 * RETURN :
1400 * NO_ERROR if the usage flags are supported
1401 * error code if usage flags are not supported
1402 *
1403 *==========================================================================*/
1404int QCamera3HardwareInterface::validateUsageFlagsForEis(
1405 const camera3_stream_configuration_t* streamList)
1406{
1407 for (size_t j = 0; j < streamList->num_streams; j++) {
1408 const camera3_stream_t *newStream = streamList->streams[j];
1409
1410 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1411 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1412
1413        // Because EIS is "hard-coded" for certain use cases, and the current
1414        // implementation doesn't support shared preview and video on the same
1415        // stream, return failure if EIS is forced on.
1416 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1417 LOGE("Combined video and preview usage flag is not supported due to EIS");
1418 return -EINVAL;
1419 }
1420 }
1421 return NO_ERROR;
1422}
1423
Thierry Strudel3d639192016-09-09 11:52:26 -07001424/*==============================================================================
1425 * FUNCTION : isSupportChannelNeeded
1426 *
1427 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1428 *
1429 * PARAMETERS :
1430 * @stream_list : streams to be configured
1431 * @stream_config_info : the config info for streams to be configured
1432 *
1433 * RETURN : Boolean true/false decision
1434 *
1435 *==========================================================================*/
1436bool QCamera3HardwareInterface::isSupportChannelNeeded(
1437 camera3_stream_configuration_t *streamList,
1438 cam_stream_size_info_t stream_config_info)
1439{
1440 uint32_t i;
1441 bool pprocRequested = false;
1442 /* Check for conditions where PProc pipeline does not have any streams*/
1443    /* Check for conditions where PProc pipeline does not have any streams */
1444 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1445 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1446 pprocRequested = true;
1447 break;
1448 }
1449 }
1450
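    /* If nothing on the PProc path requested postprocessing, a support (dummy)
     * channel is needed to keep the postprocessing pipeline populated. */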
1451 if (pprocRequested == false )
1452 return true;
1453
1454 /* Dummy stream needed if only raw or jpeg streams present */
1455 for (i = 0; i < streamList->num_streams; i++) {
1456 switch(streamList->streams[i]->format) {
1457 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1458 case HAL_PIXEL_FORMAT_RAW10:
1459 case HAL_PIXEL_FORMAT_RAW16:
1460 case HAL_PIXEL_FORMAT_BLOB:
1461 break;
1462 default:
1463 return false;
1464 }
1465 }
1466 return true;
1467}
1468
1469/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001470 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001471 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001472 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001473 *
1474 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001475 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001476 *
1477 * RETURN : int32_t type of status
1478 * NO_ERROR -- success
1479 * non-zero failure code
1480 *
1481 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001482int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001483{
1484 int32_t rc = NO_ERROR;
1485
1486 cam_dimension_t max_dim = {0, 0};
1487 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1488 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1489 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1490 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1491 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1492 }
1493
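    /* Push the largest requested stream dimension to the backend as
     * CAM_INTF_PARM_MAX_DIMENSION so it can select a sensor mode that covers
     * all configured streams, then query the chosen mode back. */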
1494 clear_metadata_buffer(mParameters);
1495
1496 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1497 max_dim);
1498 if (rc != NO_ERROR) {
1499 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1500 return rc;
1501 }
1502
1503 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1504 if (rc != NO_ERROR) {
1505 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1506 return rc;
1507 }
1508
1509 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001510 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001511
1512 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1513 mParameters);
1514 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001515 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001516 return rc;
1517 }
1518
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001519 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001520 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1521 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1522 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1523 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1524 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001525
1526 return rc;
1527}
1528
1529/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001530 * FUNCTION : getCurrentSensorModeInfo
1531 *
1532 * DESCRIPTION: Get sensor mode information that is currently selected.
1533 *
1534 * PARAMETERS :
1535 * @sensorModeInfo : sensor mode information (output)
1536 *
1537 * RETURN : int32_t type of status
1538 * NO_ERROR -- success
1539 * non-zero failure code
1540 *
1541 *==========================================================================*/
1542int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1543{
1544 int32_t rc = NO_ERROR;
1545
1546 clear_metadata_buffer(mParameters);
1547 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1548
1549 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1550 mParameters);
1551 if (rc != NO_ERROR) {
1552        LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1553 return rc;
1554 }
1555
1556 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1557 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1558 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1559 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1560 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1561 sensorModeInfo.num_raw_bits);
1562
1563 return rc;
1564}
1565
1566/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001567 * FUNCTION : addToPPFeatureMask
1568 *
1569 * DESCRIPTION: add additional features to pp feature mask based on
1570 * stream type and usecase
1571 *
1572 * PARAMETERS :
1573 * @stream_format : stream type for feature mask
1574 * @stream_idx : stream idx within postprocess_mask list to change
1575 *
1576 * RETURN : None
1577 *
1578 *==========================================================================*/
1579void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1580 uint32_t stream_idx)
1581{
1582 char feature_mask_value[PROPERTY_VALUE_MAX];
1583 cam_feature_mask_t feature_mask;
1584 int args_converted;
1585 int property_len;
1586
1587 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001588#ifdef _LE_CAMERA_
1589 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1590 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1591 property_len = property_get("persist.camera.hal3.feature",
1592 feature_mask_value, swtnr_feature_mask_value);
1593#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001594 property_len = property_get("persist.camera.hal3.feature",
1595 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001596#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001597 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1598 (feature_mask_value[1] == 'x')) {
1599 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1600 } else {
1601 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1602 }
1603 if (1 != args_converted) {
1604 feature_mask = 0;
1605 LOGE("Wrong feature mask %s", feature_mask_value);
1606 return;
1607 }
1608
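    /* Example (assumed usage): "adb shell setprop persist.camera.hal3.feature 0x<mask>",
     * where <mask> is a bitwise OR of cam_feature_mask_t bits such as
     * CAM_QTI_FEATURE_SW_TNR or CAM_QCOM_FEATURE_LLVD; the bits are only honored
     * for video streams in the switch below. */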
1609 switch (stream_format) {
1610 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1611        /* Add SW TNR / LLVD to pp feature mask only if video hint is enabled */
1612 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1613 mStreamConfigInfo.postprocess_mask[stream_idx]
1614 |= CAM_QTI_FEATURE_SW_TNR;
1615 LOGH("Added SW TNR to pp feature mask");
1616 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1617 mStreamConfigInfo.postprocess_mask[stream_idx]
1618 |= CAM_QCOM_FEATURE_LLVD;
1619 LOGH("Added LLVD SeeMore to pp feature mask");
1620 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001621 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1622 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1623 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1624 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001625 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1626 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1627 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1628 CAM_QTI_FEATURE_BINNING_CORRECTION;
1629 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001630 break;
1631 }
1632 default:
1633 break;
1634 }
1635 LOGD("PP feature mask %llx",
1636 mStreamConfigInfo.postprocess_mask[stream_idx]);
1637}
1638
1639/*==============================================================================
1640 * FUNCTION : updateFpsInPreviewBuffer
1641 *
1642 * DESCRIPTION: update FPS information in preview buffer.
1643 *
1644 * PARAMETERS :
1645 * @metadata : pointer to metadata buffer
1646 * @frame_number: frame_number to look for in pending buffer list
1647 *
1648 * RETURN : None
1649 *
1650 *==========================================================================*/
1651void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1652 uint32_t frame_number)
1653{
1654 // Mark all pending buffers for this particular request
1655 // with corresponding framerate information
1656 for (List<PendingBuffersInRequest>::iterator req =
1657 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1658 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1659 for(List<PendingBufferInfo>::iterator j =
1660 req->mPendingBufferList.begin();
1661 j != req->mPendingBufferList.end(); j++) {
1662 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1663 if ((req->frame_number == frame_number) &&
1664 (channel->getStreamTypeMask() &
1665 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1666 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1667 CAM_INTF_PARM_FPS_RANGE, metadata) {
1668 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1669 struct private_handle_t *priv_handle =
1670 (struct private_handle_t *)(*(j->buffer));
1671 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1672 }
1673 }
1674 }
1675 }
1676}
1677
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001678/*==============================================================================
1679 * FUNCTION : updateTimeStampInPendingBuffers
1680 *
1681 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1682 * of a frame number
1683 *
1684 * PARAMETERS :
1685 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1686 * @timestamp : timestamp to be set
1687 *
1688 * RETURN : None
1689 *
1690 *==========================================================================*/
1691void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1692 uint32_t frameNumber, nsecs_t timestamp)
1693{
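    // Write the timestamp into the display metadata (SET_VT_TIMESTAMP in
    // qdMetaData) of every buffer pending for this frame number, presumably
    // for AV sync on the consumer side.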
1694 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1695 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1696 if (req->frame_number != frameNumber)
1697 continue;
1698
1699 for (auto k = req->mPendingBufferList.begin();
1700 k != req->mPendingBufferList.end(); k++ ) {
1701 struct private_handle_t *priv_handle =
1702 (struct private_handle_t *) (*(k->buffer));
1703 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1704 }
1705 }
1706 return;
1707}
1708
Thierry Strudel3d639192016-09-09 11:52:26 -07001709/*===========================================================================
1710 * FUNCTION : configureStreams
1711 *
1712 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1713 * and output streams.
1714 *
1715 * PARAMETERS :
1716 * @stream_list : streams to be configured
1717 *
1718 * RETURN :
1719 *
1720 *==========================================================================*/
1721int QCamera3HardwareInterface::configureStreams(
1722 camera3_stream_configuration_t *streamList)
1723{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001724 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001725 int rc = 0;
1726
1727 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001728 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001729 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001730 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001731
1732 return rc;
1733}
1734
1735/*===========================================================================
1736 * FUNCTION : configureStreamsPerfLocked
1737 *
1738 * DESCRIPTION: configureStreams while perfLock is held.
1739 *
1740 * PARAMETERS :
1741 * @stream_list : streams to be configured
1742 *
1743 * RETURN : int32_t type of status
1744 * NO_ERROR -- success
1745 * non-zero failure code
1746 *==========================================================================*/
1747int QCamera3HardwareInterface::configureStreamsPerfLocked(
1748 camera3_stream_configuration_t *streamList)
1749{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001750 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001751 int rc = 0;
1752
1753 // Sanity check stream_list
1754 if (streamList == NULL) {
1755 LOGE("NULL stream configuration");
1756 return BAD_VALUE;
1757 }
1758 if (streamList->streams == NULL) {
1759 LOGE("NULL stream list");
1760 return BAD_VALUE;
1761 }
1762
1763 if (streamList->num_streams < 1) {
1764 LOGE("Bad number of streams requested: %d",
1765 streamList->num_streams);
1766 return BAD_VALUE;
1767 }
1768
1769 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1770 LOGE("Maximum number of streams %d exceeded: %d",
1771 MAX_NUM_STREAMS, streamList->num_streams);
1772 return BAD_VALUE;
1773 }
1774
Jason Leec4cf5032017-05-24 18:31:41 -07001775 mOpMode = streamList->operation_mode;
1776 LOGD("mOpMode: %d", mOpMode);
1777
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001778 rc = validateUsageFlags(streamList);
1779 if (rc != NO_ERROR) {
1780 return rc;
1781 }
1782
Thierry Strudel3d639192016-09-09 11:52:26 -07001783 /* first invalidate all the steams in the mStreamList
1784 * if they appear again, they will be validated */
1785 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1786 it != mStreamInfo.end(); it++) {
1787 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1788 if (channel) {
1789 channel->stop();
1790 }
1791 (*it)->status = INVALID;
1792 }
1793
1794 if (mRawDumpChannel) {
1795 mRawDumpChannel->stop();
1796 delete mRawDumpChannel;
1797 mRawDumpChannel = NULL;
1798 }
1799
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001800 if (mHdrPlusRawSrcChannel) {
1801 mHdrPlusRawSrcChannel->stop();
1802 delete mHdrPlusRawSrcChannel;
1803 mHdrPlusRawSrcChannel = NULL;
1804 }
1805
Thierry Strudel3d639192016-09-09 11:52:26 -07001806 if (mSupportChannel)
1807 mSupportChannel->stop();
1808
1809 if (mAnalysisChannel) {
1810 mAnalysisChannel->stop();
1811 }
1812 if (mMetadataChannel) {
1813 /* If content of mStreamInfo is not 0, there is metadata stream */
1814 mMetadataChannel->stop();
1815 }
1816 if (mChannelHandle) {
1817 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1818 mChannelHandle);
1819 LOGD("stopping channel %d", mChannelHandle);
1820 }
1821
1822 pthread_mutex_lock(&mMutex);
1823
1824 // Check state
1825 switch (mState) {
1826 case INITIALIZED:
1827 case CONFIGURED:
1828 case STARTED:
1829 /* valid state */
1830 break;
1831 default:
1832 LOGE("Invalid state %d", mState);
1833 pthread_mutex_unlock(&mMutex);
1834 return -ENODEV;
1835 }
1836
1837 /* Check whether we have video stream */
1838 m_bIs4KVideo = false;
1839 m_bIsVideo = false;
1840 m_bEisSupportedSize = false;
1841 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001842 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001843 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001844 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001845 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001846 uint32_t videoWidth = 0U;
1847 uint32_t videoHeight = 0U;
1848 size_t rawStreamCnt = 0;
1849 size_t stallStreamCnt = 0;
1850 size_t processedStreamCnt = 0;
1851 // Number of streams on ISP encoder path
1852 size_t numStreamsOnEncoder = 0;
1853 size_t numYuv888OnEncoder = 0;
1854 bool bYuv888OverrideJpeg = false;
1855 cam_dimension_t largeYuv888Size = {0, 0};
1856 cam_dimension_t maxViewfinderSize = {0, 0};
1857 bool bJpegExceeds4K = false;
1858 bool bJpegOnEncoder = false;
1859 bool bUseCommonFeatureMask = false;
1860 cam_feature_mask_t commonFeatureMask = 0;
1861 bool bSmallJpegSize = false;
1862 uint32_t width_ratio;
1863 uint32_t height_ratio;
1864 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1865 camera3_stream_t *inputStream = NULL;
1866 bool isJpeg = false;
1867 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001868 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001869 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001870
1871 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1872
1873 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001874 uint8_t eis_prop_set;
1875 uint32_t maxEisWidth = 0;
1876 uint32_t maxEisHeight = 0;
1877
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001878 // Initialize all instant AEC related variables
1879 mInstantAEC = false;
1880 mResetInstantAEC = false;
1881 mInstantAECSettledFrameNumber = 0;
1882 mAecSkipDisplayFrameBound = 0;
1883 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001884 mCurrFeatureState = 0;
1885 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001886
Thierry Strudel3d639192016-09-09 11:52:26 -07001887 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1888
1889 size_t count = IS_TYPE_MAX;
1890 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1891 for (size_t i = 0; i < count; i++) {
1892 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001893 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1894 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001895 break;
1896 }
1897 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001898
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001899 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001900 maxEisWidth = MAX_EIS_WIDTH;
1901 maxEisHeight = MAX_EIS_HEIGHT;
1902 }
1903
1904 /* EIS setprop control */
1905 char eis_prop[PROPERTY_VALUE_MAX];
1906 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001907 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001908 eis_prop_set = (uint8_t)atoi(eis_prop);
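    /* e.g. "adb shell setprop persist.camera.eis.enable 0" (assumed usage) turns
     * EIS off at the next stream configuration; the property defaults to "1". */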
1909
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001910 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001911 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1912
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001913 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1914 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001915
Thierry Strudel3d639192016-09-09 11:52:26 -07001916 /* stream configurations */
1917 for (size_t i = 0; i < streamList->num_streams; i++) {
1918 camera3_stream_t *newStream = streamList->streams[i];
1919 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1920 "height = %d, rotation = %d, usage = 0x%x",
1921 i, newStream->stream_type, newStream->format,
1922 newStream->width, newStream->height, newStream->rotation,
1923 newStream->usage);
1924 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1925 newStream->stream_type == CAMERA3_STREAM_INPUT){
1926 isZsl = true;
1927 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001928 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1929 IS_USAGE_PREVIEW(newStream->usage)) {
1930 isPreview = true;
1931 }
1932
Thierry Strudel3d639192016-09-09 11:52:26 -07001933 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1934 inputStream = newStream;
1935 }
1936
Emilian Peev7650c122017-01-19 08:24:33 -08001937 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1938 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001939 isJpeg = true;
1940 jpegSize.width = newStream->width;
1941 jpegSize.height = newStream->height;
1942 if (newStream->width > VIDEO_4K_WIDTH ||
1943 newStream->height > VIDEO_4K_HEIGHT)
1944 bJpegExceeds4K = true;
1945 }
1946
1947 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1948 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1949 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001950 // In HAL3 we can have multiple different video streams.
1951 // The variables video width and height are used below as
1952 // dimensions of the biggest of them
1953 if (videoWidth < newStream->width ||
1954 videoHeight < newStream->height) {
1955 videoWidth = newStream->width;
1956 videoHeight = newStream->height;
1957 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001958 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1959 (VIDEO_4K_HEIGHT <= newStream->height)) {
1960 m_bIs4KVideo = true;
1961 }
1962 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1963 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001964
Thierry Strudel3d639192016-09-09 11:52:26 -07001965 }
1966 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1967 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1968 switch (newStream->format) {
1969 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001970 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1971 depthPresent = true;
1972 break;
1973 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001974 stallStreamCnt++;
1975 if (isOnEncoder(maxViewfinderSize, newStream->width,
1976 newStream->height)) {
1977 numStreamsOnEncoder++;
1978 bJpegOnEncoder = true;
1979 }
1980 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1981 newStream->width);
1982 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1983                            newStream->height);
1984 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1985 "FATAL: max_downscale_factor cannot be zero and so assert");
1986 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1987 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1988 LOGH("Setting small jpeg size flag to true");
1989 bSmallJpegSize = true;
1990 }
1991 break;
1992 case HAL_PIXEL_FORMAT_RAW10:
1993 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1994 case HAL_PIXEL_FORMAT_RAW16:
1995 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001996 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1997 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1998 pdStatCount++;
1999 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002000 break;
2001 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2002 processedStreamCnt++;
2003 if (isOnEncoder(maxViewfinderSize, newStream->width,
2004 newStream->height)) {
2005 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2006 !IS_USAGE_ZSL(newStream->usage)) {
2007 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2008 }
2009 numStreamsOnEncoder++;
2010 }
2011 break;
2012 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2013 processedStreamCnt++;
2014 if (isOnEncoder(maxViewfinderSize, newStream->width,
2015 newStream->height)) {
2016 // If Yuv888 size is not greater than 4K, set feature mask
2017 // to SUPERSET so that it support concurrent request on
2018 // YUV and JPEG.
2019 if (newStream->width <= VIDEO_4K_WIDTH &&
2020 newStream->height <= VIDEO_4K_HEIGHT) {
2021 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2022 }
2023 numStreamsOnEncoder++;
2024 numYuv888OnEncoder++;
2025 largeYuv888Size.width = newStream->width;
2026 largeYuv888Size.height = newStream->height;
2027 }
2028 break;
2029 default:
2030 processedStreamCnt++;
2031 if (isOnEncoder(maxViewfinderSize, newStream->width,
2032 newStream->height)) {
2033 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2034 numStreamsOnEncoder++;
2035 }
2036 break;
2037 }
2038
2039 }
2040 }
2041
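    /* EIS is not applied for front cameras or when there is no video stream
     * in the configuration. */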
2042 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2043 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2044 !m_bIsVideo) {
2045 m_bEisEnable = false;
2046 }
2047
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002048 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2049 pthread_mutex_unlock(&mMutex);
2050 return -EINVAL;
2051 }
2052
Thierry Strudel54dc9782017-02-15 12:12:10 -08002053 uint8_t forceEnableTnr = 0;
2054 char tnr_prop[PROPERTY_VALUE_MAX];
2055 memset(tnr_prop, 0, sizeof(tnr_prop));
2056 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2057 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2058
Thierry Strudel3d639192016-09-09 11:52:26 -07002059 /* Logic to enable/disable TNR based on specific config size/etc.*/
2060 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002061 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2062 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002063 else if (forceEnableTnr)
2064 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002065
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002066 char videoHdrProp[PROPERTY_VALUE_MAX];
2067 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2068 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2069 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2070
2071 if (hdr_mode_prop == 1 && m_bIsVideo &&
2072 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2073 m_bVideoHdrEnabled = true;
2074 else
2075 m_bVideoHdrEnabled = false;
2076
2077
Thierry Strudel3d639192016-09-09 11:52:26 -07002078 /* Check if num_streams is sane */
2079 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2080 rawStreamCnt > MAX_RAW_STREAMS ||
2081 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2082        LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2083 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2084 pthread_mutex_unlock(&mMutex);
2085 return -EINVAL;
2086 }
2087 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002088 if (isZsl && m_bIs4KVideo) {
2089 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002090 pthread_mutex_unlock(&mMutex);
2091 return -EINVAL;
2092 }
2093 /* Check if stream sizes are sane */
2094 if (numStreamsOnEncoder > 2) {
2095 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2096 pthread_mutex_unlock(&mMutex);
2097 return -EINVAL;
2098 } else if (1 < numStreamsOnEncoder){
2099 bUseCommonFeatureMask = true;
2100 LOGH("Multiple streams above max viewfinder size, common mask needed");
2101 }
2102
2103 /* Check if BLOB size is greater than 4k in 4k recording case */
2104 if (m_bIs4KVideo && bJpegExceeds4K) {
2105 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2106 pthread_mutex_unlock(&mMutex);
2107 return -EINVAL;
2108 }
2109
Emilian Peev7650c122017-01-19 08:24:33 -08002110 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2111 depthPresent) {
2112 LOGE("HAL doesn't support depth streams in HFR mode!");
2113 pthread_mutex_unlock(&mMutex);
2114 return -EINVAL;
2115 }
2116
Thierry Strudel3d639192016-09-09 11:52:26 -07002117 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2118 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2119 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2120 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2121 // configurations:
2122 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2123 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2124 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2125 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2126 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2127 __func__);
2128 pthread_mutex_unlock(&mMutex);
2129 return -EINVAL;
2130 }
2131
2132 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2133 // the YUV stream's size is greater or equal to the JPEG size, set common
2134 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2135 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2136 jpegSize.width, jpegSize.height) &&
2137 largeYuv888Size.width > jpegSize.width &&
2138 largeYuv888Size.height > jpegSize.height) {
2139 bYuv888OverrideJpeg = true;
2140 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2141 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2142 }
2143
2144 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2145 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2146 commonFeatureMask);
2147 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2148 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2149
2150 rc = validateStreamDimensions(streamList);
2151 if (rc == NO_ERROR) {
2152 rc = validateStreamRotations(streamList);
2153 }
2154 if (rc != NO_ERROR) {
2155 LOGE("Invalid stream configuration requested!");
2156 pthread_mutex_unlock(&mMutex);
2157 return rc;
2158 }
2159
Emilian Peev0f3c3162017-03-15 12:57:46 +00002160 if (1 < pdStatCount) {
2161 LOGE("HAL doesn't support multiple PD streams");
2162 pthread_mutex_unlock(&mMutex);
2163 return -EINVAL;
2164 }
2165
2166 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2167 (1 == pdStatCount)) {
2168 LOGE("HAL doesn't support PD streams in HFR mode!");
2169 pthread_mutex_unlock(&mMutex);
2170 return -EINVAL;
2171 }
2172
Thierry Strudel3d639192016-09-09 11:52:26 -07002173 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2174 for (size_t i = 0; i < streamList->num_streams; i++) {
2175 camera3_stream_t *newStream = streamList->streams[i];
2176 LOGH("newStream type = %d, stream format = %d "
2177 "stream size : %d x %d, stream rotation = %d",
2178 newStream->stream_type, newStream->format,
2179 newStream->width, newStream->height, newStream->rotation);
2180 //if the stream is in the mStreamList validate it
2181 bool stream_exists = false;
2182 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2183 it != mStreamInfo.end(); it++) {
2184 if ((*it)->stream == newStream) {
2185 QCamera3ProcessingChannel *channel =
2186 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2187 stream_exists = true;
2188 if (channel)
2189 delete channel;
2190 (*it)->status = VALID;
2191 (*it)->stream->priv = NULL;
2192 (*it)->channel = NULL;
2193 }
2194 }
2195 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2196 //new stream
2197 stream_info_t* stream_info;
2198 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2199 if (!stream_info) {
2200 LOGE("Could not allocate stream info");
2201 rc = -ENOMEM;
2202 pthread_mutex_unlock(&mMutex);
2203 return rc;
2204 }
2205 stream_info->stream = newStream;
2206 stream_info->status = VALID;
2207 stream_info->channel = NULL;
2208 mStreamInfo.push_back(stream_info);
2209 }
2210 /* Covers Opaque ZSL and API1 F/W ZSL */
2211 if (IS_USAGE_ZSL(newStream->usage)
2212 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2213 if (zslStream != NULL) {
2214 LOGE("Multiple input/reprocess streams requested!");
2215 pthread_mutex_unlock(&mMutex);
2216 return BAD_VALUE;
2217 }
2218 zslStream = newStream;
2219 }
2220 /* Covers YUV reprocess */
2221 if (inputStream != NULL) {
2222 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2223 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2224 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2225 && inputStream->width == newStream->width
2226 && inputStream->height == newStream->height) {
2227 if (zslStream != NULL) {
2228                /* This scenario indicates that multiple YUV streams with the
2229                 * same size as the input stream have been requested. Since the
2230                 * zsl stream handle is solely used for overriding the size of
2231                 * streams that share h/w streams, we just make a guess here as
2232                 * to which of the streams is the ZSL stream. This will be
2233                 * refactored once we have generic logic for streams sharing
2234                 * encoder output. */
2235                LOGH("Warning, Multiple input/reprocess streams requested!");
2236 }
2237 zslStream = newStream;
2238 }
2239 }
2240 }
2241
2242 /* If a zsl stream is set, we know that we have configured at least one input or
2243 bidirectional stream */
2244 if (NULL != zslStream) {
2245 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2246 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2247 mInputStreamInfo.format = zslStream->format;
2248 mInputStreamInfo.usage = zslStream->usage;
2249 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2250 mInputStreamInfo.dim.width,
2251 mInputStreamInfo.dim.height,
2252 mInputStreamInfo.format, mInputStreamInfo.usage);
2253 }
2254
2255 cleanAndSortStreamInfo();
2256 if (mMetadataChannel) {
2257 delete mMetadataChannel;
2258 mMetadataChannel = NULL;
2259 }
2260 if (mSupportChannel) {
2261 delete mSupportChannel;
2262 mSupportChannel = NULL;
2263 }
2264
2265 if (mAnalysisChannel) {
2266 delete mAnalysisChannel;
2267 mAnalysisChannel = NULL;
2268 }
2269
2270 if (mDummyBatchChannel) {
2271 delete mDummyBatchChannel;
2272 mDummyBatchChannel = NULL;
2273 }
2274
Emilian Peev7650c122017-01-19 08:24:33 -08002275 if (mDepthChannel) {
2276 mDepthChannel = NULL;
2277 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002278 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002279
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002280 mShutterDispatcher.clear();
2281 mOutputBufferDispatcher.clear();
2282
Thierry Strudel2896d122017-02-23 19:18:03 -08002283 char is_type_value[PROPERTY_VALUE_MAX];
2284 property_get("persist.camera.is_type", is_type_value, "4");
2285 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2286
Binhao Line406f062017-05-03 14:39:44 -07002287 char property_value[PROPERTY_VALUE_MAX];
2288 property_get("persist.camera.gzoom.at", property_value, "0");
2289 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002290 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2291 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2292 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2293 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002294
2295 property_get("persist.camera.gzoom.4k", property_value, "0");
2296 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2297
Thierry Strudel3d639192016-09-09 11:52:26 -07002298 //Create metadata channel and initialize it
2299 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2300 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2301 gCamCapability[mCameraId]->color_arrangement);
2302 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2303 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002304 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002305 if (mMetadataChannel == NULL) {
2306 LOGE("failed to allocate metadata channel");
2307 rc = -ENOMEM;
2308 pthread_mutex_unlock(&mMutex);
2309 return rc;
2310 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002311 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002312 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2313 if (rc < 0) {
2314 LOGE("metadata channel initialization failed");
2315 delete mMetadataChannel;
2316 mMetadataChannel = NULL;
2317 pthread_mutex_unlock(&mMutex);
2318 return rc;
2319 }
2320
Thierry Strudel2896d122017-02-23 19:18:03 -08002321 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002322 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002323 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002324 // Keep track of preview/video streams indices.
2325 // There could be more than one preview streams, but only one video stream.
2326 int32_t video_stream_idx = -1;
2327 int32_t preview_stream_idx[streamList->num_streams];
2328 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002329 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2330 /* Allocate channel objects for the requested streams */
2331 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002332
Thierry Strudel3d639192016-09-09 11:52:26 -07002333 camera3_stream_t *newStream = streamList->streams[i];
2334 uint32_t stream_usage = newStream->usage;
2335 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2336 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2337 struct camera_info *p_info = NULL;
2338 pthread_mutex_lock(&gCamLock);
2339 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2340 pthread_mutex_unlock(&gCamLock);
2341 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2342 || IS_USAGE_ZSL(newStream->usage)) &&
2343 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002344 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002345 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002346 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2347 if (bUseCommonFeatureMask)
2348 zsl_ppmask = commonFeatureMask;
2349 else
2350 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002351 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002352 if (numStreamsOnEncoder > 0)
2353 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2354 else
2355 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002356 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002357 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002358 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002359 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002360 LOGH("Input stream configured, reprocess config");
2361 } else {
2362 //for non zsl streams find out the format
2363 switch (newStream->format) {
2364 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2365 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002366 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002367 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2368 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2369 /* add additional features to pp feature mask */
2370 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2371 mStreamConfigInfo.num_streams);
2372
2373 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2374 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2375 CAM_STREAM_TYPE_VIDEO;
2376 if (m_bTnrEnabled && m_bTnrVideo) {
2377 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2378 CAM_QCOM_FEATURE_CPP_TNR;
2379 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2380 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2381 ~CAM_QCOM_FEATURE_CDS;
2382 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002383 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2384 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2385 CAM_QTI_FEATURE_PPEISCORE;
2386 }
Binhao Line406f062017-05-03 14:39:44 -07002387 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2388 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2389 CAM_QCOM_FEATURE_GOOG_ZOOM;
2390 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002391 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002392 } else {
2393 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2394 CAM_STREAM_TYPE_PREVIEW;
2395 if (m_bTnrEnabled && m_bTnrPreview) {
2396 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2397 CAM_QCOM_FEATURE_CPP_TNR;
2398 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2399 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2400 ~CAM_QCOM_FEATURE_CDS;
2401 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002402 if(!m_bSwTnrPreview) {
2403 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2404 ~CAM_QTI_FEATURE_SW_TNR;
2405 }
Binhao Line406f062017-05-03 14:39:44 -07002406 if (is_goog_zoom_preview_enabled) {
2407 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2408 CAM_QCOM_FEATURE_GOOG_ZOOM;
2409 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002410 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002411 padding_info.width_padding = mSurfaceStridePadding;
2412 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002413 previewSize.width = (int32_t)newStream->width;
2414 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002415 }
2416 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2417 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2418 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2419 newStream->height;
2420 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2421 newStream->width;
2422 }
2423 }
2424 break;
2425 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002426 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002427 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2428 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2429 if (bUseCommonFeatureMask)
2430 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2431 commonFeatureMask;
2432 else
2433 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2434 CAM_QCOM_FEATURE_NONE;
2435 } else {
2436 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2437 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2438 }
2439 break;
2440 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002441 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002442 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2443 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2444 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2445 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2446 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002447 /* Remove rotation if it is not supported
2448 for 4K LiveVideo snapshot case (online processing) */
2449 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2450 CAM_QCOM_FEATURE_ROTATION)) {
2451 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2452 &= ~CAM_QCOM_FEATURE_ROTATION;
2453 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002454 } else {
2455 if (bUseCommonFeatureMask &&
2456 isOnEncoder(maxViewfinderSize, newStream->width,
2457 newStream->height)) {
2458 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2459 } else {
2460 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2461 }
2462 }
2463 if (isZsl) {
2464 if (zslStream) {
2465 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2466 (int32_t)zslStream->width;
2467 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2468 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002469 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2470 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002471 } else {
2472 LOGE("Error, No ZSL stream identified");
2473 pthread_mutex_unlock(&mMutex);
2474 return -EINVAL;
2475 }
2476 } else if (m_bIs4KVideo) {
2477 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2478 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2479 } else if (bYuv888OverrideJpeg) {
2480 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2481 (int32_t)largeYuv888Size.width;
2482 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2483 (int32_t)largeYuv888Size.height;
2484 }
2485 break;
2486 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2487 case HAL_PIXEL_FORMAT_RAW16:
2488 case HAL_PIXEL_FORMAT_RAW10:
2489 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2490 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2491 isRawStreamRequested = true;
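            /* For a RAW16 stream in the depth dataspace (phase-detection
             * statistics), propagate the sensor's PD sub-format, meta RAW
             * format, data type and virtual channel so the backend can route
             * the PD data; assumes mPDIndex was resolved from the capabilities. */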
Emilian Peev0f3c3162017-03-15 12:57:46 +00002492 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2493 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2494 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2495 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2496 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2497 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2498 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2499 gCamCapability[mCameraId]->dt[mPDIndex];
2500 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2501 gCamCapability[mCameraId]->vc[mPDIndex];
2502 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002503 break;
2504 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002505 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002506 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2507 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2508 break;
2509 }
2510 }
2511
2512 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2513 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2514 gCamCapability[mCameraId]->color_arrangement);
2515
2516 if (newStream->priv == NULL) {
2517 //New stream, construct channel
2518 switch (newStream->stream_type) {
2519 case CAMERA3_STREAM_INPUT:
2520 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2521                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE; //WR for in-place algos
2522 break;
2523 case CAMERA3_STREAM_BIDIRECTIONAL:
2524 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2525 GRALLOC_USAGE_HW_CAMERA_WRITE;
2526 break;
2527 case CAMERA3_STREAM_OUTPUT:
2528                /* For video encoding streams, set the read/write rarely
2529                 * flags so that the buffers may be allocated un-cached */
2530 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2531 newStream->usage |=
2532 (GRALLOC_USAGE_SW_READ_RARELY |
2533 GRALLOC_USAGE_SW_WRITE_RARELY |
2534 GRALLOC_USAGE_HW_CAMERA_WRITE);
2535 else if (IS_USAGE_ZSL(newStream->usage))
2536 {
2537 LOGD("ZSL usage flag skipping");
2538 }
2539 else if (newStream == zslStream
2540 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2541 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2542 } else
2543 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2544 break;
2545 default:
2546 LOGE("Invalid stream_type %d", newStream->stream_type);
2547 break;
2548 }
2549
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002550 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002551 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2552 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2553 QCamera3ProcessingChannel *channel = NULL;
2554 switch (newStream->format) {
2555 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2556 if ((newStream->usage &
2557 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2558 (streamList->operation_mode ==
2559 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2560 ) {
2561 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2562 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002563 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002564 this,
2565 newStream,
2566 (cam_stream_type_t)
2567 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2568 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2569 mMetadataChannel,
2570 0); //heap buffers are not required for HFR video channel
2571 if (channel == NULL) {
2572 LOGE("allocation of channel failed");
2573 pthread_mutex_unlock(&mMutex);
2574 return -ENOMEM;
2575 }
2576 //channel->getNumBuffers() will return 0 here so use
2577                    //MAX_INFLIGHT_HFR_REQUESTS
2578 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2579 newStream->priv = channel;
2580 LOGI("num video buffers in HFR mode: %d",
2581 MAX_INFLIGHT_HFR_REQUESTS);
2582 } else {
2583 /* Copy stream contents in HFR preview only case to create
2584 * dummy batch channel so that sensor streaming is in
2585 * HFR mode */
2586 if (!m_bIsVideo && (streamList->operation_mode ==
2587 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2588 mDummyBatchStream = *newStream;
2589 }
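                        /* EIS 3.0 video presumably holds extra frames for
                         * stabilization, so give its stream the deeper
                         * MAX_VIDEO_BUFFERS pool instead of the default
                         * MAX_INFLIGHT_REQUESTS. */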
Thierry Strudel2896d122017-02-23 19:18:03 -08002590 int bufferCount = MAX_INFLIGHT_REQUESTS;
2591 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2592 CAM_STREAM_TYPE_VIDEO) {
2593 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2594 bufferCount = MAX_VIDEO_BUFFERS;
2595 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002596 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2597 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002598 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002599 this,
2600 newStream,
2601 (cam_stream_type_t)
2602 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2603 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2604 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002605 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002606 if (channel == NULL) {
2607 LOGE("allocation of channel failed");
2608 pthread_mutex_unlock(&mMutex);
2609 return -ENOMEM;
2610 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002611 /* disable UBWC for preview, though supported,
2612 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002613 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002614 (previewSize.width == (int32_t)videoWidth)&&
2615 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002616 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002617 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002618 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002619 /* When goog_zoom is linked to the preview or video stream,
2620 * disable ubwc to the linked stream */
2621 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2622 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2623 channel->setUBWCEnabled(false);
2624 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002625 newStream->max_buffers = channel->getNumBuffers();
2626 newStream->priv = channel;
2627 }
2628 break;
2629 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2630 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2631 mChannelHandle,
2632 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002633 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002634 this,
2635 newStream,
2636 (cam_stream_type_t)
2637 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2638 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2639 mMetadataChannel);
2640 if (channel == NULL) {
2641 LOGE("allocation of YUV channel failed");
2642 pthread_mutex_unlock(&mMutex);
2643 return -ENOMEM;
2644 }
2645 newStream->max_buffers = channel->getNumBuffers();
2646 newStream->priv = channel;
2647 break;
2648 }
2649 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2650 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002651 case HAL_PIXEL_FORMAT_RAW10: {
2652 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2653 (HAL_DATASPACE_DEPTH != newStream->data_space))
2654 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002655 mRawChannel = new QCamera3RawChannel(
2656 mCameraHandle->camera_handle, mChannelHandle,
2657 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002658 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002659 this, newStream,
2660 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002661 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002662 if (mRawChannel == NULL) {
2663 LOGE("allocation of raw channel failed");
2664 pthread_mutex_unlock(&mMutex);
2665 return -ENOMEM;
2666 }
2667 newStream->max_buffers = mRawChannel->getNumBuffers();
2668 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2669 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002670 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002671 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002672 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2673 mDepthChannel = new QCamera3DepthChannel(
2674 mCameraHandle->camera_handle, mChannelHandle,
2675 mCameraHandle->ops, NULL, NULL, &padding_info,
2676 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2677 mMetadataChannel);
2678 if (NULL == mDepthChannel) {
2679 LOGE("Allocation of depth channel failed");
2680 pthread_mutex_unlock(&mMutex);
2681 return NO_MEMORY;
2682 }
2683 newStream->priv = mDepthChannel;
2684 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2685 } else {
2686 // Max live snapshot inflight buffer is 1. This is to mitigate
2687 // frame drop issues for video snapshot. The more buffers being
2688 // allocated, the more frame drops there are.
2689 mPictureChannel = new QCamera3PicChannel(
2690 mCameraHandle->camera_handle, mChannelHandle,
2691 mCameraHandle->ops, captureResultCb,
2692 setBufferErrorStatus, &padding_info, this, newStream,
2693 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2694 m_bIs4KVideo, isZsl, mMetadataChannel,
2695 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2696 if (mPictureChannel == NULL) {
2697 LOGE("allocation of channel failed");
2698 pthread_mutex_unlock(&mMutex);
2699 return -ENOMEM;
2700 }
2701 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2702 newStream->max_buffers = mPictureChannel->getNumBuffers();
2703 mPictureChannel->overrideYuvSize(
2704 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2705 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002707 break;
2708
2709 default:
2710 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002711 pthread_mutex_unlock(&mMutex);
2712 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002713 }
2714 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2715 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2716 } else {
2717 LOGE("Error, Unknown stream type");
2718 pthread_mutex_unlock(&mMutex);
2719 return -EINVAL;
2720 }
2721
2722 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002723 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002724 // Here we only care whether it's EIS3 or not
2725 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2726 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2727 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2728 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002729 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002730 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002731 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002732 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2733 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2734 }
2735 }
2736
2737 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2738 it != mStreamInfo.end(); it++) {
2739 if ((*it)->stream == newStream) {
2740 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2741 break;
2742 }
2743 }
2744 } else {
2745 // Channel already exists for this stream
2746 // Do nothing for now
2747 }
2748 padding_info = gCamCapability[mCameraId]->padding_info;
2749
Emilian Peev7650c122017-01-19 08:24:33 -08002750 /* Do not add entries for input & depth streams in the meta stream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002751 * since there is no real stream associated with them
2752 */
Emilian Peev7650c122017-01-19 08:24:33 -08002753 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002754 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2755 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002756 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002757 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002758 }
2759
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002760 // Let buffer dispatcher know the configured streams.
2761 mOutputBufferDispatcher.configureStreams(streamList);
2762
Binhao Lincdb362a2017-04-20 13:31:54 -07002763 // By default, preview stream TNR is disabled.
2764 // Enable TNR to the preview stream if all conditions below are satisfied:
2765 // 1. resolution <= 1080p.
2766 // 2. preview resolution == video resolution.
2767 // 3. video stream TNR is enabled.
2768 // 4. EIS2.0
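    // For example (illustrative sizes only): a 1920x1080 video stream with video
    // TNR enabled and EIS 2.0 in use, paired with a 1920x1080 preview stream,
    // meets all four conditions, so CAM_QCOM_FEATURE_CPP_TNR is added to the
    // preview postprocess mask and CAM_QCOM_FEATURE_CDS is cleared from it.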
2769 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2770 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2771 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2772 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2773 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2774 video_stream->width == preview_stream->width &&
2775 video_stream->height == preview_stream->height) {
2776 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2777 CAM_QCOM_FEATURE_CPP_TNR;
2778 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2779 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2780 ~CAM_QCOM_FEATURE_CDS;
2781 }
2782 }
2783
Thierry Strudel2896d122017-02-23 19:18:03 -08002784 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2785 onlyRaw = false;
2786 }
2787
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002788 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002789 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002790 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002791 cam_analysis_info_t analysisInfo;
2792 int32_t ret = NO_ERROR;
2793 ret = mCommon.getAnalysisInfo(
2794 FALSE,
2795 analysisFeatureMask,
2796 &analysisInfo);
2797 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002798 cam_color_filter_arrangement_t analysis_color_arrangement =
2799 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2800 CAM_FILTER_ARRANGEMENT_Y :
2801 gCamCapability[mCameraId]->color_arrangement);
2802 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2803 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002804 cam_dimension_t analysisDim;
2805 analysisDim = mCommon.getMatchingDimension(previewSize,
2806 analysisInfo.analysis_recommended_res);
2807
2808 mAnalysisChannel = new QCamera3SupportChannel(
2809 mCameraHandle->camera_handle,
2810 mChannelHandle,
2811 mCameraHandle->ops,
2812 &analysisInfo.analysis_padding_info,
2813 analysisFeatureMask,
2814 CAM_STREAM_TYPE_ANALYSIS,
2815 &analysisDim,
2816 (analysisInfo.analysis_format
2817 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2818 : CAM_FORMAT_YUV_420_NV21),
2819 analysisInfo.hw_analysis_supported,
2820 gCamCapability[mCameraId]->color_arrangement,
2821 this,
2822 0); // force buffer count to 0
2823 } else {
2824 LOGW("getAnalysisInfo failed, ret = %d", ret);
2825 }
2826 if (!mAnalysisChannel) {
2827 LOGW("Analysis channel cannot be created");
2828 }
2829 }
2830
Thierry Strudel3d639192016-09-09 11:52:26 -07002831 //RAW DUMP channel
2832 if (mEnableRawDump && isRawStreamRequested == false){
2833 cam_dimension_t rawDumpSize;
2834 rawDumpSize = getMaxRawSize(mCameraId);
2835 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2836 setPAAFSupport(rawDumpFeatureMask,
2837 CAM_STREAM_TYPE_RAW,
2838 gCamCapability[mCameraId]->color_arrangement);
2839 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2840 mChannelHandle,
2841 mCameraHandle->ops,
2842 rawDumpSize,
2843 &padding_info,
2844 this, rawDumpFeatureMask);
2845 if (!mRawDumpChannel) {
2846 LOGE("Raw Dump channel cannot be created");
2847 pthread_mutex_unlock(&mMutex);
2848 return -ENOMEM;
2849 }
2850 }
2851
Thierry Strudel3d639192016-09-09 11:52:26 -07002852 if (mAnalysisChannel) {
2853 cam_analysis_info_t analysisInfo;
2854 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2855 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2856 CAM_STREAM_TYPE_ANALYSIS;
2857 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2858 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002859 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002860 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2861 &analysisInfo);
2862 if (rc != NO_ERROR) {
2863 LOGE("getAnalysisInfo failed, ret = %d", rc);
2864 pthread_mutex_unlock(&mMutex);
2865 return rc;
2866 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002867 cam_color_filter_arrangement_t analysis_color_arrangement =
2868 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2869 CAM_FILTER_ARRANGEMENT_Y :
2870 gCamCapability[mCameraId]->color_arrangement);
2871 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2872 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2873 analysis_color_arrangement);
2874
Thierry Strudel3d639192016-09-09 11:52:26 -07002875 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002876 mCommon.getMatchingDimension(previewSize,
2877 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002878 mStreamConfigInfo.num_streams++;
2879 }
2880
Thierry Strudel2896d122017-02-23 19:18:03 -08002881 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002882 cam_analysis_info_t supportInfo;
2883 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2884 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2885 setPAAFSupport(callbackFeatureMask,
2886 CAM_STREAM_TYPE_CALLBACK,
2887 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002888 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002889 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002890 if (ret != NO_ERROR) {
2891 /* Ignore the error for Mono camera
2892 * because the PAAF bit mask is only set
2893 * for CAM_STREAM_TYPE_ANALYSIS stream type
2894 */
2895 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2896 LOGW("getAnalysisInfo failed, ret = %d", ret);
2897 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002898 }
2899 mSupportChannel = new QCamera3SupportChannel(
2900 mCameraHandle->camera_handle,
2901 mChannelHandle,
2902 mCameraHandle->ops,
2903 &gCamCapability[mCameraId]->padding_info,
2904 callbackFeatureMask,
2905 CAM_STREAM_TYPE_CALLBACK,
2906 &QCamera3SupportChannel::kDim,
2907 CAM_FORMAT_YUV_420_NV21,
2908 supportInfo.hw_analysis_supported,
2909 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002910 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002911 if (!mSupportChannel) {
2912 LOGE("dummy channel cannot be created");
2913 pthread_mutex_unlock(&mMutex);
2914 return -ENOMEM;
2915 }
2916 }
2917
2918 if (mSupportChannel) {
2919 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2920 QCamera3SupportChannel::kDim;
2921 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2922 CAM_STREAM_TYPE_CALLBACK;
2923 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2924 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2925 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2926 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2927 gCamCapability[mCameraId]->color_arrangement);
2928 mStreamConfigInfo.num_streams++;
2929 }
2930
2931 if (mRawDumpChannel) {
2932 cam_dimension_t rawSize;
2933 rawSize = getMaxRawSize(mCameraId);
2934 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2935 rawSize;
2936 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2937 CAM_STREAM_TYPE_RAW;
2938 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2939 CAM_QCOM_FEATURE_NONE;
2940 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2941 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2942 gCamCapability[mCameraId]->color_arrangement);
2943 mStreamConfigInfo.num_streams++;
2944 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002945
2946 if (mHdrPlusRawSrcChannel) {
2947 cam_dimension_t rawSize;
2948 rawSize = getMaxRawSize(mCameraId);
2949 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2950 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2951 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2952 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2953 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2954 gCamCapability[mCameraId]->color_arrangement);
2955 mStreamConfigInfo.num_streams++;
2956 }
2957
Thierry Strudel3d639192016-09-09 11:52:26 -07002958 /* In HFR mode, if video stream is not added, create a dummy channel so that
2959 * ISP can operate in batch mode even for the preview-only case. This channel is
2960 * never 'start'ed (no stream-on), it is only 'initialized' */
2961 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2962 !m_bIsVideo) {
2963 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2964 setPAAFSupport(dummyFeatureMask,
2965 CAM_STREAM_TYPE_VIDEO,
2966 gCamCapability[mCameraId]->color_arrangement);
2967 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2968 mChannelHandle,
2969 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002970 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002971 this,
2972 &mDummyBatchStream,
2973 CAM_STREAM_TYPE_VIDEO,
2974 dummyFeatureMask,
2975 mMetadataChannel);
2976 if (NULL == mDummyBatchChannel) {
2977 LOGE("creation of mDummyBatchChannel failed. "
2978 "Preview will use non-HFR sensor mode");
2979 }
2980 }
2981 if (mDummyBatchChannel) {
2982 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2983 mDummyBatchStream.width;
2984 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2985 mDummyBatchStream.height;
2986 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2987 CAM_STREAM_TYPE_VIDEO;
2988 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2989 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2990 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2991 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2992 gCamCapability[mCameraId]->color_arrangement);
2993 mStreamConfigInfo.num_streams++;
2994 }
2995
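    // Buffer counts reported for this configuration (a sketch of the logic below):
    // max_buffers is 0 for 4K video, MAX_VIDEO_BUFFERS when EIS 3 is enabled,
    // and MAX_INFLIGHT_REQUESTS otherwise.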
2996 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2997 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002998 m_bIs4KVideo ? 0 :
2999 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003000
3001 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3002 for (pendingRequestIterator i = mPendingRequestsList.begin();
3003 i != mPendingRequestsList.end();) {
3004 i = erasePendingRequest(i);
3005 }
3006 mPendingFrameDropList.clear();
3007 // Initialize/Reset the pending buffers list
3008 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3009 req.mPendingBufferList.clear();
3010 }
3011 mPendingBuffersMap.mPendingBuffersInRequest.clear();
3012
Thierry Strudel3d639192016-09-09 11:52:26 -07003013 mCurJpegMeta.clear();
3014 //Get min frame duration for the current stream configuration
3015 deriveMinFrameDuration();
3016
Chien-Yu Chenee335912017-02-09 17:53:20 -08003017 mFirstPreviewIntentSeen = false;
3018
3019 // Disable HDR+ if it's enabled
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003020 {
3021 Mutex::Autolock l(gHdrPlusClientLock);
3022 disableHdrPlusModeLocked();
3023 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003024
Thierry Strudel3d639192016-09-09 11:52:26 -07003025 // Update state
3026 mState = CONFIGURED;
3027
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003028 mFirstMetadataCallback = true;
3029
Thierry Strudel3d639192016-09-09 11:52:26 -07003030 pthread_mutex_unlock(&mMutex);
3031
3032 return rc;
3033}
3034
3035/*===========================================================================
3036 * FUNCTION : validateCaptureRequest
3037 *
3038 * DESCRIPTION: validate a capture request from camera service
3039 *
3040 * PARAMETERS :
3041 * @request : request from framework to process
3042 *
3043 * RETURN :
3044 *
3045 *==========================================================================*/
3046int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003047 camera3_capture_request_t *request,
3048 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003049{
3050 ssize_t idx = 0;
3051 const camera3_stream_buffer_t *b;
3052 CameraMetadata meta;
3053
3054 /* Sanity check the request */
3055 if (request == NULL) {
3056 LOGE("NULL capture request");
3057 return BAD_VALUE;
3058 }
3059
3060 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3061 /*settings cannot be null for the first request*/
3062 return BAD_VALUE;
3063 }
3064
3065 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003066 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3067 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003068 LOGE("Request %d: No output buffers provided!",
3069 frameNumber);
3070 return BAD_VALUE;
3071 }
3072 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3073 LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
3074 request->num_output_buffers, MAX_NUM_STREAMS);
3075 return BAD_VALUE;
3076 }
3077 if (request->input_buffer != NULL) {
3078 b = request->input_buffer;
3079 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3080 LOGE("Request %d: Buffer %ld: Status not OK!",
3081 frameNumber, (long)idx);
3082 return BAD_VALUE;
3083 }
3084 if (b->release_fence != -1) {
3085 LOGE("Request %d: Buffer %ld: Has a release fence!",
3086 frameNumber, (long)idx);
3087 return BAD_VALUE;
3088 }
3089 if (b->buffer == NULL) {
3090 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3091 frameNumber, (long)idx);
3092 return BAD_VALUE;
3093 }
3094 }
3095
3096 // Validate all buffers
3097 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003098 if (b == NULL) {
3099 return BAD_VALUE;
3100 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003101 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003102 QCamera3ProcessingChannel *channel =
3103 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3104 if (channel == NULL) {
3105 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3106 frameNumber, (long)idx);
3107 return BAD_VALUE;
3108 }
3109 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3110 LOGE("Request %d: Buffer %ld: Status not OK!",
3111 frameNumber, (long)idx);
3112 return BAD_VALUE;
3113 }
3114 if (b->release_fence != -1) {
3115 LOGE("Request %d: Buffer %ld: Has a release fence!",
3116 frameNumber, (long)idx);
3117 return BAD_VALUE;
3118 }
3119 if (b->buffer == NULL) {
3120 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3121 frameNumber, (long)idx);
3122 return BAD_VALUE;
3123 }
3124 if (*(b->buffer) == NULL) {
3125 LOGE("Request %d: Buffer %ld: NULL private handle!",
3126 frameNumber, (long)idx);
3127 return BAD_VALUE;
3128 }
3129 idx++;
3130 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003131 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003132 return NO_ERROR;
3133}
3134
3135/*===========================================================================
3136 * FUNCTION : deriveMinFrameDuration
3137 *
3138 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3139 * on currently configured streams.
3140 *
3141 * PARAMETERS : NONE
3142 *
3143 * RETURN : NONE
3144 *
3145 *==========================================================================*/
3146void QCamera3HardwareInterface::deriveMinFrameDuration()
3147{
3148 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3149
3150 maxJpegDim = 0;
3151 maxProcessedDim = 0;
3152 maxRawDim = 0;
3153
3154 // Figure out maximum jpeg, processed, and raw dimensions
3155 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3156 it != mStreamInfo.end(); it++) {
3157
3158 // Input stream doesn't have valid stream_type
3159 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3160 continue;
3161
3162 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3163 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3164 if (dimension > maxJpegDim)
3165 maxJpegDim = dimension;
3166 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3167 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3168 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3169 if (dimension > maxRawDim)
3170 maxRawDim = dimension;
3171 } else {
3172 if (dimension > maxProcessedDim)
3173 maxProcessedDim = dimension;
3174 }
3175 }
3176
3177 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3178 MAX_SIZES_CNT);
3179
3180 //Assume all jpeg dimensions are in processed dimensions.
3181 if (maxJpegDim > maxProcessedDim)
3182 maxProcessedDim = maxJpegDim;
3183 //Find the smallest raw dimension that is greater or equal to jpeg dimension
3184 if (maxProcessedDim > maxRawDim) {
3185 maxRawDim = INT32_MAX;
3186
3187 for (size_t i = 0; i < count; i++) {
3188 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3189 gCamCapability[mCameraId]->raw_dim[i].height;
3190 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3191 maxRawDim = dimension;
3192 }
3193 }
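    // Illustrative example (hypothetical sizes): with a 4000x3000 processed/JPEG
    // stream (12000000 pixels) and supported raw modes of 4208x3120 (13128960)
    // and 5344x4016 (21461504), the 4208x3120 mode is selected because it is the
    // smallest raw dimension that still covers the processed dimension.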
3194
3195 //Find minimum durations for processed, jpeg, and raw
3196 for (size_t i = 0; i < count; i++) {
3197 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3198 gCamCapability[mCameraId]->raw_dim[i].height) {
3199 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3200 break;
3201 }
3202 }
3203 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3204 for (size_t i = 0; i < count; i++) {
3205 if (maxProcessedDim ==
3206 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3207 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3208 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3209 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3210 break;
3211 }
3212 }
3213}
3214
3215/*===========================================================================
3216 * FUNCTION : getMinFrameDuration
3217 *
3218 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
3219 * and current request configuration.
3220 *
3221 * PARAMETERS : @request: request sent by the framework
3222 *
3223 * RETURN : min frame duration for a particular request
3224 *
3225 *==========================================================================*/
3226int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3227{
3228 bool hasJpegStream = false;
3229 bool hasRawStream = false;
3230 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3231 const camera3_stream_t *stream = request->output_buffers[i].stream;
3232 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3233 hasJpegStream = true;
3234 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3235 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3236 stream->format == HAL_PIXEL_FORMAT_RAW16)
3237 hasRawStream = true;
3238 }
3239
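    // The request's minimum frame duration is bounded by the slowest of the
    // configured minimums; the JPEG minimum is only considered when the request
    // actually includes a BLOB (JPEG) stream.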
3240 if (!hasJpegStream)
3241 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3242 else
3243 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3244}
3245
3246/*===========================================================================
3247 * FUNCTION : handleBuffersDuringFlushLock
3248 *
3249 * DESCRIPTION: Account for buffers returned from back-end during flush
3250 * This function is executed while mMutex is held by the caller.
3251 *
3252 * PARAMETERS :
3253 * @buffer: image buffer for the callback
3254 *
3255 * RETURN :
3256 *==========================================================================*/
3257void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3258{
3259 bool buffer_found = false;
3260 for (List<PendingBuffersInRequest>::iterator req =
3261 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3262 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3263 for (List<PendingBufferInfo>::iterator i =
3264 req->mPendingBufferList.begin();
3265 i != req->mPendingBufferList.end(); i++) {
3266 if (i->buffer == buffer->buffer) {
3267 mPendingBuffersMap.numPendingBufsAtFlush--;
3268 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3269 buffer->buffer, req->frame_number,
3270 mPendingBuffersMap.numPendingBufsAtFlush);
3271 buffer_found = true;
3272 break;
3273 }
3274 }
3275 if (buffer_found) {
3276 break;
3277 }
3278 }
3279 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3280 //signal the flush()
3281 LOGD("All buffers returned to HAL. Continue flush");
3282 pthread_cond_signal(&mBuffersCond);
3283 }
3284}
3285
Thierry Strudel3d639192016-09-09 11:52:26 -07003286/*===========================================================================
3287 * FUNCTION : handleBatchMetadata
3288 *
3289 * DESCRIPTION: Handles metadata buffer callback in batch mode
3290 *
3291 * PARAMETERS : @metadata_buf: metadata buffer
3292 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3293 * the meta buf in this method
3294 *
3295 * RETURN :
3296 *
3297 *==========================================================================*/
3298void QCamera3HardwareInterface::handleBatchMetadata(
3299 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3300{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003301 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003302
3303 if (NULL == metadata_buf) {
3304 LOGE("metadata_buf is NULL");
3305 return;
3306 }
3307 /* In batch mode, the metadata will contain the frame number and timestamp of
3308 * the last frame in the batch. Eg: a batch containing buffers from requests
3309 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3310 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3311 * multiple process_capture_results */
3312 metadata_buffer_t *metadata =
3313 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3314 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3315 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3316 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3317 uint32_t frame_number = 0, urgent_frame_number = 0;
3318 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3319 bool invalid_metadata = false;
3320 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3321 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003322 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003323
3324 int32_t *p_frame_number_valid =
3325 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3326 uint32_t *p_frame_number =
3327 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3328 int64_t *p_capture_time =
3329 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3330 int32_t *p_urgent_frame_number_valid =
3331 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3332 uint32_t *p_urgent_frame_number =
3333 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3334
3335 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3336 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3337 (NULL == p_urgent_frame_number)) {
3338 LOGE("Invalid metadata");
3339 invalid_metadata = true;
3340 } else {
3341 frame_number_valid = *p_frame_number_valid;
3342 last_frame_number = *p_frame_number;
3343 last_frame_capture_time = *p_capture_time;
3344 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3345 last_urgent_frame_number = *p_urgent_frame_number;
3346 }
3347
3348 /* In batchmode, when no video buffers are requested, set_parms are sent
3349 * for every capture_request. The difference between consecutive urgent
3350 * frame numbers and frame numbers should be used to interpolate the
3351 * corresponding frame numbers and time stamps */
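    /* Worked example (illustrative numbers): if this batch metadata reports
     * last_frame_number = 8 and the batch started at frame 5, then
     * frameNumDiff = 8 + 1 - 5 = 4, so the loop below emits four results
     * (frames 5..8) with timestamps spaced NSEC_PER_SEC / mHFRVideoFps apart,
     * counted back from the capture time reported for frame 8. */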
3352 pthread_mutex_lock(&mMutex);
3353 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003354 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3355 if(idx < 0) {
3356 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3357 last_urgent_frame_number);
3358 mState = ERROR;
3359 pthread_mutex_unlock(&mMutex);
3360 return;
3361 }
3362 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003363 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3364 first_urgent_frame_number;
3365
3366 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3367 urgent_frame_number_valid,
3368 first_urgent_frame_number, last_urgent_frame_number);
3369 }
3370
3371 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003372 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3373 if(idx < 0) {
3374 LOGE("Invalid frame number received: %d. Irrecoverable error",
3375 last_frame_number);
3376 mState = ERROR;
3377 pthread_mutex_unlock(&mMutex);
3378 return;
3379 }
3380 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003381 frameNumDiff = last_frame_number + 1 -
3382 first_frame_number;
3383 mPendingBatchMap.removeItem(last_frame_number);
3384
3385 LOGD("frm: valid: %d frm_num: %d - %d",
3386 frame_number_valid,
3387 first_frame_number, last_frame_number);
3388
3389 }
3390 pthread_mutex_unlock(&mMutex);
3391
3392 if (urgent_frame_number_valid || frame_number_valid) {
3393 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3394 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3395 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3396 urgentFrameNumDiff, last_urgent_frame_number);
3397 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3398 LOGE("frameNumDiff: %d frameNum: %d",
3399 frameNumDiff, last_frame_number);
3400 }
3401
3402 for (size_t i = 0; i < loopCount; i++) {
3403 /* handleMetadataWithLock is called even for invalid_metadata for
3404 * pipeline depth calculation */
3405 if (!invalid_metadata) {
3406 /* Infer frame number. Batch metadata contains frame number of the
3407 * last frame */
3408 if (urgent_frame_number_valid) {
3409 if (i < urgentFrameNumDiff) {
3410 urgent_frame_number =
3411 first_urgent_frame_number + i;
3412 LOGD("inferred urgent frame_number: %d",
3413 urgent_frame_number);
3414 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3415 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3416 } else {
3417 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3418 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3419 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3420 }
3421 }
3422
3423 /* Infer frame number. Batch metadata contains frame number of the
3424 * last frame */
3425 if (frame_number_valid) {
3426 if (i < frameNumDiff) {
3427 frame_number = first_frame_number + i;
3428 LOGD("inferred frame_number: %d", frame_number);
3429 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3430 CAM_INTF_META_FRAME_NUMBER, frame_number);
3431 } else {
3432 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3433 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3434 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3435 }
3436 }
3437
3438 if (last_frame_capture_time) {
3439 //Infer timestamp
3440 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003441 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003442 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003443 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003444 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3445 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3446 LOGD("batch capture_time: %lld, capture_time: %lld",
3447 last_frame_capture_time, capture_time);
3448 }
3449 }
3450 pthread_mutex_lock(&mMutex);
3451 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003452 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003453 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3454 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003455 &is_metabuf_queued /* if metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003456 pthread_mutex_unlock(&mMutex);
3457 }
3458
3459 /* BufDone metadata buffer */
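    // Skip the release when the buffer was queued to a reprocess channel
    // (is_metabuf_queued == true); releasing it here while that channel is
    // still using it would not be safe.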
Thierry Strudel54dc9782017-02-15 12:12:10 -08003460 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003461 mMetadataChannel->bufDone(metadata_buf);
3462 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003463 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003464 }
3465}
3466
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003467void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3468 camera3_error_msg_code_t errorCode)
3469{
3470 camera3_notify_msg_t notify_msg;
3471 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3472 notify_msg.type = CAMERA3_MSG_ERROR;
3473 notify_msg.message.error.error_code = errorCode;
3474 notify_msg.message.error.error_stream = NULL;
3475 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003476 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003477
3478 return;
3479}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003480
3481/*===========================================================================
3482 * FUNCTION : sendPartialMetadataWithLock
3483 *
3484 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3485 *
3486 * PARAMETERS : @metadata: metadata buffer
3487 * @requestIter: The iterator for the pending capture request for
3488 * which the partial result is being sent
3489 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3490 * last urgent metadata in a batch. Always true for non-batch mode
3491 *
3492 * RETURN :
3493 *
3494 *==========================================================================*/
3495
3496void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3497 metadata_buffer_t *metadata,
3498 const pendingRequestIterator requestIter,
3499 bool lastUrgentMetadataInBatch)
3500{
3501 camera3_capture_result_t result;
3502 memset(&result, 0, sizeof(camera3_capture_result_t));
3503
3504 requestIter->partial_result_cnt++;
3505
3506 // Extract 3A metadata
3507 result.result = translateCbUrgentMetadataToResultMetadata(
3508 metadata, lastUrgentMetadataInBatch);
3509 // Populate metadata result
3510 result.frame_number = requestIter->frame_number;
3511 result.num_output_buffers = 0;
3512 result.output_buffers = NULL;
3513 result.partial_result = requestIter->partial_result_cnt;
3514
3515 {
3516 Mutex::Autolock l(gHdrPlusClientLock);
3517 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3518 // Notify HDR+ client about the partial metadata.
3519 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3520 result.partial_result == PARTIAL_RESULT_COUNT);
3521 }
3522 }
3523
3524 orchestrateResult(&result);
3525 LOGD("urgent frame_number = %u", result.frame_number);
3526 free_camera_metadata((camera_metadata_t *)result.result);
3527}
3528
Thierry Strudel3d639192016-09-09 11:52:26 -07003529/*===========================================================================
3530 * FUNCTION : handleMetadataWithLock
3531 *
3532 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3533 *
3534 * PARAMETERS : @metadata_buf: metadata buffer
3535 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3536 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003537 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3538 * last urgent metadata in a batch. Always true for non-batch mode
3539 * @lastMetadataInBatch: Boolean to indicate whether this is the
3540 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003541 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3542 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003543 *
3544 * RETURN :
3545 *
3546 *==========================================================================*/
3547void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003548 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003549 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3550 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003551{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003552 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003553 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3554 //during flush do not send metadata from this thread
3555 LOGD("not sending metadata during flush or when mState is error");
3556 if (free_and_bufdone_meta_buf) {
3557 mMetadataChannel->bufDone(metadata_buf);
3558 free(metadata_buf);
3559 }
3560 return;
3561 }
3562
3563 //not in flush
3564 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3565 int32_t frame_number_valid, urgent_frame_number_valid;
3566 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003567 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003568 nsecs_t currentSysTime;
3569
3570 int32_t *p_frame_number_valid =
3571 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3572 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3573 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003574 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003575 int32_t *p_urgent_frame_number_valid =
3576 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3577 uint32_t *p_urgent_frame_number =
3578 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3579 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3580 metadata) {
3581 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3582 *p_frame_number_valid, *p_frame_number);
3583 }
3584
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003585 camera_metadata_t *resultMetadata = nullptr;
3586
Thierry Strudel3d639192016-09-09 11:52:26 -07003587 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3588 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3589 LOGE("Invalid metadata");
3590 if (free_and_bufdone_meta_buf) {
3591 mMetadataChannel->bufDone(metadata_buf);
3592 free(metadata_buf);
3593 }
3594 goto done_metadata;
3595 }
3596 frame_number_valid = *p_frame_number_valid;
3597 frame_number = *p_frame_number;
3598 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003599 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003600 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3601 urgent_frame_number = *p_urgent_frame_number;
3602 currentSysTime = systemTime(CLOCK_MONOTONIC);
3603
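    // When sensor timestamps are not calibrated, estimate the offset between the
    // BOOTTIME and MONOTONIC clocks (three samples, keeping the one with the
    // smallest measurement gap) and subtract it from the reported capture time.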
Jason Lee603176d2017-05-31 11:43:27 -07003604 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3605 const int tries = 3;
3606 nsecs_t bestGap, measured;
3607 for (int i = 0; i < tries; ++i) {
3608 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3609 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3610 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3611 const nsecs_t gap = tmono2 - tmono;
3612 if (i == 0 || gap < bestGap) {
3613 bestGap = gap;
3614 measured = tbase - ((tmono + tmono2) >> 1);
3615 }
3616 }
3617 capture_time -= measured;
3618 }
3619
Thierry Strudel3d639192016-09-09 11:52:26 -07003620 // Detect if buffers from any requests are overdue
3621 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003622 int64_t timeout;
3623 {
3624 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3625 // If there is a pending HDR+ request, the following requests may be blocked until the
3626 // HDR+ request is done. So allow a longer timeout.
3627 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3628 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3629 }
3630
3631 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003632 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003633 assert(missed.stream->priv);
3634 if (missed.stream->priv) {
3635 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3636 assert(ch->mStreams[0]);
3637 if (ch->mStreams[0]) {
3638 LOGE("Cancel missing frame = %d, buffer = %p, "
3639 "stream type = %d, stream format = %d",
3640 req.frame_number, missed.buffer,
3641 ch->mStreams[0]->getMyType(), missed.stream->format);
3642 ch->timeoutFrame(req.frame_number);
3643 }
3644 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003645 }
3646 }
3647 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003648 //For the very first metadata callback, regardless of whether it contains a valid
3649 //frame number, send the partial metadata for the jumpstarting requests.
3650 //Note that this has to be done even if the metadata doesn't contain a valid
3651 //urgent frame number, because in the case where only 1 request is ever submitted
3652 //to the HAL, there won't be a subsequent valid urgent frame number.
3653 if (mFirstMetadataCallback) {
3654 for (pendingRequestIterator i =
3655 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3656 if (i->bUseFirstPartial) {
3657 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3658 }
3659 }
3660 mFirstMetadataCallback = false;
3661 }
3662
Thierry Strudel3d639192016-09-09 11:52:26 -07003663 //Partial result on process_capture_result for timestamp
3664 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003665 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003666
3667 //Recieved an urgent Frame Number, handle it
3668 //using partial results
3669 for (pendingRequestIterator i =
3670 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3671 LOGD("Iterator Frame = %d urgent frame = %d",
3672 i->frame_number, urgent_frame_number);
3673
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003674 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003675 (i->partial_result_cnt == 0)) {
3676 LOGE("Error: HAL missed urgent metadata for frame number %d",
3677 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003678 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003679 }
3680
3681 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003682 i->partial_result_cnt == 0) {
3683 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003684 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3685 // Instant AEC settled for this frame.
3686 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3687 mInstantAECSettledFrameNumber = urgent_frame_number;
3688 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003689 break;
3690 }
3691 }
3692 }
3693
3694 if (!frame_number_valid) {
3695 LOGD("Not a valid normal frame number, used as SOF only");
3696 if (free_and_bufdone_meta_buf) {
3697 mMetadataChannel->bufDone(metadata_buf);
3698 free(metadata_buf);
3699 }
3700 goto done_metadata;
3701 }
3702 LOGH("valid frame_number = %u, capture_time = %lld",
3703 frame_number, capture_time);
3704
Emilian Peev7650c122017-01-19 08:24:33 -08003705 if (metadata->is_depth_data_valid) {
3706 handleDepthDataLocked(metadata->depth_data, frame_number);
3707 }
3708
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003709 // Check whether any stream buffer corresponding to this frame is dropped or not.
3710 // If dropped, then send the ERROR_BUFFER for the corresponding stream.
3711 // OR, if instant AEC is enabled, drop frames until AEC is settled.
3712 for (auto & pendingRequest : mPendingRequestsList) {
3713 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3714 mInstantAECSettledFrameNumber)) {
3715 camera3_notify_msg_t notify_msg = {};
3716 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003717 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003718 QCamera3ProcessingChannel *channel =
3719 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003720 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003721 if (p_cam_frame_drop) {
3722 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003723 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003724 // Got the stream ID for drop frame.
3725 dropFrame = true;
3726 break;
3727 }
3728 }
3729 } else {
3730 // This is instant AEC case.
3731 // For instant AEC drop the stream untill AEC is settled.
3732 dropFrame = true;
3733 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003734
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003735 if (dropFrame) {
3736 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3737 if (p_cam_frame_drop) {
3738 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003739 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003740 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003741 } else {
3742 // For instant AEC, inform frame drop and frame number
3743 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3744 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003745 pendingRequest.frame_number, streamID,
3746 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003747 }
3748 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003749 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003750 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003751 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003752 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003753 if (p_cam_frame_drop) {
3754 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003755 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003756 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003757 } else {
3758 // For instant AEC, inform frame drop and frame number
3759 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3760 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003761 pendingRequest.frame_number, streamID,
3762 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003763 }
3764 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003765 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003766 PendingFrameDrop.stream_ID = streamID;
3767 // Add the Frame drop info to mPendingFrameDropList
3768 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003769 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003770 }
3771 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003772 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003773
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003774 for (auto & pendingRequest : mPendingRequestsList) {
3775 // Find the pending request with the frame number.
3776 if (pendingRequest.frame_number == frame_number) {
3777 // Update the sensor timestamp.
3778 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003779
Thierry Strudel3d639192016-09-09 11:52:26 -07003780
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003781 /* Set the timestamp in display metadata so that clients aware of
3782 private_handle such as VT can use these unmodified timestamps.
3783 Camera framework is unaware of this timestamp and cannot change this */
Jason Lee603176d2017-05-31 11:43:27 -07003784 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003785
Thierry Strudel3d639192016-09-09 11:52:26 -07003786 // Find channel requiring metadata, meaning internal offline postprocess
3787 // is needed.
3788 //TODO: for now, we don't support two streams requiring metadata at the same time.
3789 // (because we are not making copies, and metadata buffer is not reference counted.
3790 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003791 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3792 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003793 if (iter->need_metadata) {
3794 internalPproc = true;
3795 QCamera3ProcessingChannel *channel =
3796 (QCamera3ProcessingChannel *)iter->stream->priv;
3797 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003798 if(p_is_metabuf_queued != NULL) {
3799 *p_is_metabuf_queued = true;
3800 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003801 break;
3802 }
3803 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003804 for (auto itr = pendingRequest.internalRequestList.begin();
3805 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003806 if (itr->need_metadata) {
3807 internalPproc = true;
3808 QCamera3ProcessingChannel *channel =
3809 (QCamera3ProcessingChannel *)itr->stream->priv;
3810 channel->queueReprocMetadata(metadata_buf);
3811 break;
3812 }
3813 }
3814
Thierry Strudel54dc9782017-02-15 12:12:10 -08003815 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003816
3817 bool *enableZsl = nullptr;
3818 if (gExposeEnableZslKey) {
3819 enableZsl = &pendingRequest.enableZsl;
3820 }
3821
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003822 resultMetadata = translateFromHalMetadata(metadata,
3823 pendingRequest.timestamp, pendingRequest.request_id,
3824 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3825 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003826 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003827 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003828 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003829 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003830 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003831 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003832
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003833 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003834
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003835 if (pendingRequest.blob_request) {
3836 //Dump tuning metadata if enabled and available
3837 char prop[PROPERTY_VALUE_MAX];
3838 memset(prop, 0, sizeof(prop));
3839 property_get("persist.camera.dumpmetadata", prop, "0");
3840 int32_t enabled = atoi(prop);
3841 if (enabled && metadata->is_tuning_params_valid) {
3842 dumpMetadataToFile(metadata->tuning_params,
3843 mMetaFrameCount,
3844 enabled,
3845 "Snapshot",
3846 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003847 }
3848 }
3849
3850 if (!internalPproc) {
3851 LOGD("couldn't find need_metadata for this metadata");
3852 // Return metadata buffer
3853 if (free_and_bufdone_meta_buf) {
3854 mMetadataChannel->bufDone(metadata_buf);
3855 free(metadata_buf);
3856 }
3857 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003858
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003859 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003860 }
3861 }
3862
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003863 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3864
3865 // Try to send out capture result metadata.
3866 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003867 return;
3868
Thierry Strudel3d639192016-09-09 11:52:26 -07003869done_metadata:
3870 for (pendingRequestIterator i = mPendingRequestsList.begin();
3871 i != mPendingRequestsList.end() ;i++) {
3872 i->pipeline_depth++;
3873 }
3874 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3875 unblockRequestIfNecessary();
3876}
3877
3878/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003879 * FUNCTION : handleDepthDataLocked
3880 *
3881 * DESCRIPTION: Handles incoming depth data
3882 *
3883 * PARAMETERS : @depthData : Depth data
3884 * @frameNumber: Frame number of the incoming depth data
3885 *
3886 * RETURN :
3887 *
3888 *==========================================================================*/
3889void QCamera3HardwareInterface::handleDepthDataLocked(
3890 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3891 uint32_t currentFrameNumber;
3892 buffer_handle_t *depthBuffer;
3893
3894 if (nullptr == mDepthChannel) {
3895 LOGE("Depth channel not present!");
3896 return;
3897 }
3898
3899 camera3_stream_buffer_t resultBuffer =
3900 {.acquire_fence = -1,
3901 .release_fence = -1,
3902 .status = CAMERA3_BUFFER_STATUS_OK,
3903 .buffer = nullptr,
3904 .stream = mDepthChannel->getStream()};
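    // Drain mapped depth buffers up to this frame number: the buffer matching
    // frameNumber is filled with the depth payload, while older buffers are
    // returned with an error status after a buffer-error notification.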
Emilian Peev7650c122017-01-19 08:24:33 -08003905 do {
3906 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3907 if (nullptr == depthBuffer) {
3908 break;
3909 }
3910
Emilian Peev7650c122017-01-19 08:24:33 -08003911 resultBuffer.buffer = depthBuffer;
3912 if (currentFrameNumber == frameNumber) {
3913 int32_t rc = mDepthChannel->populateDepthData(depthData,
3914 frameNumber);
3915 if (NO_ERROR != rc) {
3916 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3917 } else {
3918 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3919 }
3920 } else if (currentFrameNumber > frameNumber) {
3921 break;
3922 } else {
3923 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3924 {{currentFrameNumber, mDepthChannel->getStream(),
3925 CAMERA3_MSG_ERROR_BUFFER}}};
3926 orchestrateNotify(&notify_msg);
3927
 3928                LOGE("Depth buffer for frame number: %d is missing, "
 3929                        "returning it with error status!", currentFrameNumber);
3930 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3931 }
3932 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003933 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003934 } while (currentFrameNumber < frameNumber);
3935}
3936
3937/*===========================================================================
3938 * FUNCTION : notifyErrorFoPendingDepthData
3939 *
3940 * DESCRIPTION: Returns error for any pending depth buffers
3941 *
3942 * PARAMETERS : depthCh - depth channel that needs to get flushed
3943 *
3944 * RETURN :
3945 *
3946 *==========================================================================*/
3947void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3948 QCamera3DepthChannel *depthCh) {
3949 uint32_t currentFrameNumber;
3950 buffer_handle_t *depthBuffer;
3951
3952 if (nullptr == depthCh) {
3953 return;
3954 }
3955
3956 camera3_notify_msg_t notify_msg =
3957 {.type = CAMERA3_MSG_ERROR,
3958 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3959 camera3_stream_buffer_t resultBuffer =
3960 {.acquire_fence = -1,
3961 .release_fence = -1,
3962 .buffer = nullptr,
3963 .stream = depthCh->getStream(),
3964 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08003965
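    // Return every still-mapped depth buffer with an error status and send a
    // buffer-error notification for its frame number.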
3966 while (nullptr !=
3967 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3968 depthCh->unmapBuffer(currentFrameNumber);
3969
3970 notify_msg.message.error.frame_number = currentFrameNumber;
3971 orchestrateNotify(&notify_msg);
3972
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003973 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003974    }
3975}
3976
3977/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003978 * FUNCTION : hdrPlusPerfLock
3979 *
3980 * DESCRIPTION: perf lock for HDR+ using custom intent
3981 *
3982 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3983 *
3984 * RETURN : None
3985 *
3986 *==========================================================================*/
3987void QCamera3HardwareInterface::hdrPlusPerfLock(
3988 mm_camera_super_buf_t *metadata_buf)
3989{
3990 if (NULL == metadata_buf) {
3991 LOGE("metadata_buf is NULL");
3992 return;
3993 }
3994 metadata_buffer_t *metadata =
3995 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3996 int32_t *p_frame_number_valid =
3997 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3998 uint32_t *p_frame_number =
3999 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4000
4001 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4002 LOGE("%s: Invalid metadata", __func__);
4003 return;
4004 }
4005
4006 //acquire perf lock for 5 sec after the last HDR frame is captured
 4007    if (*p_frame_number_valid) {
 4008        if (mLastCustIntentFrmNum == (int32_t)*p_frame_number) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004010 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004011 }
4012 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004013}
4014
4015/*===========================================================================
4016 * FUNCTION : handleInputBufferWithLock
4017 *
4018 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4019 *
4020 * PARAMETERS : @frame_number: frame number of the input buffer
4021 *
4022 * RETURN :
4023 *
4024 *==========================================================================*/
4025void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4026{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004027 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004028 pendingRequestIterator i = mPendingRequestsList.begin();
4029 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4030 i++;
4031 }
4032 if (i != mPendingRequestsList.end() && i->input_buffer) {
4033 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004034 CameraMetadata settings;
4035 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4036 if(i->settings) {
4037 settings = i->settings;
4038 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4039 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004040 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004041 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004042 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004043 } else {
4044 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004045 }
4046
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004047 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4048 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4049 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004050
4051 camera3_capture_result result;
4052 memset(&result, 0, sizeof(camera3_capture_result));
4053 result.frame_number = frame_number;
4054 result.result = i->settings;
4055 result.input_buffer = i->input_buffer;
4056 result.partial_result = PARTIAL_RESULT_COUNT;
4057
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004058 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004059 LOGD("Input request metadata and input buffer frame_number = %u",
4060 i->frame_number);
4061 i = erasePendingRequest(i);
4062 } else {
4063 LOGE("Could not find input request for frame number %d", frame_number);
4064 }
4065}
4066
4067/*===========================================================================
4068 * FUNCTION : handleBufferWithLock
4069 *
4070 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4071 *
4072 * PARAMETERS : @buffer: image buffer for the callback
4073 * @frame_number: frame number of the image buffer
4074 *
4075 * RETURN :
4076 *
4077 *==========================================================================*/
4078void QCamera3HardwareInterface::handleBufferWithLock(
4079 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4080{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004081 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004082
4083 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4084 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4085 }
4086
Thierry Strudel3d639192016-09-09 11:52:26 -07004087 /* Nothing to be done during error state */
4088 if ((ERROR == mState) || (DEINIT == mState)) {
4089 return;
4090 }
4091 if (mFlushPerf) {
4092 handleBuffersDuringFlushLock(buffer);
4093 return;
4094 }
4095 //not in flush
4096 // If the frame number doesn't exist in the pending request list,
4097 // directly send the buffer to the frameworks, and update pending buffers map
4098 // Otherwise, book-keep the buffer.
4099 pendingRequestIterator i = mPendingRequestsList.begin();
4100 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4101 i++;
4102 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004103
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004104 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004105 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004106 // For a reprocessing request, try to send out result metadata.
4107 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004108 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004109 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004110
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004111 // Check if this frame was dropped.
4112 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4113 m != mPendingFrameDropList.end(); m++) {
4114 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4115 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4116 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4117 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4118 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4119 frame_number, streamID);
4120 m = mPendingFrameDropList.erase(m);
4121 break;
4122 }
4123 }
4124
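    // Fold in any error status already recorded for this buffer in the pending
    // buffers map before dispatching it.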
4125 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4126 LOGH("result frame_number = %d, buffer = %p",
4127 frame_number, buffer->buffer);
4128
4129 mPendingBuffersMap.removeBuf(buffer->buffer);
4130 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4131
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004132 if (mPreviewStarted == false) {
4133 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4134 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004135 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4136
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004137 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4138 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4139 mPreviewStarted = true;
4140
4141 // Set power hint for preview
4142 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4143 }
4144 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004145}
4146
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004147void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004148 const camera_metadata_t *resultMetadata)
4149{
4150 // Find the pending request for this result metadata.
4151 auto requestIter = mPendingRequestsList.begin();
4152 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4153 requestIter++;
4154 }
4155
4156 if (requestIter == mPendingRequestsList.end()) {
4157 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4158 return;
4159 }
4160
4161 // Update the result metadata
4162 requestIter->resultMetadata = resultMetadata;
4163
4164 // Check what type of request this is.
4165 bool liveRequest = false;
4166 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004167 // HDR+ request doesn't have partial results.
4168 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004169 } else if (requestIter->input_buffer != nullptr) {
4170 // Reprocessing request result is the same as settings.
4171 requestIter->resultMetadata = requestIter->settings;
4172 // Reprocessing request doesn't have partial results.
4173 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4174 } else {
4175 liveRequest = true;
4176 requestIter->partial_result_cnt++;
4177 mPendingLiveRequest--;
4178
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004179 {
4180 Mutex::Autolock l(gHdrPlusClientLock);
4181 // For a live request, send the metadata to HDR+ client.
4182 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4183 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4184 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4185 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004186 }
4187 }
4188
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004189 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4190 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004191 bool readyToSend = true;
4192
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004193 // Iterate through the pending requests to send out result metadata that are ready. Also if
4194 // this result metadata belongs to a live request, notify errors for previous live requests
4195 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004196 auto iter = mPendingRequestsList.begin();
4197 while (iter != mPendingRequestsList.end()) {
4198 // Check if current pending request is ready. If it's not ready, the following pending
4199 // requests are also not ready.
4200 if (readyToSend && iter->resultMetadata == nullptr) {
4201 readyToSend = false;
4202 }
4203
4204 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4205
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004206 camera3_capture_result_t result = {};
4207 result.frame_number = iter->frame_number;
4208 result.result = iter->resultMetadata;
4209 result.partial_result = iter->partial_result_cnt;
4210
4211 // If this pending buffer has result metadata, we may be able to send out shutter callback
4212 // and result metadata.
4213 if (iter->resultMetadata != nullptr) {
4214 if (!readyToSend) {
4215 // If any of the previous pending request is not ready, this pending request is
4216 // also not ready to send in order to keep shutter callbacks and result metadata
4217 // in order.
4218 iter++;
4219 continue;
4220 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004221 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4222 // If the result metadata belongs to a live request, notify errors for previous pending
4223 // live requests.
4224 mPendingLiveRequest--;
4225
4226 CameraMetadata dummyMetadata;
4227 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4228 result.result = dummyMetadata.release();
4229
4230 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004231
 4232            // partial_result should be PARTIAL_RESULT_COUNT in case of
4233 // ERROR_RESULT.
4234 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4235 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004236 } else {
4237 iter++;
4238 continue;
4239 }
4240
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004241 result.output_buffers = nullptr;
4242 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004243 orchestrateResult(&result);
4244
4245 // For reprocessing, result metadata is the same as settings so do not free it here to
4246 // avoid double free.
4247 if (result.result != iter->settings) {
4248 free_camera_metadata((camera_metadata_t *)result.result);
4249 }
4250 iter->resultMetadata = nullptr;
4251 iter = erasePendingRequest(iter);
4252 }
4253
4254 if (liveRequest) {
4255 for (auto &iter : mPendingRequestsList) {
4256 // Increment pipeline depth for the following pending requests.
4257 if (iter.frame_number > frameNumber) {
4258 iter.pipeline_depth++;
4259 }
4260 }
4261 }
4262
4263 unblockRequestIfNecessary();
4264}
4265
Thierry Strudel3d639192016-09-09 11:52:26 -07004266/*===========================================================================
4267 * FUNCTION : unblockRequestIfNecessary
4268 *
4269 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4270 * that mMutex is held when this function is called.
4271 *
4272 * PARAMETERS :
4273 *
4274 * RETURN :
4275 *
4276 *==========================================================================*/
4277void QCamera3HardwareInterface::unblockRequestIfNecessary()
4278{
4279 // Unblock process_capture_request
4280 pthread_cond_signal(&mRequestCond);
4281}
4282
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004283/*===========================================================================
4284 * FUNCTION : isHdrSnapshotRequest
4285 *
4286 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4287 *
4288 * PARAMETERS : camera3 request structure
4289 *
4290 * RETURN : boolean decision variable
4291 *
4292 *==========================================================================*/
4293bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4294{
4295 if (request == NULL) {
4296 LOGE("Invalid request handle");
4297 assert(0);
4298 return false;
4299 }
4300
4301 if (!mForceHdrSnapshot) {
4302 CameraMetadata frame_settings;
4303 frame_settings = request->settings;
4304
4305 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4306 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4307 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4308 return false;
4309 }
4310 } else {
4311 return false;
4312 }
4313
4314 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4315 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4316 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4317 return false;
4318 }
4319 } else {
4320 return false;
4321 }
4322 }
4323
4324 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4325 if (request->output_buffers[i].stream->format
4326 == HAL_PIXEL_FORMAT_BLOB) {
4327 return true;
4328 }
4329 }
4330
4331 return false;
4332}
4333/*===========================================================================
4334 * FUNCTION : orchestrateRequest
4335 *
4336 * DESCRIPTION: Orchestrates a capture request from camera service
4337 *
4338 * PARAMETERS :
4339 * @request : request from framework to process
4340 *
4341 * RETURN : Error status codes
4342 *
4343 *==========================================================================*/
4344int32_t QCamera3HardwareInterface::orchestrateRequest(
4345 camera3_capture_request_t *request)
4346{
4347
4348 uint32_t originalFrameNumber = request->frame_number;
4349 uint32_t originalOutputCount = request->num_output_buffers;
4350 const camera_metadata_t *original_settings = request->settings;
4351 List<InternalRequest> internallyRequestedStreams;
4352 List<InternalRequest> emptyInternalList;
4353
4354 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4355 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
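        /* The HDR snapshot is expanded into a bracketed sequence of requests:
         * a metering-only settling capture at GB_HDR_HALF_STEP_EV, the original
         * framework request, then settling and metadata captures at 0 EV and at
         * GB_HDR_2X_STEP_EV, all with AE locked. Each capture below is issued
         * under its own internal frame number from _orchestrationDb. */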
4356 uint32_t internalFrameNumber;
4357 CameraMetadata modified_meta;
4358
4359
4360 /* Add Blob channel to list of internally requested streams */
4361 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4362 if (request->output_buffers[i].stream->format
4363 == HAL_PIXEL_FORMAT_BLOB) {
4364 InternalRequest streamRequested;
4365 streamRequested.meteringOnly = 1;
4366 streamRequested.need_metadata = 0;
4367 streamRequested.stream = request->output_buffers[i].stream;
4368 internallyRequestedStreams.push_back(streamRequested);
4369 }
4370 }
4371 request->num_output_buffers = 0;
4372 auto itr = internallyRequestedStreams.begin();
4373
4374 /* Modify setting to set compensation */
4375 modified_meta = request->settings;
4376 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4377 uint8_t aeLock = 1;
4378 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4379 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4380 camera_metadata_t *modified_settings = modified_meta.release();
4381 request->settings = modified_settings;
4382
4383 /* Capture Settling & -2x frame */
4384 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4385 request->frame_number = internalFrameNumber;
4386 processCaptureRequest(request, internallyRequestedStreams);
4387
4388 request->num_output_buffers = originalOutputCount;
4389 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4390 request->frame_number = internalFrameNumber;
4391 processCaptureRequest(request, emptyInternalList);
4392 request->num_output_buffers = 0;
4393
4394 modified_meta = modified_settings;
4395 expCompensation = 0;
4396 aeLock = 1;
4397 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4398 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4399 modified_settings = modified_meta.release();
4400 request->settings = modified_settings;
4401
4402 /* Capture Settling & 0X frame */
4403
4404 itr = internallyRequestedStreams.begin();
4405 if (itr == internallyRequestedStreams.end()) {
4406 LOGE("Error Internally Requested Stream list is empty");
4407 assert(0);
4408 } else {
4409 itr->need_metadata = 0;
4410 itr->meteringOnly = 1;
4411 }
4412
4413 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4414 request->frame_number = internalFrameNumber;
4415 processCaptureRequest(request, internallyRequestedStreams);
4416
4417 itr = internallyRequestedStreams.begin();
4418 if (itr == internallyRequestedStreams.end()) {
4419 ALOGE("Error Internally Requested Stream list is empty");
4420 assert(0);
4421 } else {
4422 itr->need_metadata = 1;
4423 itr->meteringOnly = 0;
4424 }
4425
4426 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4427 request->frame_number = internalFrameNumber;
4428 processCaptureRequest(request, internallyRequestedStreams);
4429
4430 /* Capture 2X frame*/
4431 modified_meta = modified_settings;
4432 expCompensation = GB_HDR_2X_STEP_EV;
4433 aeLock = 1;
4434 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4435 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4436 modified_settings = modified_meta.release();
4437 request->settings = modified_settings;
4438
4439 itr = internallyRequestedStreams.begin();
4440 if (itr == internallyRequestedStreams.end()) {
4441 ALOGE("Error Internally Requested Stream list is empty");
4442 assert(0);
4443 } else {
4444 itr->need_metadata = 0;
4445 itr->meteringOnly = 1;
4446 }
4447 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4448 request->frame_number = internalFrameNumber;
4449 processCaptureRequest(request, internallyRequestedStreams);
4450
4451 itr = internallyRequestedStreams.begin();
4452 if (itr == internallyRequestedStreams.end()) {
4453 ALOGE("Error Internally Requested Stream list is empty");
4454 assert(0);
4455 } else {
4456 itr->need_metadata = 1;
4457 itr->meteringOnly = 0;
4458 }
4459
4460 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4461 request->frame_number = internalFrameNumber;
4462 processCaptureRequest(request, internallyRequestedStreams);
4463
4464
4465 /* Capture 2X on original streaming config*/
4466 internallyRequestedStreams.clear();
4467
4468 /* Restore original settings pointer */
4469 request->settings = original_settings;
4470 } else {
4471 uint32_t internalFrameNumber;
4472 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4473 request->frame_number = internalFrameNumber;
4474 return processCaptureRequest(request, internallyRequestedStreams);
4475 }
4476
4477 return NO_ERROR;
4478}
4479
4480/*===========================================================================
4481 * FUNCTION : orchestrateResult
4482 *
4483 * DESCRIPTION: Orchestrates a capture result to camera service
4484 *
4485 * PARAMETERS :
4486 * @request : request from framework to process
4487 *
4488 * RETURN :
4489 *
4490 *==========================================================================*/
4491void QCamera3HardwareInterface::orchestrateResult(
4492 camera3_capture_result_t *result)
4493{
4494 uint32_t frameworkFrameNumber;
4495 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4496 frameworkFrameNumber);
4497 if (rc != NO_ERROR) {
4498 LOGE("Cannot find translated frameworkFrameNumber");
4499 assert(0);
4500 } else {
4501 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004502            LOGD("Internal request - dropping the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004503 } else {
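            // The result carries the internal frame number; if the metadata holds
            // ANDROID_SYNC_FRAME_NUMBER, rewrite it to the framework frame number
            // before forwarding the result.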
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004504 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004505 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4506 camera_metadata_entry_t entry;
4507 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4508 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004509 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004510 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4511 if (ret != OK)
4512 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004513 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004514 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004515 result->frame_number = frameworkFrameNumber;
4516 mCallbackOps->process_capture_result(mCallbackOps, result);
4517 }
4518 }
4519}
4520
4521/*===========================================================================
4522 * FUNCTION : orchestrateNotify
4523 *
4524 * DESCRIPTION: Orchestrates a notify to camera service
4525 *
4526 * PARAMETERS :
4527 * @request : request from framework to process
4528 *
4529 * RETURN :
4530 *
4531 *==========================================================================*/
4532void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4533{
4534 uint32_t frameworkFrameNumber;
4535 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004536 int32_t rc = NO_ERROR;
4537
4538 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004539 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004540
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004541 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004542 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4543 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4544 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004545 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004546 LOGE("Cannot find translated frameworkFrameNumber");
4547 assert(0);
4548 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004549 }
4550 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004551
4552 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
 4553        LOGD("Internal request - dropping the notifyCb");
4554 } else {
4555 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4556 mCallbackOps->notify(mCallbackOps, notify_msg);
4557 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004558}
4559
4560/*===========================================================================
4561 * FUNCTION : FrameNumberRegistry
4562 *
4563 * DESCRIPTION: Constructor
4564 *
4565 * PARAMETERS :
4566 *
4567 * RETURN :
4568 *
4569 *==========================================================================*/
4570FrameNumberRegistry::FrameNumberRegistry()
4571{
4572 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4573}
4574
4575/*===========================================================================
4576 * FUNCTION : ~FrameNumberRegistry
4577 *
4578 * DESCRIPTION: Destructor
4579 *
4580 * PARAMETERS :
4581 *
4582 * RETURN :
4583 *
4584 *==========================================================================*/
4585FrameNumberRegistry::~FrameNumberRegistry()
4586{
4587}
4588
4589/*===========================================================================
4590 * FUNCTION : PurgeOldEntriesLocked
4591 *
 4592 * DESCRIPTION: Maintenance function to trigger LRU cleanup mechanism
4593 *
4594 * PARAMETERS :
4595 *
4596 * RETURN : NONE
4597 *
4598 *==========================================================================*/
4599void FrameNumberRegistry::purgeOldEntriesLocked()
4600{
4601 while (_register.begin() != _register.end()) {
4602 auto itr = _register.begin();
4603 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4604 _register.erase(itr);
4605 } else {
4606 return;
4607 }
4608 }
4609}
4610
4611/*===========================================================================
4612 * FUNCTION : allocStoreInternalFrameNumber
4613 *
 4614 * DESCRIPTION: Method to record a framework request and associate a newly
 4615 * generated internal frame number with it
4616 *
4617 * PARAMETERS :
4618 * @fFrameNumber: Identifier given by framework
4619 * @internalFN : Output parameter which will have the newly generated internal
4620 * entry
4621 *
4622 * RETURN : Error code
4623 *
4624 *==========================================================================*/
4625int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4626 uint32_t &internalFrameNumber)
4627{
4628 Mutex::Autolock lock(mRegistryLock);
4629 internalFrameNumber = _nextFreeInternalNumber++;
4630 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4631 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4632 purgeOldEntriesLocked();
4633 return NO_ERROR;
4634}
4635
4636/*===========================================================================
4637 * FUNCTION : generateStoreInternalFrameNumber
4638 *
4639 * DESCRIPTION: Method to associate a new internal request number independent
 4640 * of any association with framework requests
4641 *
4642 * PARAMETERS :
 4643 * @internalFrame#: Output parameter which will hold the newly generated internal
 4644 *                  frame number
4645 *
4646 * RETURN : Error code
4647 *
4648 *==========================================================================*/
4649int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4650{
4651 Mutex::Autolock lock(mRegistryLock);
4652 internalFrameNumber = _nextFreeInternalNumber++;
4653 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4654 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4655 purgeOldEntriesLocked();
4656 return NO_ERROR;
4657}
4658
4659/*===========================================================================
4660 * FUNCTION : getFrameworkFrameNumber
4661 *
4662 * DESCRIPTION: Method to query the framework framenumber given an internal #
4663 *
4664 * PARAMETERS :
4665 * @internalFrame#: Internal reference
4666 * @frameworkframenumber: Output parameter holding framework frame entry
4667 *
4668 * RETURN : Error code
4669 *
4670 *==========================================================================*/
4671int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4672 uint32_t &frameworkFrameNumber)
4673{
4674 Mutex::Autolock lock(mRegistryLock);
4675 auto itr = _register.find(internalFrameNumber);
4676 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004677 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004678 return -ENOENT;
4679 }
4680
4681 frameworkFrameNumber = itr->second;
4682 purgeOldEntriesLocked();
4683 return NO_ERROR;
4684}
Thierry Strudel3d639192016-09-09 11:52:26 -07004685
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004686status_t QCamera3HardwareInterface::fillPbStreamConfig(
4687 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4688 QCamera3Channel *channel, uint32_t streamIndex) {
4689 if (config == nullptr) {
4690 LOGE("%s: config is null", __FUNCTION__);
4691 return BAD_VALUE;
4692 }
4693
4694 if (channel == nullptr) {
4695 LOGE("%s: channel is null", __FUNCTION__);
4696 return BAD_VALUE;
4697 }
4698
4699 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4700 if (stream == nullptr) {
4701 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4702 return NAME_NOT_FOUND;
4703 }
4704
4705 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4706 if (streamInfo == nullptr) {
4707 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4708 return NAME_NOT_FOUND;
4709 }
4710
4711 config->id = pbStreamId;
4712 config->image.width = streamInfo->dim.width;
4713 config->image.height = streamInfo->dim.height;
4714 config->image.padding = 0;
4715 config->image.format = pbStreamFormat;
4716
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004717 uint32_t totalPlaneSize = 0;
4718
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004719 // Fill plane information.
4720 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4721 pbcamera::PlaneConfiguration plane;
4722 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4723 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4724 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004725
4726 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004727 }
4728
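    // Padding is the slack between the backend's reported frame length and the
    // sum of per-plane (stride * scanline) sizes.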
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004729 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004730 return OK;
4731}
4732
Thierry Strudel3d639192016-09-09 11:52:26 -07004733/*===========================================================================
4734 * FUNCTION : processCaptureRequest
4735 *
4736 * DESCRIPTION: process a capture request from camera service
4737 *
4738 * PARAMETERS :
4739 * @request : request from framework to process
4740 *
4741 * RETURN :
4742 *
4743 *==========================================================================*/
4744int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004745 camera3_capture_request_t *request,
4746 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004747{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004748 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004749 int rc = NO_ERROR;
4750 int32_t request_id;
4751 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004752 bool isVidBufRequested = false;
4753 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004754 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004755
4756 pthread_mutex_lock(&mMutex);
4757
4758 // Validate current state
4759 switch (mState) {
4760 case CONFIGURED:
4761 case STARTED:
4762 /* valid state */
4763 break;
4764
4765 case ERROR:
4766 pthread_mutex_unlock(&mMutex);
4767 handleCameraDeviceError();
4768 return -ENODEV;
4769
4770 default:
4771 LOGE("Invalid state %d", mState);
4772 pthread_mutex_unlock(&mMutex);
4773 return -ENODEV;
4774 }
4775
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004776 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004777 if (rc != NO_ERROR) {
4778 LOGE("incoming request is not valid");
4779 pthread_mutex_unlock(&mMutex);
4780 return rc;
4781 }
4782
4783 meta = request->settings;
4784
4785 // For first capture request, send capture intent, and
4786 // stream on all streams
4787 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004788 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004789 // send an unconfigure to the backend so that the isp
4790 // resources are deallocated
4791 if (!mFirstConfiguration) {
4792 cam_stream_size_info_t stream_config_info;
4793 int32_t hal_version = CAM_HAL_V3;
4794 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4795 stream_config_info.buffer_info.min_buffers =
4796 MIN_INFLIGHT_REQUESTS;
4797 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004798 m_bIs4KVideo ? 0 :
4799 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004800 clear_metadata_buffer(mParameters);
4801 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4802 CAM_INTF_PARM_HAL_VERSION, hal_version);
4803 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4804 CAM_INTF_META_STREAM_INFO, stream_config_info);
4805 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4806 mParameters);
4807 if (rc < 0) {
4808 LOGE("set_parms for unconfigure failed");
4809 pthread_mutex_unlock(&mMutex);
4810 return rc;
4811 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004812
Thierry Strudel3d639192016-09-09 11:52:26 -07004813 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004814 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004815 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004816 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004817 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004818 property_get("persist.camera.is_type", is_type_value, "4");
4819 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4820 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4821 property_get("persist.camera.is_type_preview", is_type_value, "4");
4822 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4823 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004824
4825 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4826 int32_t hal_version = CAM_HAL_V3;
4827 uint8_t captureIntent =
4828 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4829 mCaptureIntent = captureIntent;
4830 clear_metadata_buffer(mParameters);
4831 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4832 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4833 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004834 if (mFirstConfiguration) {
4835 // configure instant AEC
4836 // Instant AEC is a session based parameter and it is needed only
4837 // once per complete session after open camera.
4838 // i.e. This is set only once for the first capture request, after open camera.
4839 setInstantAEC(meta);
4840 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004841 uint8_t fwkVideoStabMode=0;
4842 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4843 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4844 }
4845
Xue Tuecac74e2017-04-17 13:58:15 -07004846 // If EIS setprop is enabled then only turn it on for video/preview
4847 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004848 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004849 int32_t vsMode;
4850 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4851 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4852 rc = BAD_VALUE;
4853 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004854 LOGD("setEis %d", setEis);
4855 bool eis3Supported = false;
4856 size_t count = IS_TYPE_MAX;
4857 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4858 for (size_t i = 0; i < count; i++) {
4859 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4860 eis3Supported = true;
4861 break;
4862 }
4863 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004864
4865 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004866 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004867 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4868 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004869 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4870 is_type = isTypePreview;
4871 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4872 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4873 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004874 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004875 } else {
4876 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004877 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004878 } else {
4879 is_type = IS_TYPE_NONE;
4880 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004881 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004882 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004883 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4884 }
4885 }
4886
4887 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4888 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4889
Thierry Strudel54dc9782017-02-15 12:12:10 -08004890 //Disable tintless only if the property is set to 0
4891 memset(prop, 0, sizeof(prop));
4892 property_get("persist.camera.tintless.enable", prop, "1");
4893 int32_t tintless_value = atoi(prop);
4894
Thierry Strudel3d639192016-09-09 11:52:26 -07004895 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4896 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004897
Thierry Strudel3d639192016-09-09 11:52:26 -07004898 //Disable CDS for HFR mode or if DIS/EIS is on.
4899 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4900 //after every configure_stream
4901 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4902 (m_bIsVideo)) {
4903 int32_t cds = CAM_CDS_MODE_OFF;
4904 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4905 CAM_INTF_PARM_CDS_MODE, cds))
4906 LOGE("Failed to disable CDS for HFR mode");
4907
4908 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004909
4910 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4911 uint8_t* use_av_timer = NULL;
4912
4913 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004914 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004915 use_av_timer = &m_debug_avtimer;
4916 }
4917 else{
4918 use_av_timer =
4919 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004920 if (use_av_timer) {
4921 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4922 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004923 }
4924
4925 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4926 rc = BAD_VALUE;
4927 }
4928 }
4929
Thierry Strudel3d639192016-09-09 11:52:26 -07004930 setMobicat();
4931
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004932 uint8_t nrMode = 0;
4933 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4934 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4935 }
4936
Thierry Strudel3d639192016-09-09 11:52:26 -07004937 /* Set fps and hfr mode while sending meta stream info so that sensor
4938 * can configure appropriate streaming mode */
4939 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004940 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4941 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004942 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4943 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004944 if (rc == NO_ERROR) {
4945 int32_t max_fps =
4946 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004947 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004948 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4949 }
4950 /* For HFR, more buffers are dequeued upfront to improve the performance */
4951 if (mBatchSize) {
4952 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4953 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4954 }
4955 }
4956 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004957 LOGE("setHalFpsRange failed");
4958 }
4959 }
4960 if (meta.exists(ANDROID_CONTROL_MODE)) {
4961 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4962 rc = extractSceneMode(meta, metaMode, mParameters);
4963 if (rc != NO_ERROR) {
4964 LOGE("extractSceneMode failed");
4965 }
4966 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004967 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004968
Thierry Strudel04e026f2016-10-10 11:27:36 -07004969 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4970 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4971 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4972 rc = setVideoHdrMode(mParameters, vhdr);
4973 if (rc != NO_ERROR) {
 4974                LOGE("setVideoHDR failed");
4975 }
4976 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004977
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004978 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004979 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004980 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004981 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4982 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
4983 sensorModeFullFov)) {
4984 rc = BAD_VALUE;
4985 }
4986 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004987 //TODO: validate the arguments, HSV scenemode should have only the
4988 //advertised fps ranges
4989
4990 /*set the capture intent, hal version, tintless, stream info,
 4991         *and DIS enable parameters to the backend*/
4992 LOGD("set_parms META_STREAM_INFO " );
4993 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004994 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4995 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004996 mStreamConfigInfo.type[i],
4997 mStreamConfigInfo.stream_sizes[i].width,
4998 mStreamConfigInfo.stream_sizes[i].height,
4999 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005000 mStreamConfigInfo.format[i],
5001 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005002 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005003
Thierry Strudel3d639192016-09-09 11:52:26 -07005004 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5005 mParameters);
5006 if (rc < 0) {
5007 LOGE("set_parms failed for hal version, stream info");
5008 }
5009
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005010 cam_sensor_mode_info_t sensorModeInfo = {};
5011 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005012 if (rc != NO_ERROR) {
 5013            LOGE("Failed to get sensor mode info");
5014 pthread_mutex_unlock(&mMutex);
5015 goto error_exit;
5016 }
5017
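        // Update the crop region mapper with both the full active array size and
        // the active array size of the selected sensor mode, so crop regions can
        // be translated between the two coordinate spaces.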
5018 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5019 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005020 sensorModeInfo.active_array_size.width,
5021 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005022
5023 /* Set batchmode before initializing channel. Since registerBuffer
5024 * internally initializes some of the channels, better set batchmode
5025 * even before first register buffer */
5026 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5027 it != mStreamInfo.end(); it++) {
5028 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5029 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5030 && mBatchSize) {
5031 rc = channel->setBatchSize(mBatchSize);
5032 //Disable per frame map unmap for HFR/batchmode case
5033 rc |= channel->setPerFrameMapUnmap(false);
5034 if (NO_ERROR != rc) {
5035 LOGE("Channel init failed %d", rc);
5036 pthread_mutex_unlock(&mMutex);
5037 goto error_exit;
5038 }
5039 }
5040 }
5041
5042 //First initialize all streams
5043 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5044 it != mStreamInfo.end(); it++) {
5045 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005046
5047 /* Initial value of NR mode is needed before stream on */
5048 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005049 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5050 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005051 setEis) {
5052 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5053 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5054 is_type = mStreamConfigInfo.is_type[i];
5055 break;
5056 }
5057 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005058 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005059 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005060 rc = channel->initialize(IS_TYPE_NONE);
5061 }
5062 if (NO_ERROR != rc) {
5063 LOGE("Channel initialization failed %d", rc);
5064 pthread_mutex_unlock(&mMutex);
5065 goto error_exit;
5066 }
5067 }
5068
5069 if (mRawDumpChannel) {
5070 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5071 if (rc != NO_ERROR) {
5072 LOGE("Error: Raw Dump Channel init failed");
5073 pthread_mutex_unlock(&mMutex);
5074 goto error_exit;
5075 }
5076 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005077 if (mHdrPlusRawSrcChannel) {
5078 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5079 if (rc != NO_ERROR) {
5080 LOGE("Error: HDR+ RAW Source Channel init failed");
5081 pthread_mutex_unlock(&mMutex);
5082 goto error_exit;
5083 }
5084 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005085 if (mSupportChannel) {
5086 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5087 if (rc < 0) {
5088 LOGE("Support channel initialization failed");
5089 pthread_mutex_unlock(&mMutex);
5090 goto error_exit;
5091 }
5092 }
5093 if (mAnalysisChannel) {
5094 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5095 if (rc < 0) {
5096 LOGE("Analysis channel initialization failed");
5097 pthread_mutex_unlock(&mMutex);
5098 goto error_exit;
5099 }
5100 }
5101 if (mDummyBatchChannel) {
5102 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5103 if (rc < 0) {
5104 LOGE("mDummyBatchChannel setBatchSize failed");
5105 pthread_mutex_unlock(&mMutex);
5106 goto error_exit;
5107 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005108 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005109 if (rc < 0) {
5110 LOGE("mDummyBatchChannel initialization failed");
5111 pthread_mutex_unlock(&mMutex);
5112 goto error_exit;
5113 }
5114 }
5115
5116 // Set bundle info
5117 rc = setBundleInfo();
5118 if (rc < 0) {
5119 LOGE("setBundleInfo failed %d", rc);
5120 pthread_mutex_unlock(&mMutex);
5121 goto error_exit;
5122 }
5123
5124 //update settings from app here
5125 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5126 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5127 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5128 }
5129 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5130 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5131 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5132 }
5133 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5134 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5135 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5136
5137 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5138 (mLinkedCameraId != mCameraId) ) {
5139 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5140 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005141 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005142 goto error_exit;
5143 }
5144 }
5145
5146 // add bundle related cameras
5147 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5148 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
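            // Fill the dual camera bundle command: mark this session as primary
            // (bayer) or secondary (mono), point it at the linked camera's backend
            // session id, and turn related-sensor sync on or off per the link flag.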
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005149 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5150 &m_pDualCamCmdPtr->bundle_info;
5151 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005152 if (mIsDeviceLinked)
5153 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5154 else
5155 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5156
5157 pthread_mutex_lock(&gCamLock);
5158
5159 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5160 LOGE("Dualcam: Invalid Session Id ");
5161 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005162 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005163 goto error_exit;
5164 }
5165
5166 if (mIsMainCamera == 1) {
5167 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5168 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005169 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005170 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005171 // related session id should be session id of linked session
5172 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5173 } else {
5174 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5175 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005176 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005177 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005178 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5179 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005180 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005181 pthread_mutex_unlock(&gCamLock);
5182
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005183 rc = mCameraHandle->ops->set_dual_cam_cmd(
5184 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005185 if (rc < 0) {
5186 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005187 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005188 goto error_exit;
5189 }
5190 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005191 goto no_error;
5192error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005193 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005194 return rc;
5195no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005196 mWokenUpByDaemon = false;
5197 mPendingLiveRequest = 0;
5198 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005199 }
5200
5201 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005202 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005203
5204 if (mFlushPerf) {
5205 //we cannot accept any requests during flush
5206 LOGE("process_capture_request cannot proceed during flush");
5207 pthread_mutex_unlock(&mMutex);
5208 return NO_ERROR; //should return an error
5209 }
5210
5211 if (meta.exists(ANDROID_REQUEST_ID)) {
5212 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5213 mCurrentRequestId = request_id;
5214 LOGD("Received request with id: %d", request_id);
5215 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5216 LOGE("Unable to find request id field, \
5217 & no previous id available");
5218 pthread_mutex_unlock(&mMutex);
5219 return NAME_NOT_FOUND;
5220 } else {
5221 LOGD("Re-using old request id");
5222 request_id = mCurrentRequestId;
5223 }
5224
5225 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5226 request->num_output_buffers,
5227 request->input_buffer,
5228 frameNumber);
5229 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005230 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005231 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005232 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005233 uint32_t snapshotStreamId = 0;
5234 for (size_t i = 0; i < request->num_output_buffers; i++) {
5235 const camera3_stream_buffer_t& output = request->output_buffers[i];
5236 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5237
Emilian Peev7650c122017-01-19 08:24:33 -08005238 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5239 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005240            //FIXME: Call a function to store a local copy of the JPEG data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005241 blob_request = 1;
5242 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5243 }
5244
5245 if (output.acquire_fence != -1) {
5246 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5247 close(output.acquire_fence);
5248 if (rc != OK) {
5249 LOGE("sync wait failed %d", rc);
5250 pthread_mutex_unlock(&mMutex);
5251 return rc;
5252 }
5253 }
5254
Emilian Peev0f3c3162017-03-15 12:57:46 +00005255 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5256 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005257 depthRequestPresent = true;
5258 continue;
5259 }
5260
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005261 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005262 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005263
5264 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5265 isVidBufRequested = true;
5266 }
5267 }
5268
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005269    //FIXME: Add checks in validateCaptureRequest to ensure there are no duplicate streams
5270 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5271 itr++) {
5272 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5273 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5274 channel->getStreamID(channel->getStreamTypeMask());
5275
5276 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5277 isVidBufRequested = true;
5278 }
5279 }
5280
Thierry Strudel3d639192016-09-09 11:52:26 -07005281 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005282 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005283 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005284 }
5285 if (blob_request && mRawDumpChannel) {
5286 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005287 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005288 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005289 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005290 }
5291
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005292 {
5293 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5294 // Request a RAW buffer if
 5296        // 2. frameNumber is a multiple of kHdrPlusRawPeriod (in order to limit the RAW capture rate).
5296 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5297 // 3. There is no pending HDR+ request.
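        // Illustrative example (the actual value of kHdrPlusRawPeriod is defined
        // elsewhere): if kHdrPlusRawPeriod were 4, a RAW buffer would be requested
        // only on frame numbers 0, 4, 8, ... and only while no HDR+ request is pending.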
5298 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5299 mHdrPlusPendingRequests.size() == 0) {
5300 streamsArray.stream_request[streamsArray.num_streams].streamID =
5301 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5302 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5303 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005304 }
5305
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005306 //extract capture intent
5307 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5308 mCaptureIntent =
5309 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5310 }
5311
5312 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5313 mCacMode =
5314 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5315 }
5316
5317 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005318 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005319
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005320 {
5321 Mutex::Autolock l(gHdrPlusClientLock);
5322 // If this request has a still capture intent, try to submit an HDR+ request.
5323 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5324 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5325 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5326 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005327 }
5328
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005329 if (hdrPlusRequest) {
5330 // For a HDR+ request, just set the frame parameters.
5331 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5332 if (rc < 0) {
5333 LOGE("fail to set frame parameters");
5334 pthread_mutex_unlock(&mMutex);
5335 return rc;
5336 }
5337 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005338 /* Parse the settings:
5339 * - For every request in NORMAL MODE
5340 * - For every request in HFR mode during preview only case
5341 * - For first request of every batch in HFR mode during video
 5342         * recording. In batch mode the same settings, except the frame number, are
 5343         * repeated in each request of the batch.
5344 */
5345 if (!mBatchSize ||
5346 (mBatchSize && !isVidBufRequested) ||
5347 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005348 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005349 if (rc < 0) {
5350 LOGE("fail to set frame parameters");
5351 pthread_mutex_unlock(&mMutex);
5352 return rc;
5353 }
5354 }
5355 /* For batchMode HFR, setFrameParameters is not called for every
 5356         * request; only the frame number of the latest request is parsed.
 5357         * Keep track of the first and last frame numbers in a batch so that
 5358         * metadata for the batch's frame numbers can be duplicated in
 5359         * handleBatchMetadata */
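        /* Illustrative example: with mBatchSize == 4 and a batch covering frame
         * numbers 100..103, setFrameParameters() runs only for frame 100;
         * mFirstFrameNumberInBatch records 100 and handleBatchMetadata() later
         * duplicates the single metadata callback across frames 100..103. */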
5360 if (mBatchSize) {
5361 if (!mToBeQueuedVidBufs) {
5362 //start of the batch
5363 mFirstFrameNumberInBatch = request->frame_number;
5364 }
5365 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5366 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5367 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005368 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005369 return BAD_VALUE;
5370 }
5371 }
5372 if (mNeedSensorRestart) {
5373 /* Unlock the mutex as restartSensor waits on the channels to be
5374 * stopped, which in turn calls stream callback functions -
5375 * handleBufferWithLock and handleMetadataWithLock */
5376 pthread_mutex_unlock(&mMutex);
5377 rc = dynamicUpdateMetaStreamInfo();
5378 if (rc != NO_ERROR) {
5379 LOGE("Restarting the sensor failed");
5380 return BAD_VALUE;
5381 }
5382 mNeedSensorRestart = false;
5383 pthread_mutex_lock(&mMutex);
5384 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005385 if(mResetInstantAEC) {
5386 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5387 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5388 mResetInstantAEC = false;
5389 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005390 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005391 if (request->input_buffer->acquire_fence != -1) {
5392 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5393 close(request->input_buffer->acquire_fence);
5394 if (rc != OK) {
5395 LOGE("input buffer sync wait failed %d", rc);
5396 pthread_mutex_unlock(&mMutex);
5397 return rc;
5398 }
5399 }
5400 }
5401
5402 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5403 mLastCustIntentFrmNum = frameNumber;
5404 }
5405 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005406 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005407 pendingRequestIterator latestRequest;
5408 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005409 pendingRequest.num_buffers = depthRequestPresent ?
5410 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005411 pendingRequest.request_id = request_id;
5412 pendingRequest.blob_request = blob_request;
5413 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005414 if (request->input_buffer) {
5415 pendingRequest.input_buffer =
5416 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5417 *(pendingRequest.input_buffer) = *(request->input_buffer);
5418 pInputBuffer = pendingRequest.input_buffer;
5419 } else {
5420 pendingRequest.input_buffer = NULL;
5421 pInputBuffer = NULL;
5422 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005423 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005424
5425 pendingRequest.pipeline_depth = 0;
5426 pendingRequest.partial_result_cnt = 0;
5427 extractJpegMetadata(mCurJpegMeta, request);
5428 pendingRequest.jpegMetadata = mCurJpegMeta;
5429 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005430 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005431 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5432 mHybridAeEnable =
5433 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5434 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005435
5436 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5437 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005438 /* DevCamDebug metadata processCaptureRequest */
5439 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5440 mDevCamDebugMetaEnable =
5441 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5442 }
5443 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5444 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005445
5446 //extract CAC info
5447 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5448 mCacMode =
5449 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5450 }
5451 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005452 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005453
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005454 // extract enableZsl info
5455 if (gExposeEnableZslKey) {
5456 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5457 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5458 mZslEnabled = pendingRequest.enableZsl;
5459 } else {
5460 pendingRequest.enableZsl = mZslEnabled;
5461 }
5462 }
5463
Thierry Strudel3d639192016-09-09 11:52:26 -07005464 PendingBuffersInRequest bufsForCurRequest;
5465 bufsForCurRequest.frame_number = frameNumber;
5466 // Mark current timestamp for the new request
5467 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005468 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005469
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005470 if (hdrPlusRequest) {
5471 // Save settings for this request.
5472 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5473 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5474
5475 // Add to pending HDR+ request queue.
5476 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5477 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5478
5479 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5480 }
5481
Thierry Strudel3d639192016-09-09 11:52:26 -07005482 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005483 if ((request->output_buffers[i].stream->data_space ==
5484 HAL_DATASPACE_DEPTH) &&
5485 (HAL_PIXEL_FORMAT_BLOB ==
5486 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005487 continue;
5488 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005489 RequestedBufferInfo requestedBuf;
5490 memset(&requestedBuf, 0, sizeof(requestedBuf));
5491 requestedBuf.stream = request->output_buffers[i].stream;
5492 requestedBuf.buffer = NULL;
5493 pendingRequest.buffers.push_back(requestedBuf);
5494
5495 // Add to buffer handle the pending buffers list
5496 PendingBufferInfo bufferInfo;
5497 bufferInfo.buffer = request->output_buffers[i].buffer;
5498 bufferInfo.stream = request->output_buffers[i].stream;
5499 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5500 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5501 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5502 frameNumber, bufferInfo.buffer,
5503 channel->getStreamTypeMask(), bufferInfo.stream->format);
5504 }
5505 // Add this request packet into mPendingBuffersMap
5506 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5507 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5508 mPendingBuffersMap.get_num_overall_buffers());
5509
5510 latestRequest = mPendingRequestsList.insert(
5511 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005512
5513 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5514 // for the frame number.
5515 mShutterDispatcher.expectShutter(frameNumber);
5516 for (size_t i = 0; i < request->num_output_buffers; i++) {
5517 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5518 }
5519
Thierry Strudel3d639192016-09-09 11:52:26 -07005520 if(mFlush) {
5521 LOGI("mFlush is true");
5522 pthread_mutex_unlock(&mMutex);
5523 return NO_ERROR;
5524 }
5525
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005526 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5527 // channel.
5528 if (!hdrPlusRequest) {
5529 int indexUsed;
 5530        // Notify the metadata channel that we received a request
5531 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005532
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005533 if(request->input_buffer != NULL){
5534 LOGD("Input request, frame_number %d", frameNumber);
5535 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5536 if (NO_ERROR != rc) {
5537 LOGE("fail to set reproc parameters");
5538 pthread_mutex_unlock(&mMutex);
5539 return rc;
5540 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005541 }
5542
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005543 // Call request on other streams
5544 uint32_t streams_need_metadata = 0;
5545 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5546 for (size_t i = 0; i < request->num_output_buffers; i++) {
5547 const camera3_stream_buffer_t& output = request->output_buffers[i];
5548 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5549
5550 if (channel == NULL) {
5551 LOGW("invalid channel pointer for stream");
5552 continue;
5553 }
5554
5555 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5556 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5557 output.buffer, request->input_buffer, frameNumber);
5558 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005559 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005560 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5561 if (rc < 0) {
5562 LOGE("Fail to request on picture channel");
5563 pthread_mutex_unlock(&mMutex);
5564 return rc;
5565 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005566 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005567 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5568 assert(NULL != mDepthChannel);
5569 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005570
Emilian Peev7650c122017-01-19 08:24:33 -08005571 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5572 if (rc < 0) {
5573 LOGE("Fail to map on depth buffer");
5574 pthread_mutex_unlock(&mMutex);
5575 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005576 }
Emilian Peev7650c122017-01-19 08:24:33 -08005577 } else {
5578 LOGD("snapshot request with buffer %p, frame_number %d",
5579 output.buffer, frameNumber);
5580 if (!request->settings) {
5581 rc = channel->request(output.buffer, frameNumber,
5582 NULL, mPrevParameters, indexUsed);
5583 } else {
5584 rc = channel->request(output.buffer, frameNumber,
5585 NULL, mParameters, indexUsed);
5586 }
5587 if (rc < 0) {
5588 LOGE("Fail to request on picture channel");
5589 pthread_mutex_unlock(&mMutex);
5590 return rc;
5591 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005592
Emilian Peev7650c122017-01-19 08:24:33 -08005593 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5594 uint32_t j = 0;
5595 for (j = 0; j < streamsArray.num_streams; j++) {
5596 if (streamsArray.stream_request[j].streamID == streamId) {
5597 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5598 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5599 else
5600 streamsArray.stream_request[j].buf_index = indexUsed;
5601 break;
5602 }
5603 }
5604 if (j == streamsArray.num_streams) {
5605 LOGE("Did not find matching stream to update index");
5606 assert(0);
5607 }
5608
5609 pendingBufferIter->need_metadata = true;
5610 streams_need_metadata++;
5611 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005612 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005613 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5614 bool needMetadata = false;
5615 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5616 rc = yuvChannel->request(output.buffer, frameNumber,
5617 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5618 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005619 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005620 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005621 pthread_mutex_unlock(&mMutex);
5622 return rc;
5623 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005624
5625 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5626 uint32_t j = 0;
5627 for (j = 0; j < streamsArray.num_streams; j++) {
5628 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005629 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5630 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5631 else
5632 streamsArray.stream_request[j].buf_index = indexUsed;
5633 break;
5634 }
5635 }
5636 if (j == streamsArray.num_streams) {
5637 LOGE("Did not find matching stream to update index");
5638 assert(0);
5639 }
5640
5641 pendingBufferIter->need_metadata = needMetadata;
5642 if (needMetadata)
5643 streams_need_metadata += 1;
5644 LOGD("calling YUV channel request, need_metadata is %d",
5645 needMetadata);
5646 } else {
5647 LOGD("request with buffer %p, frame_number %d",
5648 output.buffer, frameNumber);
5649
5650 rc = channel->request(output.buffer, frameNumber, indexUsed);
5651
5652 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5653 uint32_t j = 0;
5654 for (j = 0; j < streamsArray.num_streams; j++) {
5655 if (streamsArray.stream_request[j].streamID == streamId) {
5656 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5657 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5658 else
5659 streamsArray.stream_request[j].buf_index = indexUsed;
5660 break;
5661 }
5662 }
5663 if (j == streamsArray.num_streams) {
5664 LOGE("Did not find matching stream to update index");
5665 assert(0);
5666 }
5667
5668 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5669 && mBatchSize) {
5670 mToBeQueuedVidBufs++;
5671 if (mToBeQueuedVidBufs == mBatchSize) {
5672 channel->queueBatchBuf();
5673 }
5674 }
5675 if (rc < 0) {
5676 LOGE("request failed");
5677 pthread_mutex_unlock(&mMutex);
5678 return rc;
5679 }
5680 }
5681 pendingBufferIter++;
5682 }
5683
5684 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5685 itr++) {
5686 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5687
5688 if (channel == NULL) {
5689 LOGE("invalid channel pointer for stream");
5690 assert(0);
5691 return BAD_VALUE;
5692 }
5693
5694 InternalRequest requestedStream;
5695 requestedStream = (*itr);
5696
5697
5698 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5699 LOGD("snapshot request internally input buffer %p, frame_number %d",
5700 request->input_buffer, frameNumber);
5701 if(request->input_buffer != NULL){
5702 rc = channel->request(NULL, frameNumber,
5703 pInputBuffer, &mReprocMeta, indexUsed, true,
5704 requestedStream.meteringOnly);
5705 if (rc < 0) {
5706 LOGE("Fail to request on picture channel");
5707 pthread_mutex_unlock(&mMutex);
5708 return rc;
5709 }
5710 } else {
5711 LOGD("snapshot request with frame_number %d", frameNumber);
5712 if (!request->settings) {
5713 rc = channel->request(NULL, frameNumber,
5714 NULL, mPrevParameters, indexUsed, true,
5715 requestedStream.meteringOnly);
5716 } else {
5717 rc = channel->request(NULL, frameNumber,
5718 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5719 }
5720 if (rc < 0) {
5721 LOGE("Fail to request on picture channel");
5722 pthread_mutex_unlock(&mMutex);
5723 return rc;
5724 }
5725
5726 if ((*itr).meteringOnly != 1) {
5727 requestedStream.need_metadata = 1;
5728 streams_need_metadata++;
5729 }
5730 }
5731
5732 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5733 uint32_t j = 0;
5734 for (j = 0; j < streamsArray.num_streams; j++) {
5735 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005736 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5737 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5738 else
5739 streamsArray.stream_request[j].buf_index = indexUsed;
5740 break;
5741 }
5742 }
5743 if (j == streamsArray.num_streams) {
5744 LOGE("Did not find matching stream to update index");
5745 assert(0);
5746 }
5747
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005748 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005749 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005750 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005751 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005752 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005753 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005754 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005755
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005756        //If two streams have need_metadata set to true, fail the request, unless
 5757        //we copy or reference-count the metadata buffer
5758 if (streams_need_metadata > 1) {
5759 LOGE("not supporting request in which two streams requires"
5760 " 2 HAL metadata for reprocessing");
5761 pthread_mutex_unlock(&mMutex);
5762 return -EINVAL;
5763 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005764
Emilian Peev656e4fa2017-06-02 16:47:04 +01005765 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5766 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5767 if (depthRequestPresent && mDepthChannel) {
5768 if (request->settings) {
5769 camera_metadata_ro_entry entry;
5770 if (find_camera_metadata_ro_entry(request->settings,
5771 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5772 if (entry.data.u8[0]) {
5773 pdafEnable = CAM_PD_DATA_ENABLED;
5774 } else {
5775 pdafEnable = CAM_PD_DATA_SKIP;
5776 }
5777 mDepthCloudMode = pdafEnable;
5778 } else {
5779 pdafEnable = mDepthCloudMode;
5780 }
5781 } else {
5782 pdafEnable = mDepthCloudMode;
5783 }
5784 }
5785
Emilian Peev7650c122017-01-19 08:24:33 -08005786 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5787 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5788 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5789 pthread_mutex_unlock(&mMutex);
5790 return BAD_VALUE;
5791 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005792
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005793 if (request->input_buffer == NULL) {
5794 /* Set the parameters to backend:
5795 * - For every request in NORMAL MODE
5796 * - For every request in HFR mode during preview only case
5797 * - Once every batch in HFR mode during video recording
5798 */
5799 if (!mBatchSize ||
5800 (mBatchSize && !isVidBufRequested) ||
5801 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5802 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5803 mBatchSize, isVidBufRequested,
5804 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005805
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005806 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5807 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5808 uint32_t m = 0;
5809 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5810 if (streamsArray.stream_request[k].streamID ==
5811 mBatchedStreamsArray.stream_request[m].streamID)
5812 break;
5813 }
5814 if (m == mBatchedStreamsArray.num_streams) {
5815 mBatchedStreamsArray.stream_request\
5816 [mBatchedStreamsArray.num_streams].streamID =
5817 streamsArray.stream_request[k].streamID;
5818 mBatchedStreamsArray.stream_request\
5819 [mBatchedStreamsArray.num_streams].buf_index =
5820 streamsArray.stream_request[k].buf_index;
5821 mBatchedStreamsArray.num_streams =
5822 mBatchedStreamsArray.num_streams + 1;
5823 }
5824 }
5825 streamsArray = mBatchedStreamsArray;
5826 }
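                // The loop above merges every stream requested during this batch into
                // mBatchedStreamsArray, de-duplicated by streamID, so that the single
                // set_parms() call below covers all streams touched by the batch.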
5827 /* Update stream id of all the requested buffers */
5828 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5829 streamsArray)) {
5830 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005831 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005832 return BAD_VALUE;
5833 }
5834
5835 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5836 mParameters);
5837 if (rc < 0) {
5838 LOGE("set_parms failed");
5839 }
 5840            /* reset to zero because the batch is queued */
5841 mToBeQueuedVidBufs = 0;
5842 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5843 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5844 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005845 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5846 uint32_t m = 0;
5847 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5848 if (streamsArray.stream_request[k].streamID ==
5849 mBatchedStreamsArray.stream_request[m].streamID)
5850 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005851 }
5852 if (m == mBatchedStreamsArray.num_streams) {
5853 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5854 streamID = streamsArray.stream_request[k].streamID;
5855 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5856 buf_index = streamsArray.stream_request[k].buf_index;
5857 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5858 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005859 }
5860 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005861 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005862
5863 // Start all streams after the first setting is sent, so that the
5864 // setting can be applied sooner: (0 + apply_delay)th frame.
5865 if (mState == CONFIGURED && mChannelHandle) {
5866 //Then start them.
5867 LOGH("Start META Channel");
5868 rc = mMetadataChannel->start();
5869 if (rc < 0) {
5870 LOGE("META channel start failed");
5871 pthread_mutex_unlock(&mMutex);
5872 return rc;
5873 }
5874
5875 if (mAnalysisChannel) {
5876 rc = mAnalysisChannel->start();
5877 if (rc < 0) {
5878 LOGE("Analysis channel start failed");
5879 mMetadataChannel->stop();
5880 pthread_mutex_unlock(&mMutex);
5881 return rc;
5882 }
5883 }
5884
5885 if (mSupportChannel) {
5886 rc = mSupportChannel->start();
5887 if (rc < 0) {
5888 LOGE("Support channel start failed");
5889 mMetadataChannel->stop();
 5890                    /* Although support and analysis are mutually exclusive today,
 5891                       add it in any case for future-proofing */
5892 if (mAnalysisChannel) {
5893 mAnalysisChannel->stop();
5894 }
5895 pthread_mutex_unlock(&mMutex);
5896 return rc;
5897 }
5898 }
5899 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5900 it != mStreamInfo.end(); it++) {
5901 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5902 LOGH("Start Processing Channel mask=%d",
5903 channel->getStreamTypeMask());
5904 rc = channel->start();
5905 if (rc < 0) {
5906 LOGE("channel start failed");
5907 pthread_mutex_unlock(&mMutex);
5908 return rc;
5909 }
5910 }
5911
5912 if (mRawDumpChannel) {
5913 LOGD("Starting raw dump stream");
5914 rc = mRawDumpChannel->start();
5915 if (rc != NO_ERROR) {
5916 LOGE("Error Starting Raw Dump Channel");
5917 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5918 it != mStreamInfo.end(); it++) {
5919 QCamera3Channel *channel =
5920 (QCamera3Channel *)(*it)->stream->priv;
5921 LOGH("Stopping Processing Channel mask=%d",
5922 channel->getStreamTypeMask());
5923 channel->stop();
5924 }
5925 if (mSupportChannel)
5926 mSupportChannel->stop();
5927 if (mAnalysisChannel) {
5928 mAnalysisChannel->stop();
5929 }
5930 mMetadataChannel->stop();
5931 pthread_mutex_unlock(&mMutex);
5932 return rc;
5933 }
5934 }
5935
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005936 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005937 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005938 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005939 if (rc != NO_ERROR) {
5940 LOGE("start_channel failed %d", rc);
5941 pthread_mutex_unlock(&mMutex);
5942 return rc;
5943 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005944
5945 {
5946 // Configure Easel for stream on.
5947 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005948
5949 // Now that sensor mode should have been selected, get the selected sensor mode
5950 // info.
5951 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5952 getCurrentSensorModeInfo(mSensorModeInfo);
5953
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005954 if (EaselManagerClientOpened) {
5955 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chena6c99062017-05-23 13:45:06 -07005956 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
5957 /*enableIpu*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005958 if (rc != OK) {
5959 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5960 mCameraId, mSensorModeInfo.op_pixel_clk);
5961 pthread_mutex_unlock(&mMutex);
5962 return rc;
5963 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005964 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005965 }
5966 }
5967
5968 // Start sensor streaming.
5969 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5970 mChannelHandle);
5971 if (rc != NO_ERROR) {
5972 LOGE("start_sensor_stream_on failed %d", rc);
5973 pthread_mutex_unlock(&mMutex);
5974 return rc;
5975 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005976 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005977 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005978 }
5979
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005980 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen3b630e52017-06-02 15:39:47 -07005981 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || mCameraId == 0) {
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005982 Mutex::Autolock l(gHdrPlusClientLock);
5983 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5984 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5985 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5986 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5987 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5988 rc = enableHdrPlusModeLocked();
5989 if (rc != OK) {
5990 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
5991 pthread_mutex_unlock(&mMutex);
5992 return rc;
5993 }
5994
5995 mFirstPreviewIntentSeen = true;
5996 }
5997 }
5998
Thierry Strudel3d639192016-09-09 11:52:26 -07005999 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6000
6001 mState = STARTED;
6002 // Added a timed condition wait
6003 struct timespec ts;
6004 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006005 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006006 if (rc < 0) {
6007 isValidTimeout = 0;
 6008        LOGE("Error reading the monotonic clock!!");
6009 }
6010 else {
 6011        // Set the timeout to 5 sec for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006012 int64_t timeout = 5;
6013 {
6014 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6015 // If there is a pending HDR+ request, the following requests may be blocked until the
6016 // HDR+ request is done. So allow a longer timeout.
6017 if (mHdrPlusPendingRequests.size() > 0) {
6018 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6019 }
6020 }
6021 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006022 }
6023 //Block on conditional variable
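    // Sketch of the throttling behavior: the wait below blocks this caller while
    // mPendingLiveRequest >= mMinInFlightRequests (and no input buffer is attached);
    // a wake-up from the backend daemon lets it proceed early as long as fewer than
    // mMaxInFlightRequests requests are still in flight.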
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006024 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006025 (mState != ERROR) && (mState != DEINIT)) {
6026 if (!isValidTimeout) {
6027 LOGD("Blocking on conditional wait");
6028 pthread_cond_wait(&mRequestCond, &mMutex);
6029 }
6030 else {
6031 LOGD("Blocking on timed conditional wait");
6032 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6033 if (rc == ETIMEDOUT) {
6034 rc = -ENODEV;
6035 LOGE("Unblocked on timeout!!!!");
6036 break;
6037 }
6038 }
6039 LOGD("Unblocked");
6040 if (mWokenUpByDaemon) {
6041 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006042 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006043 break;
6044 }
6045 }
6046 pthread_mutex_unlock(&mMutex);
6047
6048 return rc;
6049}
6050
6051/*===========================================================================
6052 * FUNCTION : dump
6053 *
 6054 * DESCRIPTION: Dump HAL3 state (pending request list, pending buffer map and
 6055 *              pending frame drop list) to the given file descriptor
 6056 * PARAMETERS :
 6057 *   @fd : file descriptor to write the dump output to
 6058 *
 6059 * RETURN : None
6060 *==========================================================================*/
6061void QCamera3HardwareInterface::dump(int fd)
6062{
6063 pthread_mutex_lock(&mMutex);
6064 dprintf(fd, "\n Camera HAL3 information Begin \n");
6065
6066 dprintf(fd, "\nNumber of pending requests: %zu \n",
6067 mPendingRequestsList.size());
6068 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6069 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6070 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6071 for(pendingRequestIterator i = mPendingRequestsList.begin();
6072 i != mPendingRequestsList.end(); i++) {
6073 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6074 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6075 i->input_buffer);
6076 }
6077 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6078 mPendingBuffersMap.get_num_overall_buffers());
6079 dprintf(fd, "-------+------------------\n");
6080 dprintf(fd, " Frame | Stream type mask \n");
6081 dprintf(fd, "-------+------------------\n");
6082 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6083 for(auto &j : req.mPendingBufferList) {
6084 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6085 dprintf(fd, " %5d | %11d \n",
6086 req.frame_number, channel->getStreamTypeMask());
6087 }
6088 }
6089 dprintf(fd, "-------+------------------\n");
6090
6091 dprintf(fd, "\nPending frame drop list: %zu\n",
6092 mPendingFrameDropList.size());
6093 dprintf(fd, "-------+-----------\n");
6094 dprintf(fd, " Frame | Stream ID \n");
6095 dprintf(fd, "-------+-----------\n");
6096 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6097 i != mPendingFrameDropList.end(); i++) {
6098 dprintf(fd, " %5d | %9d \n",
6099 i->frame_number, i->stream_ID);
6100 }
6101 dprintf(fd, "-------+-----------\n");
6102
6103 dprintf(fd, "\n Camera HAL3 information End \n");
6104
6105 /* use dumpsys media.camera as trigger to send update debug level event */
6106 mUpdateDebugLevel = true;
6107 pthread_mutex_unlock(&mMutex);
6108 return;
6109}
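// Usage note: this dump is typically reached via "adb shell dumpsys media.camera";
// as noted above, the same trigger is also used (via mUpdateDebugLevel) to push an
// updated debug level to the backend.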
6110
6111/*===========================================================================
6112 * FUNCTION : flush
6113 *
6114 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6115 * conditionally restarts channels
6116 *
6117 * PARAMETERS :
 6118 *   @restartChannels: re-start all channels
6119 *
6120 *
6121 * RETURN :
6122 * 0 on success
6123 * Error code on failure
6124 *==========================================================================*/
6125int QCamera3HardwareInterface::flush(bool restartChannels)
6126{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006127 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006128 int32_t rc = NO_ERROR;
6129
6130 LOGD("Unblocking Process Capture Request");
6131 pthread_mutex_lock(&mMutex);
6132 mFlush = true;
6133 pthread_mutex_unlock(&mMutex);
6134
6135 rc = stopAllChannels();
6136 // unlink of dualcam
6137 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006138 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6139 &m_pDualCamCmdPtr->bundle_info;
6140 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006141 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6142 pthread_mutex_lock(&gCamLock);
6143
6144 if (mIsMainCamera == 1) {
6145 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6146 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006147 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006148 // related session id should be session id of linked session
6149 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6150 } else {
6151 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6152 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006153 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006154 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6155 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006156 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006157 pthread_mutex_unlock(&gCamLock);
6158
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006159 rc = mCameraHandle->ops->set_dual_cam_cmd(
6160 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006161 if (rc < 0) {
6162 LOGE("Dualcam: Unlink failed, but still proceed to close");
6163 }
6164 }
6165
6166 if (rc < 0) {
6167 LOGE("stopAllChannels failed");
6168 return rc;
6169 }
6170 if (mChannelHandle) {
6171 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6172 mChannelHandle);
6173 }
6174
6175 // Reset bundle info
6176 rc = setBundleInfo();
6177 if (rc < 0) {
6178 LOGE("setBundleInfo failed %d", rc);
6179 return rc;
6180 }
6181
6182 // Mutex Lock
6183 pthread_mutex_lock(&mMutex);
6184
6185 // Unblock process_capture_request
6186 mPendingLiveRequest = 0;
6187 pthread_cond_signal(&mRequestCond);
6188
6189 rc = notifyErrorForPendingRequests();
6190 if (rc < 0) {
6191 LOGE("notifyErrorForPendingRequests failed");
6192 pthread_mutex_unlock(&mMutex);
6193 return rc;
6194 }
6195
6196 mFlush = false;
6197
6198 // Start the Streams/Channels
6199 if (restartChannels) {
6200 rc = startAllChannels();
6201 if (rc < 0) {
6202 LOGE("startAllChannels failed");
6203 pthread_mutex_unlock(&mMutex);
6204 return rc;
6205 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006206 if (mChannelHandle) {
6207 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006208 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006209 if (rc < 0) {
6210 LOGE("start_channel failed");
6211 pthread_mutex_unlock(&mMutex);
6212 return rc;
6213 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006214 }
6215 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006216 pthread_mutex_unlock(&mMutex);
6217
6218 return 0;
6219}
6220
6221/*===========================================================================
6222 * FUNCTION : flushPerf
6223 *
6224 * DESCRIPTION: This is the performance optimization version of flush that does
 6225 *              not use stream off; instead it flushes the backend and waits for pending buffers to return
6226 *
6227 * PARAMETERS :
6228 *
6229 *
6230 * RETURN : 0 : success
6231 * -EINVAL: input is malformed (device is not valid)
6232 * -ENODEV: if the device has encountered a serious error
6233 *==========================================================================*/
6234int QCamera3HardwareInterface::flushPerf()
6235{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006236 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006237 int32_t rc = 0;
6238 struct timespec timeout;
6239 bool timed_wait = false;
6240
6241 pthread_mutex_lock(&mMutex);
6242 mFlushPerf = true;
6243 mPendingBuffersMap.numPendingBufsAtFlush =
6244 mPendingBuffersMap.get_num_overall_buffers();
6245 LOGD("Calling flush. Wait for %d buffers to return",
6246 mPendingBuffersMap.numPendingBufsAtFlush);
6247
6248 /* send the flush event to the backend */
6249 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6250 if (rc < 0) {
6251 LOGE("Error in flush: IOCTL failure");
6252 mFlushPerf = false;
6253 pthread_mutex_unlock(&mMutex);
6254 return -ENODEV;
6255 }
6256
6257 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6258 LOGD("No pending buffers in HAL, return flush");
6259 mFlushPerf = false;
6260 pthread_mutex_unlock(&mMutex);
6261 return rc;
6262 }
6263
6264 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006265 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006266 if (rc < 0) {
6267 LOGE("Error reading the real time clock, cannot use timed wait");
6268 } else {
6269 timeout.tv_sec += FLUSH_TIMEOUT;
6270 timed_wait = true;
6271 }
6272
6273 //Block on conditional variable
6274 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6275 LOGD("Waiting on mBuffersCond");
6276 if (!timed_wait) {
6277 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6278 if (rc != 0) {
6279 LOGE("pthread_cond_wait failed due to rc = %s",
6280 strerror(rc));
6281 break;
6282 }
6283 } else {
6284 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6285 if (rc != 0) {
6286 LOGE("pthread_cond_timedwait failed due to rc = %s",
6287 strerror(rc));
6288 break;
6289 }
6290 }
6291 }
6292 if (rc != 0) {
6293 mFlushPerf = false;
6294 pthread_mutex_unlock(&mMutex);
6295 return -ENODEV;
6296 }
6297
6298 LOGD("Received buffers, now safe to return them");
6299
6300 //make sure the channels handle flush
6301 //currently only required for the picture channel to release snapshot resources
6302 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6303 it != mStreamInfo.end(); it++) {
6304 QCamera3Channel *channel = (*it)->channel;
6305 if (channel) {
6306 rc = channel->flush();
6307 if (rc) {
6308 LOGE("Flushing the channels failed with error %d", rc);
6309 // even though the channel flush failed we need to continue and
6310 // return the buffers we have to the framework, however the return
6311 // value will be an error
6312 rc = -ENODEV;
6313 }
6314 }
6315 }
6316
6317 /* notify the frameworks and send errored results */
6318 rc = notifyErrorForPendingRequests();
6319 if (rc < 0) {
6320 LOGE("notifyErrorForPendingRequests failed");
6321 pthread_mutex_unlock(&mMutex);
6322 return rc;
6323 }
6324
6325 //unblock process_capture_request
6326 mPendingLiveRequest = 0;
6327 unblockRequestIfNecessary();
6328
6329 mFlushPerf = false;
6330 pthread_mutex_unlock(&mMutex);
6331 LOGD ("Flush Operation complete. rc = %d", rc);
6332 return rc;
6333}
6334
6335/*===========================================================================
6336 * FUNCTION : handleCameraDeviceError
6337 *
6338 * DESCRIPTION: This function calls internal flush and notifies the error to
6339 * framework and updates the state variable.
6340 *
6341 * PARAMETERS : None
6342 *
6343 * RETURN : NO_ERROR on Success
6344 * Error code on failure
6345 *==========================================================================*/
6346int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6347{
6348 int32_t rc = NO_ERROR;
6349
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006350 {
6351 Mutex::Autolock lock(mFlushLock);
6352 pthread_mutex_lock(&mMutex);
6353 if (mState != ERROR) {
6354 //if mState != ERROR, nothing to be done
6355 pthread_mutex_unlock(&mMutex);
6356 return NO_ERROR;
6357 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006358 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006359
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006360 rc = flush(false /* restart channels */);
6361 if (NO_ERROR != rc) {
6362 LOGE("internal flush to handle mState = ERROR failed");
6363 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006364
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006365 pthread_mutex_lock(&mMutex);
6366 mState = DEINIT;
6367 pthread_mutex_unlock(&mMutex);
6368 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006369
6370 camera3_notify_msg_t notify_msg;
6371 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6372 notify_msg.type = CAMERA3_MSG_ERROR;
6373 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6374 notify_msg.message.error.error_stream = NULL;
6375 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006376 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006377
6378 return rc;
6379}
6380
6381/*===========================================================================
6382 * FUNCTION : captureResultCb
6383 *
6384 * DESCRIPTION: Callback handler for all capture result
6385 * (streams, as well as metadata)
6386 *
6387 * PARAMETERS :
 6388 *   @metadata_buf : metadata buffer from the backend; NULL for buffer callbacks
 6389 *   @buffer       : actual gralloc buffer to be returned to frameworks.
 6390 *                   NULL if metadata.
 *   @frame_number : frame number of the request this result belongs to
 *   @isInputBuffer: true if this callback is for the request's input buffer
6391 *
6392 * RETURN : NONE
6393 *==========================================================================*/
6394void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6395 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6396{
6397 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006398 pthread_mutex_lock(&mMutex);
6399 uint8_t batchSize = mBatchSize;
6400 pthread_mutex_unlock(&mMutex);
6401 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006402 handleBatchMetadata(metadata_buf,
6403 true /* free_and_bufdone_meta_buf */);
6404 } else { /* mBatchSize = 0 */
6405 hdrPlusPerfLock(metadata_buf);
6406 pthread_mutex_lock(&mMutex);
6407 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006408 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006409 true /* last urgent frame of batch metadata */,
6410 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006411 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006412 pthread_mutex_unlock(&mMutex);
6413 }
6414 } else if (isInputBuffer) {
6415 pthread_mutex_lock(&mMutex);
6416 handleInputBufferWithLock(frame_number);
6417 pthread_mutex_unlock(&mMutex);
6418 } else {
6419 pthread_mutex_lock(&mMutex);
6420 handleBufferWithLock(buffer, frame_number);
6421 pthread_mutex_unlock(&mMutex);
6422 }
6423 return;
6424}
6425
6426/*===========================================================================
6427 * FUNCTION : getReprocessibleOutputStreamId
6428 *
6429 * DESCRIPTION: Get source output stream id for the input reprocess stream
6430 * based on size and format, which would be the largest
6431 * output stream if an input stream exists.
6432 *
6433 * PARAMETERS :
6434 * @id : return the stream id if found
6435 *
6436 * RETURN : int32_t type of status
6437 * NO_ERROR -- success
 6438 *              non-zero failure code
6439 *==========================================================================*/
6440int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6441{
 6442    /* check if there is any output or bidirectional stream with the same size and
 6443       format, and return that stream */
6444 if ((mInputStreamInfo.dim.width > 0) &&
6445 (mInputStreamInfo.dim.height > 0)) {
6446 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6447 it != mStreamInfo.end(); it++) {
6448
6449 camera3_stream_t *stream = (*it)->stream;
6450 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6451 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6452 (stream->format == mInputStreamInfo.format)) {
6453 // Usage flag for an input stream and the source output stream
6454 // may be different.
6455 LOGD("Found reprocessible output stream! %p", *it);
6456 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6457 stream->usage, mInputStreamInfo.usage);
6458
6459 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6460 if (channel != NULL && channel->mStreams[0]) {
6461 id = channel->mStreams[0]->getMyServerID();
6462 return NO_ERROR;
6463 }
6464 }
6465 }
6466 } else {
6467 LOGD("No input stream, so no reprocessible output stream");
6468 }
6469 return NAME_NOT_FOUND;
6470}
6471
6472/*===========================================================================
6473 * FUNCTION : lookupFwkName
6474 *
 6475 * DESCRIPTION: In case the enum is not the same in the framework and the backend,
 6476 *              make sure the parameter is correctly propagated
6477 *
6478 * PARAMETERS :
6479 * @arr : map between the two enums
6480 * @len : len of the map
6481 * @hal_name : name of the hal_parm to map
6482 *
6483 * RETURN : int type of status
6484 * fwk_name -- success
 6485 *              non-zero failure code
6486 *==========================================================================*/
6487template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6488 size_t len, halType hal_name)
6489{
6490
6491 for (size_t i = 0; i < len; i++) {
6492 if (arr[i].hal_name == hal_name) {
6493 return arr[i].fwk_name;
6494 }
6495 }
6496
 6497    /* Not being able to find a matching framework type is not necessarily
 6498     * an error case; this happens when mm-camera supports more attributes
 6499     * than the framework does */
6500 LOGH("Cannot find matching framework type");
6501 return NAME_NOT_FOUND;
6502}
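// Usage sketch (illustrative only; EFFECT_MODES_MAP and METADATA_MAP_SIZE() are
// assumed to be the map table and size helper defined elsewhere in this HAL):
//
//   int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
//           (int)CAM_EFFECT_MODE_OFF);
//   if (val != NAME_NOT_FOUND) {
//       uint8_t fwkEffectMode = (uint8_t)val;
//       camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwkEffectMode, 1);
//   }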
6503
6504/*===========================================================================
6505 * FUNCTION : lookupHalName
6506 *
 6507 * DESCRIPTION: In case the enum is not the same in the framework and the backend,
 6508 *              make sure the parameter is correctly propagated
6509 *
6510 * PARAMETERS :
6511 * @arr : map between the two enums
6512 * @len : len of the map
 6513 *   @fwk_name : framework enum value to map to the corresponding HAL value
6514 *
6515 * RETURN : int32_t type of status
6516 * hal_name -- success
 6517 *              non-zero failure code
6518 *==========================================================================*/
6519template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6520 size_t len, fwkType fwk_name)
6521{
6522 for (size_t i = 0; i < len; i++) {
6523 if (arr[i].fwk_name == fwk_name) {
6524 return arr[i].hal_name;
6525 }
6526 }
6527
6528 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6529 return NAME_NOT_FOUND;
6530}
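// Usage sketch (illustrative only; same assumptions as above, mapping in the
// opposite direction, framework -> backend):
//
//   int halEffect = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
//           (int)ANDROID_CONTROL_EFFECT_MODE_OFF);
//   if (halEffect != NAME_NOT_FOUND) {
//       ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_EFFECT, (int32_t)halEffect);
//   }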
6531
6532/*===========================================================================
6533 * FUNCTION : lookupProp
6534 *
6535 * DESCRIPTION: lookup a value by its name
6536 *
6537 * PARAMETERS :
6538 * @arr : map between the two enums
6539 * @len : size of the map
6540 * @name : name to be looked up
6541 *
6542 * RETURN : Value if found
6543 * CAM_CDS_MODE_MAX if not found
6544 *==========================================================================*/
6545template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6546 size_t len, const char *name)
6547{
6548 if (name) {
6549 for (size_t i = 0; i < len; i++) {
6550 if (!strcmp(arr[i].desc, name)) {
6551 return arr[i].val;
6552 }
6553 }
6554 }
6555 return CAM_CDS_MODE_MAX;
6556}
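// Usage sketch (illustrative only; CDS_MAP is assumed to be a {desc, val} table
// defined elsewhere in this HAL, and the property name is an example):
//
//   char prop[PROPERTY_VALUE_MAX];
//   memset(prop, 0, sizeof(prop));
//   property_get("persist.camera.CDS", prop, "Auto");
//   cam_cds_mode_type_t cds = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
//   if (cds == CAM_CDS_MODE_MAX) {
//       // the property value did not match any known CDS mode
//   }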
6557
6558/*===========================================================================
 6559 * FUNCTION   : translateFromHalMetadata
 6560 *
 * DESCRIPTION: Translate metadata from the HAL (backend) format into the
 *              framework camera_metadata_t format
6561 *
6562 * PARAMETERS :
6563 * @metadata : metadata information from callback
6564 * @timestamp: metadata buffer timestamp
6565 * @request_id: request id
 6566 * @jpegMetadata: additional jpeg metadata
 * @pipeline_depth: pipeline depth reported for this request
 * @capture_intent: capture intent of this request
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006567 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006568 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6569 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006570 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006571 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
 6572 *                       in a batch. Always true for non-batch mode.
 * @fwk_cacMode: color aberration correction mode requested by the framework
 * @enableZsl : pointer to the requested ANDROID_CONTROL_ENABLE_ZSL value, or null
Thierry Strudel3d639192016-09-09 11:52:26 -07006573 *
6574 * RETURN : camera_metadata_t*
6575 * metadata in a format specified by fwk
6576 *==========================================================================*/
6577camera_metadata_t*
6578QCamera3HardwareInterface::translateFromHalMetadata(
6579 metadata_buffer_t *metadata,
6580 nsecs_t timestamp,
6581 int32_t request_id,
6582 const CameraMetadata& jpegMetadata,
6583 uint8_t pipeline_depth,
6584 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006585 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006586 /* DevCamDebug metadata translateFromHalMetadata argument */
6587 uint8_t DevCamDebug_meta_enable,
6588 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006589 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006590 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006591 bool lastMetadataInBatch,
6592 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006593{
6594 CameraMetadata camMetadata;
6595 camera_metadata_t *resultMetadata;
6596
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006597 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006598 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6599 * Timestamp is needed because it's used for shutter notify calculation.
 6600         */
6601 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6602 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006603 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006604 }
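    // Illustrative example: for a batch of 4 metadata callbacks, the first three take the
    // early-return path above and carry only ANDROID_SENSOR_TIMESTAMP (enough for shutter
    // notification); the full translation below runs only for the last metadata in the batch.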
6605
Thierry Strudel3d639192016-09-09 11:52:26 -07006606 if (jpegMetadata.entryCount())
6607 camMetadata.append(jpegMetadata);
6608
6609 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6610 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6611 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6612 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006613 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006614 if (mBatchSize == 0) {
6615 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6616 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6617 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006618
Samuel Ha68ba5172016-12-15 18:41:12 -08006619 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
    // Only update DevCamDebug metadata conditionally: in non-HFR mode and when it is enabled.
6621 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6622 // DevCamDebug metadata translateFromHalMetadata AF
6623 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6624 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6625 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6626 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6627 }
6628 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6629 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6630 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6631 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6632 }
6633 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6634 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6635 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6636 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6637 }
6638 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6639 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6640 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6641 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6642 }
6643 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6644 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6645 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6646 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6647 }
6648 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6649 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6650 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6651 *DevCamDebug_af_monitor_pdaf_target_pos;
6652 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6653 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6654 }
6655 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6656 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6657 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6658 *DevCamDebug_af_monitor_pdaf_confidence;
6659 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6660 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6661 }
6662 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6663 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6664 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6665 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6666 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6667 }
6668 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6669 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6670 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6671 *DevCamDebug_af_monitor_tof_target_pos;
6672 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6673 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6674 }
6675 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6676 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6677 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6678 *DevCamDebug_af_monitor_tof_confidence;
6679 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6680 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6681 }
6682 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6683 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6684 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6685 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6686 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6687 }
6688 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6689 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6690 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6691 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6692 &fwk_DevCamDebug_af_monitor_type_select, 1);
6693 }
6694 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6695 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6696 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6697 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6698 &fwk_DevCamDebug_af_monitor_refocus, 1);
6699 }
6700 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6701 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6702 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6703 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6704 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6705 }
6706 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6707 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6708 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6709 *DevCamDebug_af_search_pdaf_target_pos;
6710 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6711 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6712 }
6713 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6714 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6715 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6716 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6717 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6718 }
6719 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6720 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6721 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6722 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6723 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6724 }
6725 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6726 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6727 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6728 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6729 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6730 }
6731 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6732 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6733 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6734 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6735 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6736 }
6737 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6738 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6739 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6740 *DevCamDebug_af_search_tof_target_pos;
6741 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6742 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6743 }
6744 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6745 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6746 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6747 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6748 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6749 }
6750 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6751 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6752 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6753 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6754 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6755 }
6756 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6757 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6758 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6759 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6760 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6761 }
6762 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6763 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6764 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6765 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6766 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6767 }
6768 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6769 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6770 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6771 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6772 &fwk_DevCamDebug_af_search_type_select, 1);
6773 }
6774 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6775 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6776 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6777 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6778 &fwk_DevCamDebug_af_search_next_pos, 1);
6779 }
6780 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6781 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6782 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6783 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6784 &fwk_DevCamDebug_af_search_target_pos, 1);
6785 }
6786 // DevCamDebug metadata translateFromHalMetadata AEC
6787 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6788 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6789 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6790 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6791 }
6792 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6793 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6794 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6795 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6796 }
6797 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6798 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6799 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6800 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6801 }
6802 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6803 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6804 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6805 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6806 }
6807 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6808 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6809 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6810 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6811 }
6812 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6813 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6814 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6815 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6816 }
6817 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6818 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6819 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6820 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6821 }
6822 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6823 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6824 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6825 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6826 }
Samuel Ha34229982017-02-17 13:51:11 -08006827 // DevCamDebug metadata translateFromHalMetadata zzHDR
6828 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6829 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6830 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6831 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6832 }
6833 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6834 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006835 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006836 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6837 }
6838 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6839 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6840 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6841 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6842 }
6843 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6844 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006845 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006846 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6847 }
6848 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6849 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6850 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6851 *DevCamDebug_aec_hdr_sensitivity_ratio;
6852 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6853 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6854 }
6855 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6856 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6857 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6858 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6859 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6860 }
6861 // DevCamDebug metadata translateFromHalMetadata ADRC
6862 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6863 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6864 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6865 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6866 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6867 }
6868 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6869 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6870 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6871 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6872 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6873 }
6874 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6875 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6876 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6877 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6878 }
6879 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6880 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6881 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6882 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6883 }
6884 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6885 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6886 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6887 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6888 }
6889 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6890 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6891 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6892 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6893 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006894 // DevCamDebug metadata translateFromHalMetadata AWB
6895 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6896 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6897 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6898 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6899 }
6900 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6901 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6902 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6903 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6904 }
6905 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6906 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6907 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6908 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6909 }
6910 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6911 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6912 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6913 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6914 }
6915 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6916 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6917 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6918 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6919 }
6920 }
6921 // atrace_end(ATRACE_TAG_ALWAYS);
6922
Thierry Strudel3d639192016-09-09 11:52:26 -07006923 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6924 int64_t fwk_frame_number = *frame_number;
6925 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6926 }
6927
6928 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6929 int32_t fps_range[2];
6930 fps_range[0] = (int32_t)float_range->min_fps;
6931 fps_range[1] = (int32_t)float_range->max_fps;
6932 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6933 fps_range, 2);
6934 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6935 fps_range[0], fps_range[1]);
6936 }
6937
6938 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6939 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6940 }
6941
6942 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6943 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6944 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6945 *sceneMode);
6946 if (NAME_NOT_FOUND != val) {
6947 uint8_t fwkSceneMode = (uint8_t)val;
6948 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6949 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6950 fwkSceneMode);
6951 }
6952 }
6953
6954 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6955 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6956 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6957 }
6958
6959 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6960 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6961 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6962 }
6963
6964 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6965 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6966 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6967 }
6968
6969 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6970 CAM_INTF_META_EDGE_MODE, metadata) {
6971 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6972 }
6973
6974 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6975 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6976 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6977 }
6978
6979 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6980 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6981 }
6982
6983 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6984 if (0 <= *flashState) {
6985 uint8_t fwk_flashState = (uint8_t) *flashState;
6986 if (!gCamCapability[mCameraId]->flash_available) {
6987 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6988 }
6989 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6990 }
6991 }
6992
6993 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6994 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6995 if (NAME_NOT_FOUND != val) {
6996 uint8_t fwk_flashMode = (uint8_t)val;
6997 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6998 }
6999 }
7000
7001 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7002 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7003 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7004 }
7005
7006 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7007 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7008 }
7009
7010 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7011 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7012 }
7013
7014 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7015 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7016 }
7017
7018 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7019 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7020 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7021 }
7022
7023 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7024 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7025 LOGD("fwk_videoStab = %d", fwk_videoStab);
7026 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7027 } else {
        // Regardless of whether video stabilization is supported, CTS expects the EIS
        // result to be non-NULL, so hardcode the video stabilization result to OFF mode.
7030 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7031 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007032 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007033 }
7034
7035 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7036 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7037 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7038 }
7039
7040 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7041 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7042 }
7043
Thierry Strudel3d639192016-09-09 11:52:26 -07007044 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7045 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007046 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007047
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007048 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7049 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007050
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007051 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007052 blackLevelAppliedPattern->cam_black_level[0],
7053 blackLevelAppliedPattern->cam_black_level[1],
7054 blackLevelAppliedPattern->cam_black_level[2],
7055 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007056 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7057 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007058
7059#ifndef USE_HAL_3_3
7060 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
        // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
        // depth space.
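        // Going from 14-bit to 10-bit values means scaling down by
        // 2^(14 - 10) = 16, hence the divisions by 16.0 below.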
Jason Lee4f3d96e2017-02-28 19:24:14 +05307063 fwk_blackLevelInd[0] /= 16.0;
7064 fwk_blackLevelInd[1] /= 16.0;
7065 fwk_blackLevelInd[2] /= 16.0;
7066 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007067 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7068 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007069#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007070 }
7071
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007072#ifndef USE_HAL_3_3
7073 // Fixed whitelevel is used by ISP/Sensor
7074 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7075 &gCamCapability[mCameraId]->white_level, 1);
7076#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007077
7078 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7079 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7080 int32_t scalerCropRegion[4];
7081 scalerCropRegion[0] = hScalerCropRegion->left;
7082 scalerCropRegion[1] = hScalerCropRegion->top;
7083 scalerCropRegion[2] = hScalerCropRegion->width;
7084 scalerCropRegion[3] = hScalerCropRegion->height;
7085
7086 // Adjust crop region from sensor output coordinate system to active
7087 // array coordinate system.
7088 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7089 scalerCropRegion[2], scalerCropRegion[3]);
7090
7091 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7092 }
7093
7094 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7095 LOGD("sensorExpTime = %lld", *sensorExpTime);
7096 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7097 }
7098
7099 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7100 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7101 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7102 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7103 }
7104
7105 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7106 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7107 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7108 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7109 sensorRollingShutterSkew, 1);
7110 }
7111
7112 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7113 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7114 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7115
7116 //calculate the noise profile based on sensitivity
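        // Descriptive note: per the ANDROID_SENSOR_NOISE_PROFILE definition, pixel noise
        // is modeled as sigma(x) = sqrt(S * x + O) for a (normalized) pixel value x, so
        // one (S, O) pair is reported for each color channel below.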
7117 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7118 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7119 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7120 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7121 noise_profile[i] = noise_profile_S;
7122 noise_profile[i+1] = noise_profile_O;
7123 }
7124 LOGD("noise model entry (S, O) is (%f, %f)",
7125 noise_profile_S, noise_profile_O);
7126 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7127 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7128 }
7129
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007130#ifndef USE_HAL_3_3
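    // Descriptive note: ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST is expressed in ISO
    // arithmetic units where 100 means no boost; the value reported below is the ISP
    // sensitivity scaled by any post-stats digital boost.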
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007131 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007132 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007133 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007134 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007135 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7136 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7137 }
7138 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007139#endif
7140
Thierry Strudel3d639192016-09-09 11:52:26 -07007141 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7142 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7143 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7144 }
7145
7146 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7147 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7148 *faceDetectMode);
7149 if (NAME_NOT_FOUND != val) {
7150 uint8_t fwk_faceDetectMode = (uint8_t)val;
7151 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7152
7153 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7154 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7155 CAM_INTF_META_FACE_DETECTION, metadata) {
7156 uint8_t numFaces = MIN(
7157 faceDetectionInfo->num_faces_detected, MAX_ROI);
7158 int32_t faceIds[MAX_ROI];
7159 uint8_t faceScores[MAX_ROI];
7160 int32_t faceRectangles[MAX_ROI * 4];
7161 int32_t faceLandmarks[MAX_ROI * 6];
7162 size_t j = 0, k = 0;
7163
7164 for (size_t i = 0; i < numFaces; i++) {
7165 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
                    // Adjust the face rectangle from the sensor output coordinate
                    // system to the active array coordinate system.
7168 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7169 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7170 rect.width, rect.height);
7171
7172 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7173 faceRectangles+j, -1);
7174
Jason Lee8ce36fa2017-04-19 19:40:37 -07007175 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7176 "bottom-right (%d, %d)",
7177 faceDetectionInfo->frame_id, i,
7178 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7179 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7180
Thierry Strudel3d639192016-09-09 11:52:26 -07007181 j+= 4;
7182 }
7183 if (numFaces <= 0) {
7184 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7185 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7186 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7187 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7188 }
7189
7190 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7191 numFaces);
7192 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7193 faceRectangles, numFaces * 4U);
7194 if (fwk_faceDetectMode ==
7195 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7196 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7197 CAM_INTF_META_FACE_LANDMARK, metadata) {
7198
7199 for (size_t i = 0; i < numFaces; i++) {
                            // Map the landmark coordinates from the sensor output
                            // coordinate system to the active array coordinate system.
7202 mCropRegionMapper.toActiveArray(
7203 landmarks->face_landmarks[i].left_eye_center.x,
7204 landmarks->face_landmarks[i].left_eye_center.y);
7205 mCropRegionMapper.toActiveArray(
7206 landmarks->face_landmarks[i].right_eye_center.x,
7207 landmarks->face_landmarks[i].right_eye_center.y);
7208 mCropRegionMapper.toActiveArray(
7209 landmarks->face_landmarks[i].mouth_center.x,
7210 landmarks->face_landmarks[i].mouth_center.y);
7211
7212 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007213
7214 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7215 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7216 faceDetectionInfo->frame_id, i,
7217 faceLandmarks[k + LEFT_EYE_X],
7218 faceLandmarks[k + LEFT_EYE_Y],
7219 faceLandmarks[k + RIGHT_EYE_X],
7220 faceLandmarks[k + RIGHT_EYE_Y],
7221 faceLandmarks[k + MOUTH_X],
7222 faceLandmarks[k + MOUTH_Y]);
7223
Thierry Strudel04e026f2016-10-10 11:27:36 -07007224 k+= TOTAL_LANDMARK_INDICES;
7225 }
7226 } else {
7227 for (size_t i = 0; i < numFaces; i++) {
7228 setInvalidLandmarks(faceLandmarks+k);
7229 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007230 }
7231 }
7232
Jason Lee49619db2017-04-13 12:07:22 -07007233 for (size_t i = 0; i < numFaces; i++) {
7234 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7235
7236 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7237 faceDetectionInfo->frame_id, i, faceIds[i]);
7238 }
7239
Thierry Strudel3d639192016-09-09 11:52:26 -07007240 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7241 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7242 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007243 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007244 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7245 CAM_INTF_META_FACE_BLINK, metadata) {
7246 uint8_t detected[MAX_ROI];
7247 uint8_t degree[MAX_ROI * 2];
7248 for (size_t i = 0; i < numFaces; i++) {
7249 detected[i] = blinks->blink[i].blink_detected;
7250 degree[2 * i] = blinks->blink[i].left_blink;
7251 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007252
Jason Lee49619db2017-04-13 12:07:22 -07007253 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7254 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7255 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7256 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007257 }
7258 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7259 detected, numFaces);
7260 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7261 degree, numFaces * 2);
7262 }
7263 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7264 CAM_INTF_META_FACE_SMILE, metadata) {
7265 uint8_t degree[MAX_ROI];
7266 uint8_t confidence[MAX_ROI];
7267 for (size_t i = 0; i < numFaces; i++) {
7268 degree[i] = smiles->smile[i].smile_degree;
7269 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007270
Jason Lee49619db2017-04-13 12:07:22 -07007271 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7272 "smile_degree=%d, smile_score=%d",
7273 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007274 }
7275 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7276 degree, numFaces);
7277 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7278 confidence, numFaces);
7279 }
7280 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7281 CAM_INTF_META_FACE_GAZE, metadata) {
7282 int8_t angle[MAX_ROI];
7283 int32_t direction[MAX_ROI * 3];
7284 int8_t degree[MAX_ROI * 2];
7285 for (size_t i = 0; i < numFaces; i++) {
7286 angle[i] = gazes->gaze[i].gaze_angle;
7287 direction[3 * i] = gazes->gaze[i].updown_dir;
7288 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7289 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7290 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7291 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007292
7293 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7294 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7295 "left_right_gaze=%d, top_bottom_gaze=%d",
7296 faceDetectionInfo->frame_id, i, angle[i],
7297 direction[3 * i], direction[3 * i + 1],
7298 direction[3 * i + 2],
7299 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007300 }
7301 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7302 (uint8_t *)angle, numFaces);
7303 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7304 direction, numFaces * 3);
7305 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7306 (uint8_t *)degree, numFaces * 2);
7307 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007308 }
7309 }
7310 }
7311 }
7312
7313 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7314 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007315 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007316 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007317 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007318
Shuzhen Wang14415f52016-11-16 18:26:18 -08007319 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7320 histogramBins = *histBins;
7321 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7322 }
7323
7324 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007325 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7326 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007327 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007328
7329 switch (stats_data->type) {
7330 case CAM_HISTOGRAM_TYPE_BAYER:
7331 switch (stats_data->bayer_stats.data_type) {
7332 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007333 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7334 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007335 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007336 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7337 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007338 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007339 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7340 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007341 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007342 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007343 case CAM_STATS_CHANNEL_R:
7344 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007345 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7346 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007347 }
7348 break;
7349 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007350 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007351 break;
7352 }
7353
Shuzhen Wang14415f52016-11-16 18:26:18 -08007354 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007355 }
7356 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007357 }
7358
7359 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7360 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7361 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7362 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7363 }
7364
7365 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7366 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7367 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7368 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7369 }
7370
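    // Descriptive note: the lens shading map carries four gain samples per grid cell
    // (one per Bayer color channel, as defined for ANDROID_STATISTICS_LENS_SHADING_MAP),
    // which is why 4U * map_width * map_height values are published below.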
7371 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7372 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7373 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7374 CAM_MAX_SHADING_MAP_HEIGHT);
7375 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7376 CAM_MAX_SHADING_MAP_WIDTH);
7377 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7378 lensShadingMap->lens_shading, 4U * map_width * map_height);
7379 }
7380
7381 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7382 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7383 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7384 }
7385
7386 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7387 //Populate CAM_INTF_META_TONEMAP_CURVES
7388 /* ch0 = G, ch 1 = B, ch 2 = R*/
7389 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7390 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7391 tonemap->tonemap_points_cnt,
7392 CAM_MAX_TONEMAP_CURVE_SIZE);
7393 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7394 }
7395
7396 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7397 &tonemap->curves[0].tonemap_points[0][0],
7398 tonemap->tonemap_points_cnt * 2);
7399
7400 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7401 &tonemap->curves[1].tonemap_points[0][0],
7402 tonemap->tonemap_points_cnt * 2);
7403
7404 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7405 &tonemap->curves[2].tonemap_points[0][0],
7406 tonemap->tonemap_points_cnt * 2);
7407 }
7408
7409 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7410 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7411 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7412 CC_GAIN_MAX);
7413 }
7414
7415 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7416 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7417 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7418 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7419 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7420 }
7421
7422 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7423 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7424 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7425 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7426 toneCurve->tonemap_points_cnt,
7427 CAM_MAX_TONEMAP_CURVE_SIZE);
7428 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7429 }
7430 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7431 (float*)toneCurve->curve.tonemap_points,
7432 toneCurve->tonemap_points_cnt * 2);
7433 }
7434
7435 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7436 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7437 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7438 predColorCorrectionGains->gains, 4);
7439 }
7440
7441 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7442 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7443 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7444 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7445 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7446 }
7447
7448 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7449 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7450 }
7451
7452 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7453 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7454 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7455 }
7456
7457 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7458 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7459 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7460 }
7461
7462 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7463 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7464 *effectMode);
7465 if (NAME_NOT_FOUND != val) {
7466 uint8_t fwk_effectMode = (uint8_t)val;
7467 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7468 }
7469 }
7470
7471 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7472 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7473 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7474 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7475 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7476 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7477 }
7478 int32_t fwk_testPatternData[4];
7479 fwk_testPatternData[0] = testPatternData->r;
7480 fwk_testPatternData[3] = testPatternData->b;
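        // Descriptive note: the framework expects the four test pattern values in a fixed
        // channel order (red in slot 0, blue in slot 3, and the two green samples in
        // between), so which HAL green value (gr vs. gb) lands in which slot depends on
        // the sensor's color filter arrangement handled by the switch below.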
7481 switch (gCamCapability[mCameraId]->color_arrangement) {
7482 case CAM_FILTER_ARRANGEMENT_RGGB:
7483 case CAM_FILTER_ARRANGEMENT_GRBG:
7484 fwk_testPatternData[1] = testPatternData->gr;
7485 fwk_testPatternData[2] = testPatternData->gb;
7486 break;
7487 case CAM_FILTER_ARRANGEMENT_GBRG:
7488 case CAM_FILTER_ARRANGEMENT_BGGR:
7489 fwk_testPatternData[2] = testPatternData->gr;
7490 fwk_testPatternData[1] = testPatternData->gb;
7491 break;
7492 default:
7493 LOGE("color arrangement %d is not supported",
7494 gCamCapability[mCameraId]->color_arrangement);
7495 break;
7496 }
7497 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7498 }
7499
7500 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7501 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7502 }
7503
7504 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7505 String8 str((const char *)gps_methods);
7506 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7507 }
7508
7509 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7510 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7511 }
7512
7513 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7514 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7515 }
7516
7517 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7518 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7519 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7520 }
7521
7522 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7523 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7524 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7525 }
7526
7527 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7528 int32_t fwk_thumb_size[2];
7529 fwk_thumb_size[0] = thumb_size->width;
7530 fwk_thumb_size[1] = thumb_size->height;
7531 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7532 }
7533
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007534 // Skip reprocess metadata if there is no input stream.
7535 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7536 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7537 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7538 privateData,
7539 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7540 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007541 }
7542
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007543 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007544 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007545 meteringMode, 1);
7546 }
7547
Thierry Strudel54dc9782017-02-15 12:12:10 -08007548 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7549 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7550 LOGD("hdr_scene_data: %d %f\n",
7551 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7552 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7553 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7554 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7555 &isHdr, 1);
7556 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7557 &isHdrConfidence, 1);
7558 }
7559
7560
7561
Thierry Strudel3d639192016-09-09 11:52:26 -07007562 if (metadata->is_tuning_params_valid) {
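        // Descriptive note on the blob layout packed below: a version word, five size
        // words (sensor, VFE, CPP, CAC, mod3 -- mod3 is forced to 0), followed by the
        // sensor, VFE, CPP and CAC tuning payloads, each clamped to its respective maximum.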
7563 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7564 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7565 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7566
7567
7568 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7569 sizeof(uint32_t));
7570 data += sizeof(uint32_t);
7571
7572 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7573 sizeof(uint32_t));
7574 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7575 data += sizeof(uint32_t);
7576
7577 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7578 sizeof(uint32_t));
7579 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7580 data += sizeof(uint32_t);
7581
7582 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7583 sizeof(uint32_t));
7584 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7585 data += sizeof(uint32_t);
7586
7587 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7588 sizeof(uint32_t));
7589 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7590 data += sizeof(uint32_t);
7591
7592 metadata->tuning_params.tuning_mod3_data_size = 0;
7593 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7594 sizeof(uint32_t));
7595 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7596 data += sizeof(uint32_t);
7597
7598 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7599 TUNING_SENSOR_DATA_MAX);
7600 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7601 count);
7602 data += count;
7603
7604 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7605 TUNING_VFE_DATA_MAX);
7606 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7607 count);
7608 data += count;
7609
7610 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7611 TUNING_CPP_DATA_MAX);
7612 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7613 count);
7614 data += count;
7615
7616 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7617 TUNING_CAC_DATA_MAX);
7618 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7619 count);
7620 data += count;
7621
7622 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7623 (int32_t *)(void *)tuning_meta_data_blob,
7624 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7625 }
7626
7627 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7628 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7629 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7630 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7631 NEUTRAL_COL_POINTS);
7632 }
7633
7634 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7635 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7636 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7637 }
7638
7639 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7640 int32_t aeRegions[REGIONS_TUPLE_COUNT];
        // Adjust the AE region from the sensor output coordinate system to the
        // active array coordinate system.
7643 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7644 hAeRegions->rect.width, hAeRegions->rect.height);
7645
7646 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7647 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7648 REGIONS_TUPLE_COUNT);
7649 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7650 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7651 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7652 hAeRegions->rect.height);
7653 }
7654
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007655 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7656 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7657 if (NAME_NOT_FOUND != val) {
7658 uint8_t fwkAfMode = (uint8_t)val;
7659 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7660 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7661 } else {
7662 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7663 val);
7664 }
7665 }
7666
Thierry Strudel3d639192016-09-09 11:52:26 -07007667 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7668 uint8_t fwk_afState = (uint8_t) *afState;
7669 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007670 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007671 }
7672
7673 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7674 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7675 }
7676
7677 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7678 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7679 }
7680
7681 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7682 uint8_t fwk_lensState = *lensState;
7683 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7684 }
7685
Thierry Strudel3d639192016-09-09 11:52:26 -07007686
7687 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007688 uint32_t ab_mode = *hal_ab_mode;
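        // Descriptive note: the framework antibanding enum has no 50Hz/60Hz-specific
        // AUTO values, so both auto variants are collapsed to plain AUTO before the
        // lookup below.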
7689 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7690 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7691 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7692 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007693 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007694 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007695 if (NAME_NOT_FOUND != val) {
7696 uint8_t fwk_ab_mode = (uint8_t)val;
7697 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7698 }
7699 }
7700
7701 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7702 int val = lookupFwkName(SCENE_MODES_MAP,
7703 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7704 if (NAME_NOT_FOUND != val) {
7705 uint8_t fwkBestshotMode = (uint8_t)val;
7706 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7707 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7708 } else {
7709 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7710 }
7711 }
7712
7713 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7714 uint8_t fwk_mode = (uint8_t) *mode;
7715 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7716 }
7717
    /* Constant metadata values to be updated */
7719 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7720 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7721
7722 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7723 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7724
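    // An empty hot pixel map (zero entries) is reported, consistent with the
    // hot pixel map mode being fixed to OFF above.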
7725 int32_t hotPixelMap[2];
7726 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7727
7728 // CDS
7729 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7730 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7731 }
7732
Thierry Strudel04e026f2016-10-10 11:27:36 -07007733 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7734 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007735 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007736 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7737 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7738 } else {
7739 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7740 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007741
7742 if(fwk_hdr != curr_hdr_state) {
7743 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7744 if(fwk_hdr)
7745 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7746 else
7747 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7748 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007749 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7750 }
7751
Thierry Strudel54dc9782017-02-15 12:12:10 -08007752 //binning correction
7753 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7754 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7755 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7756 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7757 }
7758
Thierry Strudel04e026f2016-10-10 11:27:36 -07007759 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007760 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007761 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7762 int8_t is_ir_on = 0;
7763
7764 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7765 if(is_ir_on != curr_ir_state) {
7766 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7767 if(is_ir_on)
7768 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7769 else
7770 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7771 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007772 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007773 }
7774
Thierry Strudel269c81a2016-10-12 12:13:59 -07007775 // AEC SPEED
7776 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7777 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7778 }
7779
7780 // AWB SPEED
7781 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7782 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7783 }
7784
Thierry Strudel3d639192016-09-09 11:52:26 -07007785 // TNR
7786 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7787 uint8_t tnr_enable = tnr->denoise_enable;
7788 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007789 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7790 int8_t is_tnr_on = 0;
7791
7792 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7793 if(is_tnr_on != curr_tnr_state) {
7794 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7795 if(is_tnr_on)
7796 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7797 else
7798 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7799 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007800
7801 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7802 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7803 }
7804
7805 // Reprocess crop data
7806 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7807 uint8_t cnt = crop_data->num_of_streams;
7808 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7809            // mm-qcamera-daemon only posts crop_data for streams
7810            // not linked to pproc, so the absence of valid crop metadata
7811            // is not necessarily an error case.
7812 LOGD("No valid crop metadata entries");
7813 } else {
7814 uint32_t reproc_stream_id;
7815 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7816 LOGD("No reprocessible stream found, ignore crop data");
7817 } else {
7818 int rc = NO_ERROR;
7819 Vector<int32_t> roi_map;
7820 int32_t *crop = new int32_t[cnt*4];
7821 if (NULL == crop) {
7822 rc = NO_MEMORY;
7823 }
7824 if (NO_ERROR == rc) {
7825 int32_t streams_found = 0;
7826 for (size_t i = 0; i < cnt; i++) {
7827 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7828 if (pprocDone) {
7829 // HAL already does internal reprocessing,
7830 // either via reprocessing before JPEG encoding,
7831 // or offline postprocessing for pproc bypass case.
7832 crop[0] = 0;
7833 crop[1] = 0;
7834 crop[2] = mInputStreamInfo.dim.width;
7835 crop[3] = mInputStreamInfo.dim.height;
7836 } else {
7837 crop[0] = crop_data->crop_info[i].crop.left;
7838 crop[1] = crop_data->crop_info[i].crop.top;
7839 crop[2] = crop_data->crop_info[i].crop.width;
7840 crop[3] = crop_data->crop_info[i].crop.height;
7841 }
7842 roi_map.add(crop_data->crop_info[i].roi_map.left);
7843 roi_map.add(crop_data->crop_info[i].roi_map.top);
7844 roi_map.add(crop_data->crop_info[i].roi_map.width);
7845 roi_map.add(crop_data->crop_info[i].roi_map.height);
7846 streams_found++;
7847 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7848 crop[0], crop[1], crop[2], crop[3]);
7849 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7850 crop_data->crop_info[i].roi_map.left,
7851 crop_data->crop_info[i].roi_map.top,
7852 crop_data->crop_info[i].roi_map.width,
7853 crop_data->crop_info[i].roi_map.height);
7854 break;
7855
7856 }
7857 }
7858 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7859 &streams_found, 1);
7860 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7861 crop, (size_t)(streams_found * 4));
7862 if (roi_map.array()) {
7863 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7864 roi_map.array(), roi_map.size());
7865 }
7866 }
7867 if (crop) {
7868 delete [] crop;
7869 }
7870 }
7871 }
7872 }
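    // Editor's note (illustrative, not part of the original implementation): the
    // reprocess crop vendor tags above are flattened arrays with four ints per stream.
    // Assuming, for illustration, one reprocessible stream cropped to
    // (left=0, top=0, width=4000, height=3000) with an identical roi_map, the entries
    // published above would be:
    //   QCAMERA3_CROP_COUNT_REPROCESS   = 1
    //   QCAMERA3_CROP_REPROCESS         = {0, 0, 4000, 3000}
    //   QCAMERA3_CROP_ROI_MAP_REPROCESS = {0, 0, 4000, 3000}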
7873
7874 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7875        // Regardless of whether CAC is supported, CTS expects the CAC result to be
7876        // non-NULL, so hardcode the CAC result to OFF mode.
7877 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7878 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7879 } else {
7880 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7881 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7882 *cacMode);
7883 if (NAME_NOT_FOUND != val) {
7884 uint8_t resultCacMode = (uint8_t)val;
7885                // Check whether the CAC result from the callback equals the framework-set CAC mode.
7886                // If not, report the CAC mode that came in the corresponding request.
7887 if (fwk_cacMode != resultCacMode) {
7888 resultCacMode = fwk_cacMode;
7889 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007890 //Check if CAC is disabled by property
7891 if (m_cacModeDisabled) {
7892 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7893 }
7894
Thierry Strudel3d639192016-09-09 11:52:26 -07007895 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7896 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7897 } else {
7898 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7899 }
7900 }
7901 }
7902
7903 // Post blob of cam_cds_data through vendor tag.
7904 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7905 uint8_t cnt = cdsInfo->num_of_streams;
7906 cam_cds_data_t cdsDataOverride;
7907 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7908 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7909 cdsDataOverride.num_of_streams = 1;
7910 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7911 uint32_t reproc_stream_id;
7912 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7913 LOGD("No reprocessible stream found, ignore cds data");
7914 } else {
7915 for (size_t i = 0; i < cnt; i++) {
7916 if (cdsInfo->cds_info[i].stream_id ==
7917 reproc_stream_id) {
7918 cdsDataOverride.cds_info[0].cds_enable =
7919 cdsInfo->cds_info[i].cds_enable;
7920 break;
7921 }
7922 }
7923 }
7924 } else {
7925 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7926 }
7927 camMetadata.update(QCAMERA3_CDS_INFO,
7928 (uint8_t *)&cdsDataOverride,
7929 sizeof(cam_cds_data_t));
7930 }
7931
7932 // Ldaf calibration data
7933 if (!mLdafCalibExist) {
7934 IF_META_AVAILABLE(uint32_t, ldafCalib,
7935 CAM_INTF_META_LDAF_EXIF, metadata) {
7936 mLdafCalibExist = true;
7937 mLdafCalib[0] = ldafCalib[0];
7938 mLdafCalib[1] = ldafCalib[1];
7939 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7940 ldafCalib[0], ldafCalib[1]);
7941 }
7942 }
7943
Thierry Strudel54dc9782017-02-15 12:12:10 -08007944 // EXIF debug data through vendor tag
7945 /*
7946 * Mobicat Mask can assume 3 values:
7947 * 1 refers to Mobicat data,
7948 * 2 refers to Stats Debug and Exif Debug Data
7949 * 3 refers to Mobicat and Stats Debug Data
7950 * We want to make sure that we are sending Exif debug data
7951 * only when Mobicat Mask is 2.
7952 */
7953 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7954 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7955 (uint8_t *)(void *)mExifParams.debug_params,
7956 sizeof(mm_jpeg_debug_exif_params_t));
7957 }
7958
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007959 // Reprocess and DDM debug data through vendor tag
7960 cam_reprocess_info_t repro_info;
7961 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007962 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7963 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007964 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007965 }
7966 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7967 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007968 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007969 }
7970 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7971 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007972 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007973 }
7974 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7975 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007976 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007977 }
7978 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7979 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007980 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007981 }
7982 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007983 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007984 }
7985 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7986 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007987 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007988 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007989 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7990 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7991 }
7992 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7993 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7994 }
7995 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7996 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007997
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007998 // INSTANT AEC MODE
7999 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8000 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8001 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8002 }
8003
Shuzhen Wange763e802016-03-31 10:24:29 -07008004 // AF scene change
8005 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8006 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8007 }
8008
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008009 // Enable ZSL
8010 if (enableZsl != nullptr) {
8011 uint8_t value = *enableZsl ?
8012 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8013 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8014 }
8015
Xu Han821ea9c2017-05-23 09:00:40 -07008016 // OIS Data
8017 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8018 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8019 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8020 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8021 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8022 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8023 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8024 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8025 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8026 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8027 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
8028 }
8029
Thierry Strudel3d639192016-09-09 11:52:26 -07008030 resultMetadata = camMetadata.release();
8031 return resultMetadata;
8032}
8033
8034/*===========================================================================
8035 * FUNCTION : saveExifParams
8036 *
8037 * DESCRIPTION: Save EXIF debug parameters from the metadata callback into mExifParams
8038 *
8039 * PARAMETERS :
8040 * @metadata : metadata information from callback
8041 *
8042 * RETURN : none
8043 *
8044 *==========================================================================*/
8045void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8046{
8047 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8048 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8049 if (mExifParams.debug_params) {
8050 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8051 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8052 }
8053 }
8054 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8055 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8056 if (mExifParams.debug_params) {
8057 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8058 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8059 }
8060 }
8061 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8062 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8063 if (mExifParams.debug_params) {
8064 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8065 mExifParams.debug_params->af_debug_params_valid = TRUE;
8066 }
8067 }
8068 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8069 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8070 if (mExifParams.debug_params) {
8071 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8072 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8073 }
8074 }
8075 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8076 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8077 if (mExifParams.debug_params) {
8078 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8079 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8080 }
8081 }
8082 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8083 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8084 if (mExifParams.debug_params) {
8085 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8086 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8087 }
8088 }
8089 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8090 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8091 if (mExifParams.debug_params) {
8092 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8093 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8094 }
8095 }
8096 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8097 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8098 if (mExifParams.debug_params) {
8099 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8100 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8101 }
8102 }
8103}
8104
8105/*===========================================================================
8106 * FUNCTION : get3AExifParams
8107 *
8108 * DESCRIPTION: Return the cached 3A EXIF debug parameters
8109 *
8110 * PARAMETERS : none
8111 *
8112 *
8113 * RETURN : mm_jpeg_exif_params_t
8114 *
8115 *==========================================================================*/
8116mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8117{
8118 return mExifParams;
8119}
8120
8121/*===========================================================================
8122 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8123 *
8124 * DESCRIPTION: Translate urgent (partial 3A) metadata from the HAL callback into
 *              framework result metadata
8125 *
8126 * PARAMETERS :
8127 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008128 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8129 * urgent metadata in a batch. Always true for
8130 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008131 *
8132 * RETURN : camera_metadata_t*
8133 * metadata in a format specified by fwk
8134 *==========================================================================*/
8135camera_metadata_t*
8136QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008137 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008138{
8139 CameraMetadata camMetadata;
8140 camera_metadata_t *resultMetadata;
8141
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008142 if (!lastUrgentMetadataInBatch) {
8143 /* In batch mode, use empty metadata if this is not the last in batch
8144 */
8145 resultMetadata = allocate_camera_metadata(0, 0);
8146 return resultMetadata;
8147 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008148
8149 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8150 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8151 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8152 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8153 }
8154
8155 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8156 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8157 &aecTrigger->trigger, 1);
8158 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8159 &aecTrigger->trigger_id, 1);
8160 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8161 aecTrigger->trigger);
8162 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8163 aecTrigger->trigger_id);
8164 }
8165
8166 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8167 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8168 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8169 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8170 }
8171
Thierry Strudel3d639192016-09-09 11:52:26 -07008172 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8173 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8174 &af_trigger->trigger, 1);
8175 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8176 af_trigger->trigger);
8177 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8178 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8179 af_trigger->trigger_id);
8180 }
8181
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008182 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8183 /*af regions*/
8184 int32_t afRegions[REGIONS_TUPLE_COUNT];
8185 // Adjust crop region from sensor output coordinate system to active
8186 // array coordinate system.
8187 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8188 hAfRegions->rect.width, hAfRegions->rect.height);
8189
8190 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8191 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8192 REGIONS_TUPLE_COUNT);
8193 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8194 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8195 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8196 hAfRegions->rect.height);
8197 }
8198
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008199 // AF region confidence
8200 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8201 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8202 }
8203
Thierry Strudel3d639192016-09-09 11:52:26 -07008204 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8205 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8206 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8207 if (NAME_NOT_FOUND != val) {
8208 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8209 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8210 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8211 } else {
8212 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8213 }
8214 }
8215
8216 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8217 uint32_t aeMode = CAM_AE_MODE_MAX;
8218 int32_t flashMode = CAM_FLASH_MODE_MAX;
8219 int32_t redeye = -1;
8220 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8221 aeMode = *pAeMode;
8222 }
8223 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8224 flashMode = *pFlashMode;
8225 }
8226 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8227 redeye = *pRedeye;
8228 }
8229
8230 if (1 == redeye) {
8231 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8232 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8233 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8234 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8235 flashMode);
8236 if (NAME_NOT_FOUND != val) {
8237 fwk_aeMode = (uint8_t)val;
8238 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8239 } else {
8240 LOGE("Unsupported flash mode %d", flashMode);
8241 }
8242 } else if (aeMode == CAM_AE_MODE_ON) {
8243 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8244 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8245 } else if (aeMode == CAM_AE_MODE_OFF) {
8246 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8247 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008248 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8249 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8250 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008251 } else {
8252 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8253 "flashMode:%d, aeMode:%u!!!",
8254 redeye, flashMode, aeMode);
8255 }
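    // Editor's note (illustrative, not from the original source): the AE mode above is
    // deduced with the priority redeye > explicit flash mode > aeMode. For example, if
    // redeye == 1 while flashMode == CAM_FLASH_MODE_ON, ON_AUTO_FLASH_REDEYE is reported
    // even though the flash mode alone would map to a flash AE mode.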
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008256 if (mInstantAEC) {
8257        // Increment frame index count until a bound is reached for instant AEC.
8258 mInstantAecFrameIdxCount++;
8259 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8260 CAM_INTF_META_AEC_INFO, metadata) {
8261 LOGH("ae_params->settled = %d",ae_params->settled);
8262 // If AEC settled, or if number of frames reached bound value,
8263            // If AEC has settled, or the number of frames has reached the bound,
8264            // reset instant AEC.
8265 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8266 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8267 mInstantAEC = false;
8268 mResetInstantAEC = true;
8269 mInstantAecFrameIdxCount = 0;
8270 }
8271 }
8272 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008273 resultMetadata = camMetadata.release();
8274 return resultMetadata;
8275}
8276
8277/*===========================================================================
8278 * FUNCTION : dumpMetadataToFile
8279 *
8280 * DESCRIPTION: Dumps tuning metadata to file system
8281 *
8282 * PARAMETERS :
8283 * @meta : tuning metadata
8284 * @dumpFrameCount : current dump frame count
8285 * @enabled : Enable mask
 * @type : string identifying the metadata source, used in the dump file name
 * @frameNumber : frame number, used in the dump file name
8286 *
8287 *==========================================================================*/
8288void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8289 uint32_t &dumpFrameCount,
8290 bool enabled,
8291 const char *type,
8292 uint32_t frameNumber)
8293{
8294 //Some sanity checks
8295 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8296 LOGE("Tuning sensor data size bigger than expected %d: %d",
8297 meta.tuning_sensor_data_size,
8298 TUNING_SENSOR_DATA_MAX);
8299 return;
8300 }
8301
8302 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8303 LOGE("Tuning VFE data size bigger than expected %d: %d",
8304 meta.tuning_vfe_data_size,
8305 TUNING_VFE_DATA_MAX);
8306 return;
8307 }
8308
8309 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8310 LOGE("Tuning CPP data size bigger than expected %d: %d",
8311 meta.tuning_cpp_data_size,
8312 TUNING_CPP_DATA_MAX);
8313 return;
8314 }
8315
8316 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8317 LOGE("Tuning CAC data size bigger than expected %d: %d",
8318 meta.tuning_cac_data_size,
8319 TUNING_CAC_DATA_MAX);
8320 return;
8321 }
8322 //
8323
8324 if(enabled){
8325 char timeBuf[FILENAME_MAX];
8326 char buf[FILENAME_MAX];
8327 memset(buf, 0, sizeof(buf));
8328 memset(timeBuf, 0, sizeof(timeBuf));
8329 time_t current_time;
8330 struct tm * timeinfo;
8331 time (&current_time);
8332 timeinfo = localtime (&current_time);
8333 if (timeinfo != NULL) {
8334 strftime (timeBuf, sizeof(timeBuf),
8335 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8336 }
8337 String8 filePath(timeBuf);
8338 snprintf(buf,
8339 sizeof(buf),
8340 "%dm_%s_%d.bin",
8341 dumpFrameCount,
8342 type,
8343 frameNumber);
8344 filePath.append(buf);
8345 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8346 if (file_fd >= 0) {
8347 ssize_t written_len = 0;
8348 meta.tuning_data_version = TUNING_DATA_VERSION;
8349 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8350 written_len += write(file_fd, data, sizeof(uint32_t));
8351 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8352 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8353 written_len += write(file_fd, data, sizeof(uint32_t));
8354 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8355 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8356 written_len += write(file_fd, data, sizeof(uint32_t));
8357 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8358 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8359 written_len += write(file_fd, data, sizeof(uint32_t));
8360 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8361 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8362 written_len += write(file_fd, data, sizeof(uint32_t));
8363 meta.tuning_mod3_data_size = 0;
8364 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8365 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8366 written_len += write(file_fd, data, sizeof(uint32_t));
8367 size_t total_size = meta.tuning_sensor_data_size;
8368 data = (void *)((uint8_t *)&meta.data);
8369 written_len += write(file_fd, data, total_size);
8370 total_size = meta.tuning_vfe_data_size;
8371 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8372 written_len += write(file_fd, data, total_size);
8373 total_size = meta.tuning_cpp_data_size;
8374 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8375 written_len += write(file_fd, data, total_size);
8376 total_size = meta.tuning_cac_data_size;
8377 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8378 written_len += write(file_fd, data, total_size);
8379 close(file_fd);
8380        } else {
8381            LOGE("failed to open file for metadata dumping");
8382 }
8383 }
8384}
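/* Editor's note -- illustrative sketch of the dump file layout produced above (not part
 * of the original source). Each
 * QCAMERA_DUMP_FRM_LOCATION<timestamp><dumpFrameCount>m_<type>_<frameNumber>.bin file
 * contains, in order:
 *   uint32_t tuning_data_version
 *   uint32_t tuning_sensor_data_size
 *   uint32_t tuning_vfe_data_size
 *   uint32_t tuning_cpp_data_size
 *   uint32_t tuning_cac_data_size
 *   uint32_t tuning_mod3_data_size   (always written as 0)
 *   uint8_t  data[]                  (sensor, VFE, CPP and CAC blocks, taken from
 *                                     meta.data at their TUNING_*_DATA_OFFSETs)
 */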
8385
8386/*===========================================================================
8387 * FUNCTION : cleanAndSortStreamInfo
8388 *
8389 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8390 * and sort them such that raw stream is at the end of the list
8391 *              and sort them such that raw streams are at the end of the list.
8392 *              This is a workaround for a camera daemon constraint.
8393 * PARAMETERS : None
8394 *
8395 *==========================================================================*/
8396void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8397{
8398 List<stream_info_t *> newStreamInfo;
8399
8400 /*clean up invalid streams*/
8401 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8402 it != mStreamInfo.end();) {
8403 if(((*it)->status) == INVALID){
8404 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8405 delete channel;
8406 free(*it);
8407 it = mStreamInfo.erase(it);
8408 } else {
8409 it++;
8410 }
8411 }
8412
8413 // Move preview/video/callback/snapshot streams into newList
8414 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8415 it != mStreamInfo.end();) {
8416 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8417 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8418 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8419 newStreamInfo.push_back(*it);
8420 it = mStreamInfo.erase(it);
8421 } else
8422 it++;
8423 }
8424 // Move raw streams into newList
8425 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8426 it != mStreamInfo.end();) {
8427 newStreamInfo.push_back(*it);
8428 it = mStreamInfo.erase(it);
8429 }
8430
8431 mStreamInfo = newStreamInfo;
8432}
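// Editor's note (illustrative, not part of the original source): a minimal sketch of the
// resulting ordering. If mStreamInfo initially holds {RAW16, PREVIEW(YUV), JPEG} with all
// entries VALID, cleanAndSortStreamInfo() leaves {PREVIEW(YUV), JPEG, RAW16} -- processed
// streams first, raw streams moved to the end of the list.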
8433
8434/*===========================================================================
8435 * FUNCTION : extractJpegMetadata
8436 *
8437 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8438 * JPEG metadata is cached in HAL, and return as part of capture
8439 * result when metadata is returned from camera daemon.
8440 *
8441 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8442 * @request: capture request
8443 *
8444 *==========================================================================*/
8445void QCamera3HardwareInterface::extractJpegMetadata(
8446 CameraMetadata& jpegMetadata,
8447 const camera3_capture_request_t *request)
8448{
8449 CameraMetadata frame_settings;
8450 frame_settings = request->settings;
8451
8452 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8453 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8454 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8455 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8456
8457 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8458 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8459 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8460 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8461
8462 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8463 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8464 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8465 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8466
8467 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8468 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8469 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8470 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8471
8472 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8473 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8474 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8475 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8476
8477 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8478 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8479 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8480 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8481
8482 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8483 int32_t thumbnail_size[2];
8484 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8485 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8486 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8487 int32_t orientation =
8488 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008489 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008490 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8491 int32_t temp;
8492 temp = thumbnail_size[0];
8493 thumbnail_size[0] = thumbnail_size[1];
8494 thumbnail_size[1] = temp;
8495 }
8496 }
8497 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8498 thumbnail_size,
8499 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8500 }
8501
8502}
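// Editor's note (illustrative, not from the original source): assuming a request with
// ANDROID_JPEG_THUMBNAIL_SIZE = {320, 240} and ANDROID_JPEG_ORIENTATION = 90, and a
// pipeline that rotates the JPEG itself (needJpegExifRotation() == false), the cached
// thumbnail size becomes {240, 320} so that it matches the rotated main image.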
8503
8504/*===========================================================================
8505 * FUNCTION : convertToRegions
8506 *
8507 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8508 *
8509 * PARAMETERS :
8510 * @rect : cam_rect_t struct to convert
8511 * @region : int32_t destination array
8512 * @weight : if we are converting from cam_area_t, weight is valid
8513 * else weight = -1
8514 *
8515 *==========================================================================*/
8516void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8517 int32_t *region, int weight)
8518{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008519 region[FACE_LEFT] = rect.left;
8520 region[FACE_TOP] = rect.top;
8521 region[FACE_RIGHT] = rect.left + rect.width;
8522 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008523 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008524 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008525 }
8526}
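// Editor's note -- worked example for convertToRegions() (illustrative, not from the
// original source; assumes FACE_LEFT..FACE_WEIGHT index the array in that order).
// A cam_rect_t of {left=100, top=200, width=300, height=400} with weight=1 is flattened
// to region[] = {100, 200, 400, 600, 1}, i.e. {left, top, left+width, top+height, weight}.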
8527
8528/*===========================================================================
8529 * FUNCTION : convertFromRegions
8530 *
8531 * DESCRIPTION: helper method to convert from array to cam_rect_t
8532 *
8533 * PARAMETERS :
8534 * @rect : cam_rect_t struct to convert
8535 * @region : int32_t destination array
8536 * @weight : if we are converting from cam_area_t, weight is valid
8537 * else weight = -1
8538 *
8539 *==========================================================================*/
8540void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008541 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008542{
Thierry Strudel3d639192016-09-09 11:52:26 -07008543 int32_t x_min = frame_settings.find(tag).data.i32[0];
8544 int32_t y_min = frame_settings.find(tag).data.i32[1];
8545 int32_t x_max = frame_settings.find(tag).data.i32[2];
8546 int32_t y_max = frame_settings.find(tag).data.i32[3];
8547 roi.weight = frame_settings.find(tag).data.i32[4];
8548 roi.rect.left = x_min;
8549 roi.rect.top = y_min;
8550 roi.rect.width = x_max - x_min;
8551 roi.rect.height = y_max - y_min;
8552}
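// Editor's note -- worked example for convertFromRegions() (illustrative, not from the
// original source). A framework region entry of {x_min=100, y_min=200, x_max=400,
// y_max=600, weight=1} yields roi.rect = {left=100, top=200, width=300, height=400} and
// roi.weight = 1, i.e. the inverse of the mapping done by convertToRegions().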
8553
8554/*===========================================================================
8555 * FUNCTION : resetIfNeededROI
8556 *
8557 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8558 * crop region
8559 *
8560 * PARAMETERS :
8561 * @roi : cam_area_t struct to resize
8562 * @scalerCropRegion : cam_crop_region_t region to compare against
8563 *
8564 *
8565 *==========================================================================*/
8566bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8567 const cam_crop_region_t* scalerCropRegion)
8568{
8569 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8570 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8571 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8572 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8573
8574    /* According to the spec, weight = 0 indicates that the roi should be disabled.
8575     * Without this check, the validation below (whether the roi lies inside the
8576     * scaler crop region) would fail, the roi would not be reset, and the
8577     * algorithm would keep using a stale roi window.
8578 */
8579 if (roi->weight == 0) {
8580 return true;
8581 }
8582
8583 if ((roi_x_max < scalerCropRegion->left) ||
8584        // right edge of roi window is left of the scaler crop's left edge
8585        (roi_y_max < scalerCropRegion->top) ||
8586        // bottom edge of roi window is above the scaler crop's top edge
8587        (roi->rect.left > crop_x_max) ||
8588        // left edge of roi window is beyond (to the right of) the scaler crop's right edge
8589        (roi->rect.top > crop_y_max)){
8590        // top edge of roi window is below the scaler crop's bottom edge
8591 return false;
8592 }
8593 if (roi->rect.left < scalerCropRegion->left) {
8594 roi->rect.left = scalerCropRegion->left;
8595 }
8596 if (roi->rect.top < scalerCropRegion->top) {
8597 roi->rect.top = scalerCropRegion->top;
8598 }
8599 if (roi_x_max > crop_x_max) {
8600 roi_x_max = crop_x_max;
8601 }
8602 if (roi_y_max > crop_y_max) {
8603 roi_y_max = crop_y_max;
8604 }
8605 roi->rect.width = roi_x_max - roi->rect.left;
8606 roi->rect.height = roi_y_max - roi->rect.top;
8607 return true;
8608}
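// Editor's note -- worked example for resetIfNeededROI() (illustrative, not from the
// original source). With roi.rect = {left=0, top=0, width=4000, height=3000}, weight=1
// and scalerCropRegion = {left=1000, top=750, width=2000, height=1500}:
//   roi_x_max=4000, roi_y_max=3000, crop_x_max=3000, crop_y_max=2250
// The roi overlaps the crop region, so it is clamped to
//   roi.rect = {left=1000, top=750, width=2000, height=1500}
// and the function returns true.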
8609
8610/*===========================================================================
8611 * FUNCTION : convertLandmarks
8612 *
8613 * DESCRIPTION: helper method to extract the landmarks from face detection info
8614 *
8615 * PARAMETERS :
8616 * @landmark_data : input landmark data to be converted
8617 * @landmarks : int32_t destination array
8618 *
8619 *
8620 *==========================================================================*/
8621void QCamera3HardwareInterface::convertLandmarks(
8622 cam_face_landmarks_info_t landmark_data,
8623 int32_t *landmarks)
8624{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008625 if (landmark_data.is_left_eye_valid) {
8626 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8627 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8628 } else {
8629 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8630 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8631 }
8632
8633 if (landmark_data.is_right_eye_valid) {
8634 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8635 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8636 } else {
8637 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8638 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8639 }
8640
8641 if (landmark_data.is_mouth_valid) {
8642 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8643 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8644 } else {
8645 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8646 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8647 }
8648}
8649
8650/*===========================================================================
8651 * FUNCTION : setInvalidLandmarks
8652 *
8653 * DESCRIPTION: helper method to set invalid landmarks
8654 *
8655 * PARAMETERS :
8656 * @landmarks : int32_t destination array
8657 *
8658 *
8659 *==========================================================================*/
8660void QCamera3HardwareInterface::setInvalidLandmarks(
8661 int32_t *landmarks)
8662{
8663 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8664 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8665 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8666 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8667 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8668 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008669}
8670
8671#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008672
8673/*===========================================================================
8674 * FUNCTION : getCapabilities
8675 *
8676 * DESCRIPTION: query camera capability from back-end
8677 *
8678 * PARAMETERS :
8679 * @ops : mm-interface ops structure
8680 * @cam_handle : camera handle for which we need capability
8681 *
8682 * RETURN : ptr type of capability structure
8683 * capability for success
8684 * NULL for failure
8685 *==========================================================================*/
8686cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8687 uint32_t cam_handle)
8688{
8689 int rc = NO_ERROR;
8690 QCamera3HeapMemory *capabilityHeap = NULL;
8691 cam_capability_t *cap_ptr = NULL;
8692
8693 if (ops == NULL) {
8694 LOGE("Invalid arguments");
8695 return NULL;
8696 }
8697
8698 capabilityHeap = new QCamera3HeapMemory(1);
8699 if (capabilityHeap == NULL) {
8700 LOGE("creation of capabilityHeap failed");
8701 return NULL;
8702 }
8703
8704 /* Allocate memory for capability buffer */
8705 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8706 if(rc != OK) {
8707        LOGE("No memory for capability");
8708 goto allocate_failed;
8709 }
8710
8711 /* Map memory for capability buffer */
8712 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8713
8714 rc = ops->map_buf(cam_handle,
8715 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8716 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8717 if(rc < 0) {
8718 LOGE("failed to map capability buffer");
8719 rc = FAILED_TRANSACTION;
8720 goto map_failed;
8721 }
8722
8723 /* Query Capability */
8724 rc = ops->query_capability(cam_handle);
8725 if(rc < 0) {
8726 LOGE("failed to query capability");
8727 rc = FAILED_TRANSACTION;
8728 goto query_failed;
8729 }
8730
8731 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8732 if (cap_ptr == NULL) {
8733 LOGE("out of memory");
8734 rc = NO_MEMORY;
8735 goto query_failed;
8736 }
8737
8738 memset(cap_ptr, 0, sizeof(cam_capability_t));
8739 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8740
8741 int index;
8742 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8743 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8744 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8745 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8746 }
8747
8748query_failed:
8749 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8750map_failed:
8751 capabilityHeap->deallocate();
8752allocate_failed:
8753 delete capabilityHeap;
8754
8755 if (rc != NO_ERROR) {
8756 return NULL;
8757 } else {
8758 return cap_ptr;
8759 }
8760}
8761
Thierry Strudel3d639192016-09-09 11:52:26 -07008762/*===========================================================================
8763 * FUNCTION : initCapabilities
8764 *
8765 * DESCRIPTION: initialize camera capabilities in static data struct
8766 *
8767 * PARAMETERS :
8768 * @cameraId : camera Id
8769 *
8770 * RETURN : int32_t type of status
8771 * NO_ERROR -- success
8772 * none-zero failure code
8773 *==========================================================================*/
8774int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8775{
8776 int rc = 0;
8777 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008778 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008779
8780 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8781 if (rc) {
8782 LOGE("camera_open failed. rc = %d", rc);
8783 goto open_failed;
8784 }
8785 if (!cameraHandle) {
8786 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8787 goto open_failed;
8788 }
8789
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008790 handle = get_main_camera_handle(cameraHandle->camera_handle);
8791 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8792 if (gCamCapability[cameraId] == NULL) {
8793 rc = FAILED_TRANSACTION;
8794 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008795 }
8796
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008797 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008798 if (is_dual_camera_by_idx(cameraId)) {
8799 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8800 gCamCapability[cameraId]->aux_cam_cap =
8801 getCapabilities(cameraHandle->ops, handle);
8802 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8803 rc = FAILED_TRANSACTION;
8804 free(gCamCapability[cameraId]);
8805 goto failed_op;
8806 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008807
8808 // Copy the main camera capability to main_cam_cap struct
8809 gCamCapability[cameraId]->main_cam_cap =
8810 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8811 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8812 LOGE("out of memory");
8813 rc = NO_MEMORY;
8814 goto failed_op;
8815 }
8816 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8817 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008818 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008819failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008820 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8821 cameraHandle = NULL;
8822open_failed:
8823 return rc;
8824}
8825
8826/*==========================================================================
8827 * FUNCTION   : get3AVersion
8828 *
8829 * DESCRIPTION: get the Q3A S/W version
8830 *
8831 * PARAMETERS :
8832 * @sw_version: Reference of Q3A structure which will hold version info upon
8833 * return
8834 *
8835 * RETURN : None
8836 *
8837 *==========================================================================*/
8838void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8839{
8840 if(gCamCapability[mCameraId])
8841 sw_version = gCamCapability[mCameraId]->q3a_version;
8842 else
8843 LOGE("Capability structure NULL!");
8844}
8845
8846
8847/*===========================================================================
8848 * FUNCTION : initParameters
8849 *
8850 * DESCRIPTION: initialize camera parameters
8851 *
8852 * PARAMETERS :
8853 *
8854 * RETURN : int32_t type of status
8855 * NO_ERROR -- success
8856 * none-zero failure code
8857 *==========================================================================*/
8858int QCamera3HardwareInterface::initParameters()
8859{
8860 int rc = 0;
8861
8862 //Allocate Set Param Buffer
8863 mParamHeap = new QCamera3HeapMemory(1);
8864 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8865 if(rc != OK) {
8866 rc = NO_MEMORY;
8867 LOGE("Failed to allocate SETPARM Heap memory");
8868 delete mParamHeap;
8869 mParamHeap = NULL;
8870 return rc;
8871 }
8872
8873 //Map memory for parameters buffer
8874 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8875 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8876 mParamHeap->getFd(0),
8877 sizeof(metadata_buffer_t),
8878 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8879 if(rc < 0) {
8880 LOGE("failed to map SETPARM buffer");
8881 rc = FAILED_TRANSACTION;
8882 mParamHeap->deallocate();
8883 delete mParamHeap;
8884 mParamHeap = NULL;
8885 return rc;
8886 }
8887
8888 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8889
8890 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8891 return rc;
8892}
8893
8894/*===========================================================================
8895 * FUNCTION : deinitParameters
8896 *
8897 * DESCRIPTION: de-initialize camera parameters
8898 *
8899 * PARAMETERS :
8900 *
8901 * RETURN : NONE
8902 *==========================================================================*/
8903void QCamera3HardwareInterface::deinitParameters()
8904{
8905 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8906 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8907
8908 mParamHeap->deallocate();
8909 delete mParamHeap;
8910 mParamHeap = NULL;
8911
8912 mParameters = NULL;
8913
8914 free(mPrevParameters);
8915 mPrevParameters = NULL;
8916}
8917
8918/*===========================================================================
8919 * FUNCTION : calcMaxJpegSize
8920 *
8921 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8922 *
8923 * PARAMETERS :
8924 *
8925 * RETURN : max_jpeg_size
8926 *==========================================================================*/
8927size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8928{
8929 size_t max_jpeg_size = 0;
8930 size_t temp_width, temp_height;
8931 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8932 MAX_SIZES_CNT);
8933 for (size_t i = 0; i < count; i++) {
8934 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8935 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8936 if (temp_width * temp_height > max_jpeg_size ) {
8937 max_jpeg_size = temp_width * temp_height;
8938 }
8939 }
8940 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8941 return max_jpeg_size;
8942}
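// Editor's note -- worked example for calcMaxJpegSize() (illustrative; the 4000x3000
// sensor size is an assumption, not taken from any capability table). For a largest
// picture size of 4000x3000:
//   max_jpeg_size = 4000 * 3000 * 3/2 + sizeof(camera3_jpeg_blob_t)
//                 = 18000000 bytes plus the blob header,
// i.e. a worst-case YUV420-sized buffer for the encoded JPEG plus its trailing blob.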
8943
8944/*===========================================================================
8945 * FUNCTION : getMaxRawSize
8946 *
8947 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8948 *
8949 * PARAMETERS :
8950 *
8951 * RETURN : Largest supported Raw Dimension
8952 *==========================================================================*/
8953cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8954{
8955 int max_width = 0;
8956 cam_dimension_t maxRawSize;
8957
8958 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8959 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8960 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8961 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8962 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8963 }
8964 }
8965 return maxRawSize;
8966}
8967
8968
8969/*===========================================================================
8970 * FUNCTION : calcMaxJpegDim
8971 *
8972 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8973 *
8974 * PARAMETERS :
8975 *
8976 * RETURN : max_jpeg_dim
8977 *==========================================================================*/
8978cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8979{
8980 cam_dimension_t max_jpeg_dim;
8981 cam_dimension_t curr_jpeg_dim;
8982 max_jpeg_dim.width = 0;
8983 max_jpeg_dim.height = 0;
8984 curr_jpeg_dim.width = 0;
8985 curr_jpeg_dim.height = 0;
8986 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8987 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8988 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8989 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8990 max_jpeg_dim.width * max_jpeg_dim.height ) {
8991 max_jpeg_dim.width = curr_jpeg_dim.width;
8992 max_jpeg_dim.height = curr_jpeg_dim.height;
8993 }
8994 }
8995 return max_jpeg_dim;
8996}
8997
8998/*===========================================================================
8999 * FUNCTION : addStreamConfig
9000 *
9001 * DESCRIPTION: adds the stream configuration to the array
9002 *
9003 * PARAMETERS :
9004 * @available_stream_configs : pointer to stream configuration array
9005 * @scalar_format : scalar format
9006 * @dim : configuration dimension
9007 * @config_type : input or output configuration type
9008 *
9009 * RETURN : NONE
9010 *==========================================================================*/
9011void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9012 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9013{
9014 available_stream_configs.add(scalar_format);
9015 available_stream_configs.add(dim.width);
9016 available_stream_configs.add(dim.height);
9017 available_stream_configs.add(config_type);
9018}
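// Editor's note (illustrative, not part of the original source): each call above appends
// one {scalar_format, width, height, config_type} quadruple. For example, assuming a
// 4000x3000 BLOB output dimension `dim`,
//   addStreamConfig(available_stream_configs, HAL_PIXEL_FORMAT_BLOB, dim,
//           ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
// adds {BLOB, 4000, 3000, OUTPUT} to the flattened stream configuration array.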
9019
9020/*===========================================================================
9021 * FUNCTION   : supportBurstCapture
9022 *
9023 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9024 *
9025 * PARAMETERS :
9026 * @cameraId : camera Id
9027 *
9028 * RETURN : true if camera supports BURST_CAPTURE
9029 * false otherwise
9030 *==========================================================================*/
9031bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9032{
9033 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9034 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9035 const int32_t highResWidth = 3264;
9036 const int32_t highResHeight = 2448;
9037
9038 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9039 // Maximum resolution images cannot be captured at >= 10fps
9040 // -> not supporting BURST_CAPTURE
9041 return false;
9042 }
9043
9044 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9045 // Maximum resolution images can be captured at >= 20fps
9046 // --> supporting BURST_CAPTURE
9047 return true;
9048 }
9049
9050 // Find the smallest highRes resolution, or largest resolution if there is none
9051 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9052 MAX_SIZES_CNT);
9053 size_t highRes = 0;
9054 while ((highRes + 1 < totalCnt) &&
9055 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9056 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9057 highResWidth * highResHeight)) {
9058 highRes++;
9059 }
9060 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9061 return true;
9062 } else {
9063 return false;
9064 }
9065}
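// Editor's note -- worked example for supportBurstCapture() (illustrative; the durations
// are assumptions). If picture_min_duration[0] is 40000000 ns (25 fps at full resolution),
// the <= 50 ms bound is met and BURST_CAPTURE is supported. If it is 80000000 ns
// (12.5 fps), the decision falls through to the smallest >= 3264x2448 ("highRes") entry,
// which must itself have a min duration <= 50 ms for BURST_CAPTURE to be reported.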
9066
9067/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009068 * FUNCTION : getPDStatIndex
9069 *
9070 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9071 *
9072 * PARAMETERS :
9073 * @caps : camera capabilities
9074 *
9075 * RETURN : int32_t type
9076 * non-negative - on success
9077 * -1 - on failure
9078 *==========================================================================*/
9079int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9080 if (nullptr == caps) {
9081 return -1;
9082 }
9083
9084 uint32_t metaRawCount = caps->meta_raw_channel_count;
9085 int32_t ret = -1;
9086 for (size_t i = 0; i < metaRawCount; i++) {
9087 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9088 ret = i;
9089 break;
9090 }
9091 }
9092
9093 return ret;
9094}
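// Editor's note -- usage sketch for getPDStatIndex() (illustrative, not from the original
// source):
//   int32_t pdIndex = getPDStatIndex(gCamCapability[cameraId]);
//   if (pdIndex >= 0) {
//       // sub_fmt[pdIndex] == CAM_FORMAT_SUBTYPE_PDAF_STATS: a PDAF stats meta raw
//       // channel is available at this index.
//   }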
9095
9096/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009097 * FUNCTION : initStaticMetadata
9098 *
9099 * DESCRIPTION: initialize the static metadata
9100 *
9101 * PARAMETERS :
9102 * @cameraId : camera Id
9103 *
9104 * RETURN : int32_t type of status
9105 * 0 -- success
9106 * non-zero failure code
9107 *==========================================================================*/
9108int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9109{
9110 int rc = 0;
9111 CameraMetadata staticInfo;
9112 size_t count = 0;
9113 bool limitedDevice = false;
9114 char prop[PROPERTY_VALUE_MAX];
9115 bool supportBurst = false;
9116
9117 supportBurst = supportBurstCapture(cameraId);
9118
9119 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9120     * guaranteed, or if the min fps at max resolution is less than 20 fps, it is
9121     * advertised as a limited device */
9122 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9123 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9124 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9125 !supportBurst;
9126
9127 uint8_t supportedHwLvl = limitedDevice ?
9128 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009129#ifndef USE_HAL_3_3
9130 // LEVEL_3 - This device will support level 3.
9131 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9132#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009133 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009134#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009135
9136 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9137 &supportedHwLvl, 1);
9138
9139 bool facingBack = false;
9140 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9141 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9142 facingBack = true;
9143 }
9144 /*HAL 3 only*/
9145 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9146 &gCamCapability[cameraId]->min_focus_distance, 1);
9147
9148 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9149 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9150
9151 /*should be using focal lengths but sensor doesn't provide that info now*/
9152 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9153 &gCamCapability[cameraId]->focal_length,
9154 1);
9155
9156 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9157 gCamCapability[cameraId]->apertures,
9158 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9159
9160 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9161 gCamCapability[cameraId]->filter_densities,
9162 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9163
9164
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009165 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9166 size_t mode_count =
9167 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9168 for (size_t i = 0; i < mode_count; i++) {
9169 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9170 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009171 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009172 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009173
9174 int32_t lens_shading_map_size[] = {
9175 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9176 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9177 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9178 lens_shading_map_size,
9179 sizeof(lens_shading_map_size)/sizeof(int32_t));
9180
9181 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9182 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9183
9184 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9185 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9186
9187 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9188 &gCamCapability[cameraId]->max_frame_duration, 1);
9189
9190 camera_metadata_rational baseGainFactor = {
9191 gCamCapability[cameraId]->base_gain_factor.numerator,
9192 gCamCapability[cameraId]->base_gain_factor.denominator};
9193 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9194 &baseGainFactor, 1);
9195
9196 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9197 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9198
9199 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9200 gCamCapability[cameraId]->pixel_array_size.height};
9201 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9202 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9203
9204 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9205 gCamCapability[cameraId]->active_array_size.top,
9206 gCamCapability[cameraId]->active_array_size.width,
9207 gCamCapability[cameraId]->active_array_size.height};
9208 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9209 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9210
9211 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9212 &gCamCapability[cameraId]->white_level, 1);
9213
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009214 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9215 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9216 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009217 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009218 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009219
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009220#ifndef USE_HAL_3_3
9221 bool hasBlackRegions = false;
9222 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9223 LOGW("black_region_count: %d is bounded to %d",
9224 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9225 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9226 }
9227 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9228 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9229 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9230 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9231 }
9232 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9233 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9234 hasBlackRegions = true;
9235 }
9236#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009237 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9238 &gCamCapability[cameraId]->flash_charge_duration, 1);
9239
9240 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9241 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9242
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009243 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9244 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9245 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009246 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9247 &timestampSource, 1);
9248
Thierry Strudel54dc9782017-02-15 12:12:10 -08009249 //update histogram vendor data
9250 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009251 &gCamCapability[cameraId]->histogram_size, 1);
9252
Thierry Strudel54dc9782017-02-15 12:12:10 -08009253 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009254 &gCamCapability[cameraId]->max_histogram_count, 1);
9255
Shuzhen Wang14415f52016-11-16 18:26:18 -08009256 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9257     //so that the app can request fewer bins than the maximum supported.
9258 std::vector<int32_t> histBins;
9259 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9260 histBins.push_back(maxHistBins);
9261 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9262 (maxHistBins & 0x1) == 0) {
9263 histBins.push_back(maxHistBins >> 1);
9264 maxHistBins >>= 1;
9265 }
9266 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9267 histBins.data(), histBins.size());
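    // Illustrative example (values not taken from any specific sensor): if max_histogram_count
    // were 512 and MIN_CAM_HISTOGRAM_STATS_SIZE were 64, the loop above would advertise
    // bin counts {512, 256, 128, 64}.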
9268
Thierry Strudel3d639192016-09-09 11:52:26 -07009269 int32_t sharpness_map_size[] = {
9270 gCamCapability[cameraId]->sharpness_map_size.width,
9271 gCamCapability[cameraId]->sharpness_map_size.height};
9272
9273 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9274 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9275
9276 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9277 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9278
Emilian Peev0f3c3162017-03-15 12:57:46 +00009279 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9280 if (0 <= indexPD) {
9281 // Advertise PD stats data as part of the Depth capabilities
9282 int32_t depthWidth =
9283 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9284 int32_t depthHeight =
9285 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009286 int32_t depthStride =
9287 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
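        // The PD raw plane carries 2 bytes per pixel (hence the * 2 above); dividing by 16,
        // presumably the size of one depth point-cloud sample (four 32-bit floats), gives the
        // maximum number of depth samples that fit into the blob advertised below.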
Emilian Peev0f3c3162017-03-15 12:57:46 +00009288 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9289 assert(0 < depthSamplesCount);
9290 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9291 &depthSamplesCount, 1);
9292
9293 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9294 depthHeight,
9295 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9296 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9297 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9298 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9299 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9300
9301 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9302 depthHeight, 33333333,
9303 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9304 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9305 depthMinDuration,
9306 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9307
9308 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9309 depthHeight, 0,
9310 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9311 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9312 depthStallDuration,
9313 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9314
9315 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9316 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009317
9318 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9319 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9320 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009321 }
9322
Thierry Strudel3d639192016-09-09 11:52:26 -07009323 int32_t scalar_formats[] = {
9324 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9325 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9326 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9327 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9328 HAL_PIXEL_FORMAT_RAW10,
9329 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009330 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9331 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9332 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009333
9334 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9335 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9336 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9337 count, MAX_SIZES_CNT, available_processed_sizes);
9338 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9339 available_processed_sizes, count * 2);
9340
9341 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9342 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9343 makeTable(gCamCapability[cameraId]->raw_dim,
9344 count, MAX_SIZES_CNT, available_raw_sizes);
9345 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9346 available_raw_sizes, count * 2);
9347
9348 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9349 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9350 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9351 count, MAX_SIZES_CNT, available_fps_ranges);
9352 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9353 available_fps_ranges, count * 2);
9354
9355 camera_metadata_rational exposureCompensationStep = {
9356 gCamCapability[cameraId]->exp_compensation_step.numerator,
9357 gCamCapability[cameraId]->exp_compensation_step.denominator};
9358 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9359 &exposureCompensationStep, 1);
9360
9361 Vector<uint8_t> availableVstabModes;
9362 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
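    // OFF is always advertised; ON (EIS) is added below only for back-facing cameras that
    // report EIS 2.0/3.0 support and only when the persist.camera.eis.enable property allows it.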
9363 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009364 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009365 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009366 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009367 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009368 count = IS_TYPE_MAX;
9369 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9370 for (size_t i = 0; i < count; i++) {
9371 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9372 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9373 eisSupported = true;
9374 break;
9375 }
9376 }
9377 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009378 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9379 }
9380 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9381 availableVstabModes.array(), availableVstabModes.size());
9382
9383 /*HAL 1 and HAL 3 common*/
9384 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9385 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9386 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009387 // Cap the max zoom to the max preferred value
9388 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009389 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9390 &maxZoom, 1);
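    // Illustrative example: a last zoom table entry of 400 against the 100 (1.0x) baseline
    // yields 4x, which is then capped at MAX_PREFERRED_ZOOM_RATIO; note the ratio is computed
    // with integer division before the cap.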
9391
9392 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9393 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9394
9395 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9396 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9397 max3aRegions[2] = 0; /* AF not supported */
9398 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9399 max3aRegions, 3);
9400
9401 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9402 memset(prop, 0, sizeof(prop));
9403 property_get("persist.camera.facedetect", prop, "1");
9404 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9405 LOGD("Support face detection mode: %d",
9406 supportedFaceDetectMode);
9407
9408 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009409     /* supported face detect mode should be OFF if the max number of faces is 0 */
9410 if (maxFaces <= 0) {
9411 supportedFaceDetectMode = 0;
9412 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009413 Vector<uint8_t> availableFaceDetectModes;
9414 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9415 if (supportedFaceDetectMode == 1) {
9416 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9417 } else if (supportedFaceDetectMode == 2) {
9418 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9419 } else if (supportedFaceDetectMode == 3) {
9420 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9421 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9422 } else {
9423 maxFaces = 0;
9424 }
9425 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9426 availableFaceDetectModes.array(),
9427 availableFaceDetectModes.size());
9428 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9429 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009430 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9431 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9432 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009433
9434 int32_t exposureCompensationRange[] = {
9435 gCamCapability[cameraId]->exposure_compensation_min,
9436 gCamCapability[cameraId]->exposure_compensation_max};
9437 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9438 exposureCompensationRange,
9439 sizeof(exposureCompensationRange)/sizeof(int32_t));
9440
9441 uint8_t lensFacing = (facingBack) ?
9442 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9443 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9444
9445 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9446 available_thumbnail_sizes,
9447 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9448
9449     /* all supported sizes will be combined into this single tag */
9450 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9451 /*android.scaler.availableStreamConfigurations*/
9452 Vector<int32_t> available_stream_configs;
9453 cam_dimension_t active_array_dim;
9454 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9455 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009456
9457     /* Advertise the list of supported input dimensions based on the property below.
9458     By default all sizes up to 5MP will be advertised.
9459     Note that the setprop resolution format should be WxH,
9460     e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9461     To list all supported sizes, set the property to "0x0" */
9462 cam_dimension_t minInputSize = {2592,1944}; //5MP
9463 memset(prop, 0, sizeof(prop));
9464 property_get("persist.camera.input.minsize", prop, "2592x1944");
9465 if (strlen(prop) > 0) {
9466 char *saveptr = NULL;
9467 char *token = strtok_r(prop, "x", &saveptr);
9468 if (token != NULL) {
9469 minInputSize.width = atoi(token);
9470 }
9471 token = strtok_r(NULL, "x", &saveptr);
9472 if (token != NULL) {
9473 minInputSize.height = atoi(token);
9474 }
9475 }
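    // Example: persist.camera.input.minsize set to "1280x720" parses to minInputSize = {1280, 720};
    // "0x0" removes the size restriction. Only the first picture size entry (i == 0) is considered
    // as a reprocess input in the loop below.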
9476
Thierry Strudel3d639192016-09-09 11:52:26 -07009477     /* Add input/output stream configurations for each scalar format */
9478 for (size_t j = 0; j < scalar_formats_count; j++) {
9479 switch (scalar_formats[j]) {
9480 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9481 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9482 case HAL_PIXEL_FORMAT_RAW10:
9483 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9484 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9485 addStreamConfig(available_stream_configs, scalar_formats[j],
9486 gCamCapability[cameraId]->raw_dim[i],
9487 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9488 }
9489 break;
9490 case HAL_PIXEL_FORMAT_BLOB:
9491 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9492 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9493 addStreamConfig(available_stream_configs, scalar_formats[j],
9494 gCamCapability[cameraId]->picture_sizes_tbl[i],
9495 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9496 }
9497 break;
9498 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9499 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9500 default:
9501 cam_dimension_t largest_picture_size;
9502 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9503 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9504 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9505 addStreamConfig(available_stream_configs, scalar_formats[j],
9506 gCamCapability[cameraId]->picture_sizes_tbl[i],
9507 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009508             /* For the two formats handled here (IMPLEMENTATION_DEFINED and YCbCr_420_888) we also support input streams for reprocessing; advertise those as well */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009509 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9510 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009511 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9512 >= minInputSize.width) || (gCamCapability[cameraId]->
9513 picture_sizes_tbl[i].height >= minInputSize.height)) {
9514 addStreamConfig(available_stream_configs, scalar_formats[j],
9515 gCamCapability[cameraId]->picture_sizes_tbl[i],
9516 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9517 }
9518 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009519 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009520
Thierry Strudel3d639192016-09-09 11:52:26 -07009521 break;
9522 }
9523 }
9524
9525 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9526 available_stream_configs.array(), available_stream_configs.size());
9527 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9528 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9529
9530 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9531 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9532
9533 /* android.scaler.availableMinFrameDurations */
9534 Vector<int64_t> available_min_durations;
9535 for (size_t j = 0; j < scalar_formats_count; j++) {
9536 switch (scalar_formats[j]) {
9537 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9538 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9539 case HAL_PIXEL_FORMAT_RAW10:
9540 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9541 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9542 available_min_durations.add(scalar_formats[j]);
9543 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9544 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9545 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9546 }
9547 break;
9548 default:
9549 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9550 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9551 available_min_durations.add(scalar_formats[j]);
9552 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9553 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9554 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9555 }
9556 break;
9557 }
9558 }
9559 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9560 available_min_durations.array(), available_min_durations.size());
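    // Each entry above is a (format, width, height, minimum frame duration) quadruple, per the
    // ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS layout.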
9561
9562 Vector<int32_t> available_hfr_configs;
9563 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9564 int32_t fps = 0;
9565 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9566 case CAM_HFR_MODE_60FPS:
9567 fps = 60;
9568 break;
9569 case CAM_HFR_MODE_90FPS:
9570 fps = 90;
9571 break;
9572 case CAM_HFR_MODE_120FPS:
9573 fps = 120;
9574 break;
9575 case CAM_HFR_MODE_150FPS:
9576 fps = 150;
9577 break;
9578 case CAM_HFR_MODE_180FPS:
9579 fps = 180;
9580 break;
9581 case CAM_HFR_MODE_210FPS:
9582 fps = 210;
9583 break;
9584 case CAM_HFR_MODE_240FPS:
9585 fps = 240;
9586 break;
9587 case CAM_HFR_MODE_480FPS:
9588 fps = 480;
9589 break;
9590 case CAM_HFR_MODE_OFF:
9591 case CAM_HFR_MODE_MAX:
9592 default:
9593 break;
9594 }
9595
9596 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9597 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9598             /* For each HFR frame rate, we need to advertise one variable fps range
9599              * and one fixed fps range per dimension. E.g., for 120 FPS, advertise
9600              * [30, 120] and [120, 120]. While camcorder preview alone is running,
9601              * the app sets [30, 120]; when video recording starts, [120, 120] is
9602              * set. This way the sensor configuration does not change when recording
9603              * starts. */
9604
9605 /* (width, height, fps_min, fps_max, batch_size_max) */
9606 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9607 j < MAX_SIZES_CNT; j++) {
9608 available_hfr_configs.add(
9609 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9610 available_hfr_configs.add(
9611 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9612 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9613 available_hfr_configs.add(fps);
9614 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9615
9616 /* (width, height, fps_min, fps_max, batch_size_max) */
9617 available_hfr_configs.add(
9618 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9619 available_hfr_configs.add(
9620 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9621 available_hfr_configs.add(fps);
9622 available_hfr_configs.add(fps);
9623 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9624 }
9625 }
9626 }
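    // Illustrative example: a 1920x1080 entry in the 120 fps HFR table produces
    // (1920, 1080, PREVIEW_FPS_FOR_HFR, 120, 120 / PREVIEW_FPS_FOR_HFR) followed by
    // (1920, 1080, 120, 120, 120 / PREVIEW_FPS_FOR_HFR).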
9627 //Advertise HFR capability only if the property is set
9628 memset(prop, 0, sizeof(prop));
9629 property_get("persist.camera.hal3hfr.enable", prop, "1");
9630 uint8_t hfrEnable = (uint8_t)atoi(prop);
9631
9632 if(hfrEnable && available_hfr_configs.array()) {
9633 staticInfo.update(
9634 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9635 available_hfr_configs.array(), available_hfr_configs.size());
9636 }
9637
9638 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9639 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9640 &max_jpeg_size, 1);
9641
9642 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9643 size_t size = 0;
9644 count = CAM_EFFECT_MODE_MAX;
9645 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9646 for (size_t i = 0; i < count; i++) {
9647 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9648 gCamCapability[cameraId]->supported_effects[i]);
9649 if (NAME_NOT_FOUND != val) {
9650 avail_effects[size] = (uint8_t)val;
9651 size++;
9652 }
9653 }
9654 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9655 avail_effects,
9656 size);
9657
9658 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9659 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9660 size_t supported_scene_modes_cnt = 0;
9661 count = CAM_SCENE_MODE_MAX;
9662 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9663 for (size_t i = 0; i < count; i++) {
9664 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9665 CAM_SCENE_MODE_OFF) {
9666 int val = lookupFwkName(SCENE_MODES_MAP,
9667 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9668 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009669
Thierry Strudel3d639192016-09-09 11:52:26 -07009670 if (NAME_NOT_FOUND != val) {
9671 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9672 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9673 supported_scene_modes_cnt++;
9674 }
9675 }
9676 }
9677 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9678 avail_scene_modes,
9679 supported_scene_modes_cnt);
9680
9681 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9682 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9683 supported_scene_modes_cnt,
9684 CAM_SCENE_MODE_MAX,
9685 scene_mode_overrides,
9686 supported_indexes,
9687 cameraId);
9688
9689 if (supported_scene_modes_cnt == 0) {
9690 supported_scene_modes_cnt = 1;
9691 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9692 }
9693
9694 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9695 scene_mode_overrides, supported_scene_modes_cnt * 3);
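    // The overrides array packs one (AE, AWB, AF) override triple per advertised scene mode,
    // per the ANDROID_CONTROL_SCENE_MODE_OVERRIDES layout, which is why the count is
    // supported_scene_modes_cnt * 3.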
9696
9697 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9698 ANDROID_CONTROL_MODE_AUTO,
9699 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9700 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9701 available_control_modes,
9702 3);
9703
9704 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9705 size = 0;
9706 count = CAM_ANTIBANDING_MODE_MAX;
9707 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9708 for (size_t i = 0; i < count; i++) {
9709 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9710 gCamCapability[cameraId]->supported_antibandings[i]);
9711 if (NAME_NOT_FOUND != val) {
9712 avail_antibanding_modes[size] = (uint8_t)val;
9713 size++;
9714 }
9715
9716 }
9717 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9718 avail_antibanding_modes,
9719 size);
9720
9721 uint8_t avail_abberation_modes[] = {
9722 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9723 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9724 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9725 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9726 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9727 if (0 == count) {
9728         // If no aberration correction modes are available for a device, advertise only the OFF mode
9729 size = 1;
9730 } else {
9731         // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9732         // So, advertise all 3 modes if at least one mode is supported, as per the
9733         // new M requirement.
9734 size = 3;
9735 }
9736 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9737 avail_abberation_modes,
9738 size);
9739
9740 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9741 size = 0;
9742 count = CAM_FOCUS_MODE_MAX;
9743 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9744 for (size_t i = 0; i < count; i++) {
9745 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9746 gCamCapability[cameraId]->supported_focus_modes[i]);
9747 if (NAME_NOT_FOUND != val) {
9748 avail_af_modes[size] = (uint8_t)val;
9749 size++;
9750 }
9751 }
9752 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9753 avail_af_modes,
9754 size);
9755
9756 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9757 size = 0;
9758 count = CAM_WB_MODE_MAX;
9759 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9760 for (size_t i = 0; i < count; i++) {
9761 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9762 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9763 gCamCapability[cameraId]->supported_white_balances[i]);
9764 if (NAME_NOT_FOUND != val) {
9765 avail_awb_modes[size] = (uint8_t)val;
9766 size++;
9767 }
9768 }
9769 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9770 avail_awb_modes,
9771 size);
9772
9773 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9774 count = CAM_FLASH_FIRING_LEVEL_MAX;
9775 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9776 count);
9777 for (size_t i = 0; i < count; i++) {
9778 available_flash_levels[i] =
9779 gCamCapability[cameraId]->supported_firing_levels[i];
9780 }
9781 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9782 available_flash_levels, count);
9783
9784 uint8_t flashAvailable;
9785 if (gCamCapability[cameraId]->flash_available)
9786 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9787 else
9788 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9789 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9790 &flashAvailable, 1);
9791
9792 Vector<uint8_t> avail_ae_modes;
9793 count = CAM_AE_MODE_MAX;
9794 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9795 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009796 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9797 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9798 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9799 }
9800 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009801 }
9802 if (flashAvailable) {
9803 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9804 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9805 }
9806 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9807 avail_ae_modes.array(),
9808 avail_ae_modes.size());
9809
9810 int32_t sensitivity_range[2];
9811 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9812 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9813 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9814 sensitivity_range,
9815 sizeof(sensitivity_range) / sizeof(int32_t));
9816
9817 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9818 &gCamCapability[cameraId]->max_analog_sensitivity,
9819 1);
9820
9821 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9822 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9823 &sensor_orientation,
9824 1);
9825
9826 int32_t max_output_streams[] = {
9827 MAX_STALLING_STREAMS,
9828 MAX_PROCESSED_STREAMS,
9829 MAX_RAW_STREAMS};
9830 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9831 max_output_streams,
9832 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9833
9834 uint8_t avail_leds = 0;
9835 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9836 &avail_leds, 0);
9837
9838 uint8_t focus_dist_calibrated;
9839 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9840 gCamCapability[cameraId]->focus_dist_calibrated);
9841 if (NAME_NOT_FOUND != val) {
9842 focus_dist_calibrated = (uint8_t)val;
9843 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9844 &focus_dist_calibrated, 1);
9845 }
9846
9847 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9848 size = 0;
9849 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9850 MAX_TEST_PATTERN_CNT);
9851 for (size_t i = 0; i < count; i++) {
9852 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9853 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9854 if (NAME_NOT_FOUND != testpatternMode) {
9855 avail_testpattern_modes[size] = testpatternMode;
9856 size++;
9857 }
9858 }
9859 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9860 avail_testpattern_modes,
9861 size);
9862
9863 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9864 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9865 &max_pipeline_depth,
9866 1);
9867
9868 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9869 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9870 &partial_result_count,
9871 1);
9872
9873 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9874 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9875
9876 Vector<uint8_t> available_capabilities;
9877 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9878 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9879 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9880 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9881 if (supportBurst) {
9882 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9883 }
9884 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9885 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9886 if (hfrEnable && available_hfr_configs.array()) {
9887 available_capabilities.add(
9888 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9889 }
9890
9891 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9892 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9893 }
9894 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9895 available_capabilities.array(),
9896 available_capabilities.size());
9897
9898     // aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9899     // The assumption is that all Bayer cameras support MANUAL_SENSOR.
9900 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9901 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9902
9903 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9904 &aeLockAvailable, 1);
9905
9906     // awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9907     // BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9908 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9909 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9910
9911 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9912 &awbLockAvailable, 1);
9913
9914 int32_t max_input_streams = 1;
9915 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9916 &max_input_streams,
9917 1);
9918
9919 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9920 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9921 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9922 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9923 HAL_PIXEL_FORMAT_YCbCr_420_888};
9924 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9925 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
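    // Decoded, the map above reads: IMPLEMENTATION_DEFINED input -> {BLOB, YCbCr_420_888},
    // and YCbCr_420_888 input -> {BLOB, YCbCr_420_888}.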
9926
9927 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9928 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9929 &max_latency,
9930 1);
9931
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009932#ifndef USE_HAL_3_3
9933 int32_t isp_sensitivity_range[2];
9934 isp_sensitivity_range[0] =
9935 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9936 isp_sensitivity_range[1] =
9937 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9938 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9939 isp_sensitivity_range,
9940 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9941#endif
9942
Thierry Strudel3d639192016-09-09 11:52:26 -07009943 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9944 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9945 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9946 available_hot_pixel_modes,
9947 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9948
9949 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9950 ANDROID_SHADING_MODE_FAST,
9951 ANDROID_SHADING_MODE_HIGH_QUALITY};
9952 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9953 available_shading_modes,
9954 3);
9955
9956 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9957 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9958 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9959 available_lens_shading_map_modes,
9960 2);
9961
9962 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9963 ANDROID_EDGE_MODE_FAST,
9964 ANDROID_EDGE_MODE_HIGH_QUALITY,
9965 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9966 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9967 available_edge_modes,
9968 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9969
9970 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9971 ANDROID_NOISE_REDUCTION_MODE_FAST,
9972 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9973 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9974 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9975 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9976 available_noise_red_modes,
9977 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9978
9979 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9980 ANDROID_TONEMAP_MODE_FAST,
9981 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9982 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9983 available_tonemap_modes,
9984 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9985
9986 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9987 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9988 available_hot_pixel_map_modes,
9989 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9990
9991 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9992 gCamCapability[cameraId]->reference_illuminant1);
9993 if (NAME_NOT_FOUND != val) {
9994 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9995 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9996 }
9997
9998 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9999 gCamCapability[cameraId]->reference_illuminant2);
10000 if (NAME_NOT_FOUND != val) {
10001 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10002 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10003 }
10004
10005 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10006 (void *)gCamCapability[cameraId]->forward_matrix1,
10007 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10008
10009 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10010 (void *)gCamCapability[cameraId]->forward_matrix2,
10011 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10012
10013 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10014 (void *)gCamCapability[cameraId]->color_transform1,
10015 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10016
10017 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10018 (void *)gCamCapability[cameraId]->color_transform2,
10019 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10020
10021 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10022 (void *)gCamCapability[cameraId]->calibration_transform1,
10023 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10024
10025 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10026 (void *)gCamCapability[cameraId]->calibration_transform2,
10027 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10028
10029 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10030 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10031 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10032 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10033 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10034 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10035 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10036 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10037 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10038 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10039 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10040 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10041 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10042 ANDROID_JPEG_GPS_COORDINATES,
10043 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10044 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10045 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10046 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10047 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10048 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10049 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10050 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10051 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10052 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010053#ifndef USE_HAL_3_3
10054 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10055#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010056 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010057 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010058 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10059 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010060 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010061 /* DevCamDebug metadata request_keys_basic */
10062 DEVCAMDEBUG_META_ENABLE,
10063 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010064 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010065 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010066 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010067 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010068 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010069 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010070
10071 size_t request_keys_cnt =
10072 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10073 Vector<int32_t> available_request_keys;
10074 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10075 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10076 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10077 }
10078
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010079 if (gExposeEnableZslKey) {
Chien-Yu Chen3b630e52017-06-02 15:39:47 -070010080 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || cameraId == 0) {
10081 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10082 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010083 }
10084
Thierry Strudel3d639192016-09-09 11:52:26 -070010085 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10086 available_request_keys.array(), available_request_keys.size());
10087
10088 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10089 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10090 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10091 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10092 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10093 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10094 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10095 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10096 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10097 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10098 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10099 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10100 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10101 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10102 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10103 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10104 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010105 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010106 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10107 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10108 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010109 ANDROID_STATISTICS_FACE_SCORES,
10110#ifndef USE_HAL_3_3
10111 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10112#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010113 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010114 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010115 // DevCamDebug metadata result_keys_basic
10116 DEVCAMDEBUG_META_ENABLE,
10117 // DevCamDebug metadata result_keys AF
10118 DEVCAMDEBUG_AF_LENS_POSITION,
10119 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10120 DEVCAMDEBUG_AF_TOF_DISTANCE,
10121 DEVCAMDEBUG_AF_LUMA,
10122 DEVCAMDEBUG_AF_HAF_STATE,
10123 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10124 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10125 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10126 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10127 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10128 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10129 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10130 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10131 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10132 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10133 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10134 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10135 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10136 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10137 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10138 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10139 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10140 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10141 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10142 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10143 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10144 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10145 // DevCamDebug metadata result_keys AEC
10146 DEVCAMDEBUG_AEC_TARGET_LUMA,
10147 DEVCAMDEBUG_AEC_COMP_LUMA,
10148 DEVCAMDEBUG_AEC_AVG_LUMA,
10149 DEVCAMDEBUG_AEC_CUR_LUMA,
10150 DEVCAMDEBUG_AEC_LINECOUNT,
10151 DEVCAMDEBUG_AEC_REAL_GAIN,
10152 DEVCAMDEBUG_AEC_EXP_INDEX,
10153 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010154 // DevCamDebug metadata result_keys zzHDR
10155 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10156 DEVCAMDEBUG_AEC_L_LINECOUNT,
10157 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10158 DEVCAMDEBUG_AEC_S_LINECOUNT,
10159 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10160 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10161 // DevCamDebug metadata result_keys ADRC
10162 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10163 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10164 DEVCAMDEBUG_AEC_GTM_RATIO,
10165 DEVCAMDEBUG_AEC_LTM_RATIO,
10166 DEVCAMDEBUG_AEC_LA_RATIO,
10167 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010168 // DevCamDebug metadata result_keys AWB
10169 DEVCAMDEBUG_AWB_R_GAIN,
10170 DEVCAMDEBUG_AWB_G_GAIN,
10171 DEVCAMDEBUG_AWB_B_GAIN,
10172 DEVCAMDEBUG_AWB_CCT,
10173 DEVCAMDEBUG_AWB_DECISION,
10174 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010175 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10176 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10177 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010178 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010179 };
10180
Thierry Strudel3d639192016-09-09 11:52:26 -070010181 size_t result_keys_cnt =
10182 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10183
10184 Vector<int32_t> available_result_keys;
10185 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10186 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10187 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10188 }
10189 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10190 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10191 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10192 }
10193 if (supportedFaceDetectMode == 1) {
10194 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10195 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10196 } else if ((supportedFaceDetectMode == 2) ||
10197 (supportedFaceDetectMode == 3)) {
10198 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10199 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10200 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010201#ifndef USE_HAL_3_3
10202 if (hasBlackRegions) {
10203 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10204 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10205 }
10206#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010207
10208 if (gExposeEnableZslKey) {
10209 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10210 }
10211
Thierry Strudel3d639192016-09-09 11:52:26 -070010212 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10213 available_result_keys.array(), available_result_keys.size());
10214
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010215 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010216 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10217 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10218 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10219 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10220 ANDROID_SCALER_CROPPING_TYPE,
10221 ANDROID_SYNC_MAX_LATENCY,
10222 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10223 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10224 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10225 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10226 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10227 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10228 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10229 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10230 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10231 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10232 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10233 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10234 ANDROID_LENS_FACING,
10235 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10236 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10237 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10238 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10239 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10240 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10241 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10242 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10243 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10244 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10245 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10246 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10247 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10248 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10249 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10250 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10251 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10252 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10253 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10254 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010255 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010256 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10257 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10258 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10259 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10260 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10261 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10262 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10263 ANDROID_CONTROL_AVAILABLE_MODES,
10264 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10265 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10266 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10267 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010268 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10269#ifndef USE_HAL_3_3
10270 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10271 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10272#endif
10273 };
10274
10275 Vector<int32_t> available_characteristics_keys;
10276 available_characteristics_keys.appendArray(characteristics_keys_basic,
10277 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10278#ifndef USE_HAL_3_3
10279 if (hasBlackRegions) {
10280 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10281 }
10282#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010283
10284 if (0 <= indexPD) {
10285 int32_t depthKeys[] = {
10286 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10287 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10288 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10289 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10290 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10291 };
10292 available_characteristics_keys.appendArray(depthKeys,
10293 sizeof(depthKeys) / sizeof(depthKeys[0]));
10294 }
10295
Thierry Strudel3d639192016-09-09 11:52:26 -070010296 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010297 available_characteristics_keys.array(),
10298 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010299
10300     /* Available stall durations depend on the HW + SW and will differ between devices */
10301     /* TODO: add entries for RAW after implementation */
10302 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10303 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10304
10305 Vector<int64_t> available_stall_durations;
10306 for (uint32_t j = 0; j < stall_formats_count; j++) {
10307 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10308 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10309 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10310 available_stall_durations.add(stall_formats[j]);
10311 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10312 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10313 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10314 }
10315 } else {
10316 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10317 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10318 available_stall_durations.add(stall_formats[j]);
10319 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10320 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10321 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10322 }
10323 }
10324 }
10325 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10326 available_stall_durations.array(),
10327 available_stall_durations.size());
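    // Each entry above is a (format, width, height, stall duration) quadruple, matching the
    // ANDROID_SCALER_AVAILABLE_STALL_DURATIONS layout (durations in ns per the tag definition).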
10328
10329 //QCAMERA3_OPAQUE_RAW
10330 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10331 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10332 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10333 case LEGACY_RAW:
10334 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10335 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10336 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10337 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10338 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10339 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10340 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10341 break;
10342 case MIPI_RAW:
10343 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10344 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10345 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10346 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10347 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10348 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10349 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10350 break;
10351 default:
10352 LOGE("unknown opaque_raw_format %d",
10353 gCamCapability[cameraId]->opaque_raw_fmt);
10354 break;
10355 }
10356 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10357
10358 Vector<int32_t> strides;
10359 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10360 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10361 cam_stream_buf_plane_info_t buf_planes;
10362 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10363 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10364 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10365 &gCamCapability[cameraId]->padding_info, &buf_planes);
10366 strides.add(buf_planes.plane_info.mp[0].stride);
10367 }
10368 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10369 strides.size());
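    // Illustrative note: QCAMERA3_OPAQUE_RAW_STRIDES is a flat list of (width, height,
    // stride) triples, one per supported raw dimension. E.g. for a hypothetical
    // 4208x3120 raw dimension the triple would be (4208, 3120, stride), where stride is
    // whatever mm_stream_calc_offset_raw() computes for the chosen format and padding.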
10370
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010371 //TBD: remove the following line once backend advertises zzHDR in feature mask
10372 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010373 //Video HDR default
10374 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10375 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010376 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010377 int32_t vhdr_mode[] = {
10378 QCAMERA3_VIDEO_HDR_MODE_OFF,
10379 QCAMERA3_VIDEO_HDR_MODE_ON};
10380
10381 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10382 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10383 vhdr_mode, vhdr_mode_count);
10384 }
10385
Thierry Strudel3d639192016-09-09 11:52:26 -070010386 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10387 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10388 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10389
10390 uint8_t isMonoOnly =
10391 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10392 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10393 &isMonoOnly, 1);
10394
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010395#ifndef USE_HAL_3_3
10396 Vector<int32_t> opaque_size;
10397 for (size_t j = 0; j < scalar_formats_count; j++) {
10398 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10399 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10400 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10401 cam_stream_buf_plane_info_t buf_planes;
10402
10403 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10404 &gCamCapability[cameraId]->padding_info, &buf_planes);
10405
10406 if (rc == 0) {
10407 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10408 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10409 opaque_size.add(buf_planes.plane_info.frame_len);
10410                } else {
10411 LOGE("raw frame calculation failed!");
10412 }
10413 }
10414 }
10415 }
10416
10417 if ((opaque_size.size() > 0) &&
10418 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10419 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10420 else
10421        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10422#endif
10423
Thierry Strudel04e026f2016-10-10 11:27:36 -070010424 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10425 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10426 size = 0;
10427 count = CAM_IR_MODE_MAX;
10428 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10429 for (size_t i = 0; i < count; i++) {
10430 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10431 gCamCapability[cameraId]->supported_ir_modes[i]);
10432 if (NAME_NOT_FOUND != val) {
10433 avail_ir_modes[size] = (int32_t)val;
10434 size++;
10435 }
10436 }
10437 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10438 avail_ir_modes, size);
10439 }
10440
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010441 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10442 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10443 size = 0;
10444 count = CAM_AEC_CONVERGENCE_MAX;
10445 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10446 for (size_t i = 0; i < count; i++) {
10447 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10448 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10449 if (NAME_NOT_FOUND != val) {
10450 available_instant_aec_modes[size] = (int32_t)val;
10451 size++;
10452 }
10453 }
10454 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10455 available_instant_aec_modes, size);
10456 }
10457
Thierry Strudel54dc9782017-02-15 12:12:10 -080010458 int32_t sharpness_range[] = {
10459 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10460 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10461 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10462
10463 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10464 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10465 size = 0;
10466 count = CAM_BINNING_CORRECTION_MODE_MAX;
10467 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10468 for (size_t i = 0; i < count; i++) {
10469 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10470 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10471 gCamCapability[cameraId]->supported_binning_modes[i]);
10472 if (NAME_NOT_FOUND != val) {
10473 avail_binning_modes[size] = (int32_t)val;
10474 size++;
10475 }
10476 }
10477 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10478 avail_binning_modes, size);
10479 }
10480
10481 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10482 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10483 size = 0;
10484 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10485 for (size_t i = 0; i < count; i++) {
10486 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10487 gCamCapability[cameraId]->supported_aec_modes[i]);
10488 if (NAME_NOT_FOUND != val)
10489 available_aec_modes[size++] = val;
10490 }
10491 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10492 available_aec_modes, size);
10493 }
10494
10495 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10496 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10497 size = 0;
10498 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10499 for (size_t i = 0; i < count; i++) {
10500 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10501 gCamCapability[cameraId]->supported_iso_modes[i]);
10502 if (NAME_NOT_FOUND != val)
10503 available_iso_modes[size++] = val;
10504 }
10505 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10506 available_iso_modes, size);
10507 }
10508
10509 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010510 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010511 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10512 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10513 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10514
10515 int32_t available_saturation_range[4];
10516 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10517 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10518 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10519 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10520 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10521 available_saturation_range, 4);
10522
10523 uint8_t is_hdr_values[2];
10524 is_hdr_values[0] = 0;
10525 is_hdr_values[1] = 1;
10526 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10527 is_hdr_values, 2);
10528
10529 float is_hdr_confidence_range[2];
10530 is_hdr_confidence_range[0] = 0.0;
10531 is_hdr_confidence_range[1] = 1.0;
10532 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10533 is_hdr_confidence_range, 2);
10534
Emilian Peev0a972ef2017-03-16 10:25:53 +000010535 size_t eepromLength = strnlen(
10536 reinterpret_cast<const char *>(
10537 gCamCapability[cameraId]->eeprom_version_info),
10538 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10539 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010540 char easelInfo[] = ",E:N";
10541 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10542 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10543 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010544 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10545 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010546 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010547 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10548 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10549 }
10550
Thierry Strudel3d639192016-09-09 11:52:26 -070010551 gStaticMetadata[cameraId] = staticInfo.release();
10552 return rc;
10553}
10554
10555/*===========================================================================
10556 * FUNCTION : makeTable
10557 *
10558 * DESCRIPTION: make a table of sizes
10559 *
10560 * PARAMETERS :
10561 *   @dimTable / @size      : source dimension table and its valid entry count
10562 *   @max_size / @sizeTable : max entries to copy and the flattened {w, h, ...} output
10563 *==========================================================================*/
10564void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10565 size_t max_size, int32_t *sizeTable)
10566{
10567 size_t j = 0;
10568 if (size > max_size) {
10569 size = max_size;
10570 }
10571 for (size_t i = 0; i < size; i++) {
10572 sizeTable[j] = dimTable[i].width;
10573 sizeTable[j+1] = dimTable[i].height;
10574 j+=2;
10575 }
10576}
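// Illustrative example (assumed input): a dimTable of {{1920,1080},{1280,720}} with
// size = 2 is flattened into sizeTable = {1920, 1080, 1280, 720}.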
10577
10578/*===========================================================================
10579 * FUNCTION : makeFPSTable
10580 *
10581 * DESCRIPTION: make a table of fps ranges
10582 *
10583 * PARAMETERS :
10584 *   @fpsTable / @size / @max_size / @fpsRangesTable : source fps ranges, entry count, capacity, flattened {min, max} output
10585 *==========================================================================*/
10586void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10587 size_t max_size, int32_t *fpsRangesTable)
10588{
10589 size_t j = 0;
10590 if (size > max_size) {
10591 size = max_size;
10592 }
10593 for (size_t i = 0; i < size; i++) {
10594 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10595 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10596 j+=2;
10597 }
10598}
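// Illustrative example (assumed input): fps ranges {[15.0,30.0],[30.0,30.0]} are
// flattened and truncated to fpsRangesTable = {15, 30, 30, 30}.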
10599
10600/*===========================================================================
10601 * FUNCTION : makeOverridesList
10602 *
10603 * DESCRIPTION: make a list of scene mode overrides
10604 *
10605 * PARAMETERS :
10606 *   @overridesTable / @size / @max_size : backend scene mode override table and its bounds
10607 *   @overridesList / @supported_indexes / @camera_id : flattened {ae, awb, af} output, fwk scene mode index map, camera id
10608 *==========================================================================*/
10609void QCamera3HardwareInterface::makeOverridesList(
10610 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10611 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10612{
10613    /* The daemon gives a list of overrides for all scene modes.
10614       However, we should send the framework only the overrides for the
10615       scene modes it supports. */
10616 size_t j = 0;
10617 if (size > max_size) {
10618 size = max_size;
10619 }
10620 size_t focus_count = CAM_FOCUS_MODE_MAX;
10621 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10622 focus_count);
10623 for (size_t i = 0; i < size; i++) {
10624 bool supt = false;
10625 size_t index = supported_indexes[i];
10626 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10627 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10628 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10629 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10630 overridesTable[index].awb_mode);
10631 if (NAME_NOT_FOUND != val) {
10632 overridesList[j+1] = (uint8_t)val;
10633 }
10634 uint8_t focus_override = overridesTable[index].af_mode;
10635 for (size_t k = 0; k < focus_count; k++) {
10636 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10637 supt = true;
10638 break;
10639 }
10640 }
10641 if (supt) {
10642 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10643 focus_override);
10644 if (NAME_NOT_FOUND != val) {
10645 overridesList[j+2] = (uint8_t)val;
10646 }
10647 } else {
10648 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10649 }
10650 j+=3;
10651 }
10652}
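// Illustrative note: each framework-supported scene mode contributes one 3-tuple
// {ae_override, awb_override, af_override} to overridesList. AE is AUTO_FLASH when the
// sensor has a flash unit, otherwise ON; AF falls back to OFF when the backend's
// override focus mode is not in the supported focus mode list.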
10653
10654/*===========================================================================
10655 * FUNCTION : filterJpegSizes
10656 *
10657 * DESCRIPTION: Returns the supported JPEG sizes: the processed sizes no smaller than
10658 *              the active array dimensions divided by the maximum downscale factor
10659 *
10660 * PARAMETERS :
10661 *   @jpegSizes : output array of {w, h} pairs; the remaining params give the candidate sizes, capacity, active array size and max downscale factor
10662 * RETURN : length of jpegSizes array
10663 *==========================================================================*/
10664
10665size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10666 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10667 uint8_t downscale_factor)
10668{
10669 if (0 == downscale_factor) {
10670 downscale_factor = 1;
10671 }
10672
10673 int32_t min_width = active_array_size.width / downscale_factor;
10674 int32_t min_height = active_array_size.height / downscale_factor;
10675 size_t jpegSizesCnt = 0;
10676 if (processedSizesCnt > maxCount) {
10677 processedSizesCnt = maxCount;
10678 }
10679 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10680 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10681 jpegSizes[jpegSizesCnt] = processedSizes[i];
10682 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10683 jpegSizesCnt += 2;
10684 }
10685 }
10686 return jpegSizesCnt;
10687}
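// Illustrative example (assumed numbers): with a 4000x3000 active array and
// downscale_factor = 4, the minimum accepted size is 1000x750; a 1280x720 processed
// size is filtered out (720 < 750) while 1600x1200 is kept as a valid JPEG size.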
10688
10689/*===========================================================================
10690 * FUNCTION : computeNoiseModelEntryS
10691 *
10692 * DESCRIPTION: function to map a given sensitivity to the S noise
10693 * model parameters in the DNG noise model.
10694 *
10695 * PARAMETERS : sens : the sensor sensitivity
10696 *
10697 * RETURN : S (sensor amplification) noise
10698 *
10699 *==========================================================================*/
10700double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10701 double s = gCamCapability[mCameraId]->gradient_S * sens +
10702 gCamCapability[mCameraId]->offset_S;
10703 return ((s < 0.0) ? 0.0 : s);
10704}
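// Illustrative example (assumed calibration values): with gradient_S = 3.0e-06 and
// offset_S = 1.0e-04, a sensitivity of 400 maps to S = 3.0e-06 * 400 + 1.0e-04
// = 1.3e-03; negative results are clamped to 0.0.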
10705
10706/*===========================================================================
10707 * FUNCTION : computeNoiseModelEntryO
10708 *
10709 * DESCRIPTION: function to map a given sensitivity to the O noise
10710 * model parameters in the DNG noise model.
10711 *
10712 * PARAMETERS : sens : the sensor sensitivity
10713 *
10714 * RETURN : O (sensor readout) noise
10715 *
10716 *==========================================================================*/
10717double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10718 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10719 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10720 1.0 : (1.0 * sens / max_analog_sens);
10721 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10722 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10723 return ((o < 0.0) ? 0.0 : o);
10724}
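// Illustrative example (assumed calibration values): with max_analog_sensitivity = 800,
// sens = 1600 gives digital_gain = 2.0; with gradient_O = 4.5e-12 and offset_O = 3.0e-08,
// O = 4.5e-12 * 1600^2 + 3.0e-08 * 2.0^2 = 1.152e-05 + 1.2e-07 ~= 1.164e-05.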
10725
10726/*===========================================================================
10727 * FUNCTION : getSensorSensitivity
10728 *
10729 * DESCRIPTION: convert iso_mode to an integer value
10730 *
10731 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10732 *
10733 * RETURN : sensitivity supported by sensor
10734 *
10735 *==========================================================================*/
10736int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10737{
10738 int32_t sensitivity;
10739
10740 switch (iso_mode) {
10741 case CAM_ISO_MODE_100:
10742 sensitivity = 100;
10743 break;
10744 case CAM_ISO_MODE_200:
10745 sensitivity = 200;
10746 break;
10747 case CAM_ISO_MODE_400:
10748 sensitivity = 400;
10749 break;
10750 case CAM_ISO_MODE_800:
10751 sensitivity = 800;
10752 break;
10753 case CAM_ISO_MODE_1600:
10754 sensitivity = 1600;
10755 break;
10756 default:
10757 sensitivity = -1;
10758 break;
10759 }
10760 return sensitivity;
10761}
10762
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010763int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010764 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010765 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10766 // to connect to Easel.
10767 bool doNotpowerOnEasel =
10768 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10769
10770 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010771 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10772 return OK;
10773 }
10774
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010775 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010776 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010777 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010778 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010779 return res;
10780 }
10781
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010782 EaselManagerClientOpened = true;
10783
10784 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010785 if (res != OK) {
10786 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10787 }
10788
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010789 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010790 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010791
10792 // Expose enableZsl key only when HDR+ mode is enabled.
10793 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010794 }
10795
10796 return OK;
10797}
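// Usage note (illustrative): the Easel/HDR+ behavior above is driven by the system
// properties read in initHdrPlusClientLocked(), which can be toggled from a shell
// during bring-up, e.g.
//   adb shell setprop persist.camera.hdrplus.enable 1        # leave bypass-only mode
//   adb shell setprop camera.hdrplus.donotpoweroneasel 1     # keep Easel off for HDR+ tests
// The property names come from the code above; the adb invocations are just examples.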
10798
Thierry Strudel3d639192016-09-09 11:52:26 -070010799/*===========================================================================
10800 * FUNCTION : getCamInfo
10801 *
10802 * DESCRIPTION: query camera capabilities
10803 *
10804 * PARAMETERS :
10805 * @cameraId : camera Id
10806 * @info : camera info struct to be filled in with camera capabilities
10807 *
10808 * RETURN : int type of status
10809 * NO_ERROR -- success
10810 * non-zero failure code
10811 *==========================================================================*/
10812int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10813 struct camera_info *info)
10814{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010815 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010816 int rc = 0;
10817
10818 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010819
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010820 {
10821 Mutex::Autolock l(gHdrPlusClientLock);
10822 rc = initHdrPlusClientLocked();
10823 if (rc != OK) {
10824 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10825 pthread_mutex_unlock(&gCamLock);
10826 return rc;
10827 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010828 }
10829
Thierry Strudel3d639192016-09-09 11:52:26 -070010830 if (NULL == gCamCapability[cameraId]) {
10831 rc = initCapabilities(cameraId);
10832 if (rc < 0) {
10833 pthread_mutex_unlock(&gCamLock);
10834 return rc;
10835 }
10836 }
10837
10838 if (NULL == gStaticMetadata[cameraId]) {
10839 rc = initStaticMetadata(cameraId);
10840 if (rc < 0) {
10841 pthread_mutex_unlock(&gCamLock);
10842 return rc;
10843 }
10844 }
10845
10846 switch(gCamCapability[cameraId]->position) {
10847 case CAM_POSITION_BACK:
10848 case CAM_POSITION_BACK_AUX:
10849 info->facing = CAMERA_FACING_BACK;
10850 break;
10851
10852 case CAM_POSITION_FRONT:
10853 case CAM_POSITION_FRONT_AUX:
10854 info->facing = CAMERA_FACING_FRONT;
10855 break;
10856
10857 default:
10858 LOGE("Unknown position type %d for camera id:%d",
10859 gCamCapability[cameraId]->position, cameraId);
10860 rc = -1;
10861 break;
10862 }
10863
10864
10865 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010866#ifndef USE_HAL_3_3
10867 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10868#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010869 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010870#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010871 info->static_camera_characteristics = gStaticMetadata[cameraId];
10872
10873 //For now assume both cameras can operate independently.
10874 info->conflicting_devices = NULL;
10875 info->conflicting_devices_length = 0;
10876
10877 //resource cost is 100 * MIN(1.0, m/M),
10878 //where m is throughput requirement with maximum stream configuration
10879 //and M is CPP maximum throughput.
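    //Illustrative example (assumed numbers): with MAX_PROCESSED_STREAMS = 2, a
    //4000x3000 active array, max_fps = 30 and max_pixel_bandwidth = 1.2e9 pixels/s,
    //m/M = (2 * 4000 * 3000 * 30) / 1.2e9 = 0.6, so resource_cost = 60.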
10880 float max_fps = 0.0;
10881 for (uint32_t i = 0;
10882 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10883 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10884 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10885 }
10886 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10887 gCamCapability[cameraId]->active_array_size.width *
10888 gCamCapability[cameraId]->active_array_size.height * max_fps /
10889 gCamCapability[cameraId]->max_pixel_bandwidth;
10890 info->resource_cost = 100 * MIN(1.0, ratio);
10891 LOGI("camera %d resource cost is %d", cameraId,
10892 info->resource_cost);
10893
10894 pthread_mutex_unlock(&gCamLock);
10895 return rc;
10896}
10897
10898/*===========================================================================
10899 * FUNCTION : translateCapabilityToMetadata
10900 *
10901 * DESCRIPTION: translate the capability into camera_metadata_t
10902 *
10903 * PARAMETERS : type of the request
10904 *
10905 *
10906 * RETURN : success: camera_metadata_t*
10907 * failure: NULL
10908 *
10909 *==========================================================================*/
10910camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10911{
10912 if (mDefaultMetadata[type] != NULL) {
10913 return mDefaultMetadata[type];
10914 }
10915 //first time we are handling this request
10916 //fill up the metadata structure using the wrapper class
10917 CameraMetadata settings;
10918 //translate from cam_capability_t to camera_metadata_tag_t
10919 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10920 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10921 int32_t defaultRequestID = 0;
10922 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10923
10924 /* OIS disable */
10925 char ois_prop[PROPERTY_VALUE_MAX];
10926 memset(ois_prop, 0, sizeof(ois_prop));
10927 property_get("persist.camera.ois.disable", ois_prop, "0");
10928 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10929
10930 /* Force video to use OIS */
10931 char videoOisProp[PROPERTY_VALUE_MAX];
10932 memset(videoOisProp, 0, sizeof(videoOisProp));
10933 property_get("persist.camera.ois.video", videoOisProp, "1");
10934 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010935
10936 // Hybrid AE enable/disable
10937 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10938 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10939 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10940 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10941
Thierry Strudel3d639192016-09-09 11:52:26 -070010942 uint8_t controlIntent = 0;
10943 uint8_t focusMode;
10944 uint8_t vsMode;
10945 uint8_t optStabMode;
10946 uint8_t cacMode;
10947 uint8_t edge_mode;
10948 uint8_t noise_red_mode;
10949 uint8_t tonemap_mode;
10950 bool highQualityModeEntryAvailable = FALSE;
10951 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010952 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010953 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10954 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010955 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010956 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010957 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010958
Thierry Strudel3d639192016-09-09 11:52:26 -070010959 switch (type) {
10960 case CAMERA3_TEMPLATE_PREVIEW:
10961 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10962 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10963 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10964 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10965 edge_mode = ANDROID_EDGE_MODE_FAST;
10966 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10967 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10968 break;
10969 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10970 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10971 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10972 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10973 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10974 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10975 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10976 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10977 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10978 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10979 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10980 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10981 highQualityModeEntryAvailable = TRUE;
10982 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10983 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10984 fastModeEntryAvailable = TRUE;
10985 }
10986 }
10987 if (highQualityModeEntryAvailable) {
10988 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10989 } else if (fastModeEntryAvailable) {
10990 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10991 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010992 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10993 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10994 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010995 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010996 break;
10997 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10998 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10999 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11000 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011001 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11002 edge_mode = ANDROID_EDGE_MODE_FAST;
11003 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11004 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11005 if (forceVideoOis)
11006 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11007 break;
11008 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11009 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11010 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11011 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011012 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11013 edge_mode = ANDROID_EDGE_MODE_FAST;
11014 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11015 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11016 if (forceVideoOis)
11017 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11018 break;
11019 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11020 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11021 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11022 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11023 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11024 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11025 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11026 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11027 break;
11028 case CAMERA3_TEMPLATE_MANUAL:
11029 edge_mode = ANDROID_EDGE_MODE_FAST;
11030 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11031 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11032 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11033 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11034 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11035 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11036 break;
11037 default:
11038 edge_mode = ANDROID_EDGE_MODE_FAST;
11039 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11040 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11041 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11042 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11043 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11044 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11045 break;
11046 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011047    // Set CAC to OFF if the underlying device doesn't support it
11048 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11049 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11050 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011051 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11052 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11053 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11054 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11055 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11056 }
11057 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011058 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011059 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011060
11061 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11062 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11063 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11064 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11065 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11066 || ois_disable)
11067 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11068 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011069 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011070
11071 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11072 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11073
11074 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11075 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11076
11077 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11078 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11079
11080 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11081 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11082
11083 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11084 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11085
11086 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11087 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11088
11089 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11090 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11091
11092 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11093 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11094
11095 /*flash*/
11096 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11097 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11098
11099 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11100 settings.update(ANDROID_FLASH_FIRING_POWER,
11101 &flashFiringLevel, 1);
11102
11103 /* lens */
11104 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11105 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11106
11107 if (gCamCapability[mCameraId]->filter_densities_count) {
11108 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11109 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11110 gCamCapability[mCameraId]->filter_densities_count);
11111 }
11112
11113 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11114 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11115
Thierry Strudel3d639192016-09-09 11:52:26 -070011116 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11117 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11118
11119 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11120 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11121
11122 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11123 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11124
11125 /* face detection (default to OFF) */
11126 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11127 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11128
Thierry Strudel54dc9782017-02-15 12:12:10 -080011129 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11130 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011131
11132 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11133 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11134
11135 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11136 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11137
Thierry Strudel3d639192016-09-09 11:52:26 -070011138
11139 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11140 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11141
11142    /* Exposure time (default to the minimum supported exposure time) */
11143 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11144 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11145
11146 /* frame duration */
11147 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11148 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11149
11150 /* sensitivity */
11151 static const int32_t default_sensitivity = 100;
11152 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011153#ifndef USE_HAL_3_3
11154 static const int32_t default_isp_sensitivity =
11155 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11156 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11157#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011158
11159 /*edge mode*/
11160 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11161
11162 /*noise reduction mode*/
11163 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11164
11165 /*color correction mode*/
11166 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11167 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11168
11169 /*transform matrix mode*/
11170 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11171
11172 int32_t scaler_crop_region[4];
11173 scaler_crop_region[0] = 0;
11174 scaler_crop_region[1] = 0;
11175 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11176 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11177 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11178
11179 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11180 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11181
11182 /*focus distance*/
11183 float focus_distance = 0.0;
11184 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11185
11186 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011187 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011188 float max_range = 0.0;
11189 float max_fixed_fps = 0.0;
11190 int32_t fps_range[2] = {0, 0};
11191 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11192 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011193 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11194 TEMPLATE_MAX_PREVIEW_FPS) {
11195 continue;
11196 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011197 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11198 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11199 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11200 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11201 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11202 if (range > max_range) {
11203 fps_range[0] =
11204 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11205 fps_range[1] =
11206 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11207 max_range = range;
11208 }
11209 } else {
11210 if (range < 0.01 && max_fixed_fps <
11211 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11212 fps_range[0] =
11213 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11214 fps_range[1] =
11215 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11216 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11217 }
11218 }
11219 }
11220 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
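    /* Illustrative example (assumed fps table, and assuming TEMPLATE_MAX_PREVIEW_FPS is 30):
       for {[15,30],[30,30],[60,60]}, the [60,60] entry is skipped; preview/still/ZSL
       templates pick the widest remaining range [15,30], while the other (video)
       templates pick the highest fixed range [30,30]. */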
11221
11222 /*precapture trigger*/
11223 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11224 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11225
11226 /*af trigger*/
11227 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11228 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11229
11230 /* ae & af regions */
11231 int32_t active_region[] = {
11232 gCamCapability[mCameraId]->active_array_size.left,
11233 gCamCapability[mCameraId]->active_array_size.top,
11234 gCamCapability[mCameraId]->active_array_size.left +
11235 gCamCapability[mCameraId]->active_array_size.width,
11236 gCamCapability[mCameraId]->active_array_size.top +
11237 gCamCapability[mCameraId]->active_array_size.height,
11238 0};
11239 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11240 sizeof(active_region) / sizeof(active_region[0]));
11241 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11242 sizeof(active_region) / sizeof(active_region[0]));
11243
11244 /* black level lock */
11245 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11246 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11247
Thierry Strudel3d639192016-09-09 11:52:26 -070011248 //special defaults for manual template
11249 if (type == CAMERA3_TEMPLATE_MANUAL) {
11250 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11251 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11252
11253 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11254 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11255
11256 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11257 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11258
11259 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11260 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11261
11262 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11263 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11264
11265 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11266 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11267 }
11268
11269
11270 /* TNR
11271     * This is where we decide for which templates TNR is enabled by default.
11272     * TNR is enabled if either the preview or the video stream requires it.
11273     * This is not to be confused with per-stream linking; that decision is
11274     * still made per session and is handled as part of stream configuration.
11275 */
11276 uint8_t tnr_enable = 0;
11277
11278 if (m_bTnrPreview || m_bTnrVideo) {
11279
11280 switch (type) {
11281 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11282 tnr_enable = 1;
11283 break;
11284
11285 default:
11286 tnr_enable = 0;
11287 break;
11288 }
11289
11290 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11291 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11292 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11293
11294 LOGD("TNR:%d with process plate %d for template:%d",
11295 tnr_enable, tnr_process_type, type);
11296 }
11297
11298 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011299 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011300 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11301
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011302 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011303 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11304
Shuzhen Wang920ea402017-05-03 08:49:39 -070011305 uint8_t related_camera_id = mCameraId;
11306 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011307
11308 /* CDS default */
11309 char prop[PROPERTY_VALUE_MAX];
11310 memset(prop, 0, sizeof(prop));
11311 property_get("persist.camera.CDS", prop, "Auto");
11312 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11313 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11314 if (CAM_CDS_MODE_MAX == cds_mode) {
11315 cds_mode = CAM_CDS_MODE_AUTO;
11316 }
11317
11318 /* Disabling CDS in templates which have TNR enabled*/
11319 if (tnr_enable)
11320 cds_mode = CAM_CDS_MODE_OFF;
11321
11322 int32_t mode = cds_mode;
11323 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011324
Thierry Strudel269c81a2016-10-12 12:13:59 -070011325 /* Manual Convergence AEC Speed is disabled by default*/
11326 float default_aec_speed = 0;
11327 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11328
11329 /* Manual Convergence AWB Speed is disabled by default*/
11330 float default_awb_speed = 0;
11331 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11332
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011333 // Set instant AEC to normal convergence by default
11334 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11335 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11336
Shuzhen Wang19463d72016-03-08 11:09:52 -080011337 /* hybrid ae */
11338 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11339
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011340 if (gExposeEnableZslKey) {
11341 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11342 }
11343
Thierry Strudel3d639192016-09-09 11:52:26 -070011344 mDefaultMetadata[type] = settings.release();
11345
11346 return mDefaultMetadata[type];
11347}
11348
11349/*===========================================================================
11350 * FUNCTION : setFrameParameters
11351 *
11352 * DESCRIPTION: set parameters per frame as requested in the metadata from
11353 * framework
11354 *
11355 * PARAMETERS :
11356 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011357 * @streamsArray : Stream IDs of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011358 * @blob_request: Whether this request is a blob request or not
11359 *
11360 * RETURN : success: NO_ERROR
11361 * failure: non-zero error code (e.g. BAD_VALUE)
11362 *==========================================================================*/
11363int QCamera3HardwareInterface::setFrameParameters(
11364 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011365 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011366 int blob_request,
11367 uint32_t snapshotStreamId)
11368{
11369 /*translate from camera_metadata_t type to parm_type_t*/
11370 int rc = 0;
11371 int32_t hal_version = CAM_HAL_V3;
11372
11373 clear_metadata_buffer(mParameters);
11374 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11375 LOGE("Failed to set hal version in the parameters");
11376 return BAD_VALUE;
11377 }
11378
11379 /*we need to update the frame number in the parameters*/
11380 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11381 request->frame_number)) {
11382 LOGE("Failed to set the frame number in the parameters");
11383 return BAD_VALUE;
11384 }
11385
11386 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011387 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011388 LOGE("Failed to set stream type mask in the parameters");
11389 return BAD_VALUE;
11390 }
11391
11392 if (mUpdateDebugLevel) {
11393 uint32_t dummyDebugLevel = 0;
11394         /* The value of dummyDebugLevel is irrelevant. On
11395 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11396 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11397 dummyDebugLevel)) {
11398 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11399 return BAD_VALUE;
11400 }
11401 mUpdateDebugLevel = false;
11402 }
11403
11404 if(request->settings != NULL){
11405 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11406 if (blob_request)
11407 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11408 }
11409
11410 return rc;
11411}
11412
11413/*===========================================================================
11414 * FUNCTION : setReprocParameters
11415 *
11416 * DESCRIPTION: Translate framework metadata to the HAL metadata structure, and
11417 * return it.
11418 *
11419 * PARAMETERS :
11420 * @request : request that needs to be serviced
11421 *
11422 * RETURN : success: NO_ERROR
11423 * failure: non-zero error code (e.g. BAD_VALUE)
11424 *==========================================================================*/
11425int32_t QCamera3HardwareInterface::setReprocParameters(
11426 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11427 uint32_t snapshotStreamId)
11428{
11429 /*translate from camera_metadata_t type to parm_type_t*/
11430 int rc = 0;
11431
11432 if (NULL == request->settings){
11433 LOGE("Reprocess settings cannot be NULL");
11434 return BAD_VALUE;
11435 }
11436
11437 if (NULL == reprocParam) {
11438 LOGE("Invalid reprocessing metadata buffer");
11439 return BAD_VALUE;
11440 }
11441 clear_metadata_buffer(reprocParam);
11442
11443 /*we need to update the frame number in the parameters*/
11444 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11445 request->frame_number)) {
11446 LOGE("Failed to set the frame number in the parameters");
11447 return BAD_VALUE;
11448 }
11449
11450 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11451 if (rc < 0) {
11452 LOGE("Failed to translate reproc request");
11453 return rc;
11454 }
11455
11456 CameraMetadata frame_settings;
11457 frame_settings = request->settings;
11458 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11459 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11460 int32_t *crop_count =
11461 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11462 int32_t *crop_data =
11463 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11464 int32_t *roi_map =
11465 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11466 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11467 cam_crop_data_t crop_meta;
11468 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11469 crop_meta.num_of_streams = 1;
11470 crop_meta.crop_info[0].crop.left = crop_data[0];
11471 crop_meta.crop_info[0].crop.top = crop_data[1];
11472 crop_meta.crop_info[0].crop.width = crop_data[2];
11473 crop_meta.crop_info[0].crop.height = crop_data[3];
11474
11475 crop_meta.crop_info[0].roi_map.left =
11476 roi_map[0];
11477 crop_meta.crop_info[0].roi_map.top =
11478 roi_map[1];
11479 crop_meta.crop_info[0].roi_map.width =
11480 roi_map[2];
11481 crop_meta.crop_info[0].roi_map.height =
11482 roi_map[3];
11483
11484 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11485 rc = BAD_VALUE;
11486 }
11487 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11488 request->input_buffer->stream,
11489 crop_meta.crop_info[0].crop.left,
11490 crop_meta.crop_info[0].crop.top,
11491 crop_meta.crop_info[0].crop.width,
11492 crop_meta.crop_info[0].crop.height);
11493 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11494 request->input_buffer->stream,
11495 crop_meta.crop_info[0].roi_map.left,
11496 crop_meta.crop_info[0].roi_map.top,
11497 crop_meta.crop_info[0].roi_map.width,
11498 crop_meta.crop_info[0].roi_map.height);
11499 } else {
11500 LOGE("Invalid reprocess crop count %d!", *crop_count);
11501 }
11502 } else {
11503 LOGE("No crop data from matching output stream");
11504 }
11505
11506 /* These settings are not needed for regular requests so handle them specially for
11507 reprocess requests; information needed for EXIF tags */
11508 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11509 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11510 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11511 if (NAME_NOT_FOUND != val) {
11512 uint32_t flashMode = (uint32_t)val;
11513 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11514 rc = BAD_VALUE;
11515 }
11516 } else {
11517 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11518 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11519 }
11520 } else {
11521 LOGH("No flash mode in reprocess settings");
11522 }
11523
11524 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11525 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11526 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11527 rc = BAD_VALUE;
11528 }
11529 } else {
11530 LOGH("No flash state in reprocess settings");
11531 }
11532
11533 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11534 uint8_t *reprocessFlags =
11535 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11536 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11537 *reprocessFlags)) {
11538 rc = BAD_VALUE;
11539 }
11540 }
11541
Thierry Strudel54dc9782017-02-15 12:12:10 -080011542 // Add exif debug data to internal metadata
11543 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11544 mm_jpeg_debug_exif_params_t *debug_params =
11545 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11546 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11547 // AE
11548 if (debug_params->ae_debug_params_valid == TRUE) {
11549 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11550 debug_params->ae_debug_params);
11551 }
11552 // AWB
11553 if (debug_params->awb_debug_params_valid == TRUE) {
11554 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11555 debug_params->awb_debug_params);
11556 }
11557 // AF
11558 if (debug_params->af_debug_params_valid == TRUE) {
11559 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11560 debug_params->af_debug_params);
11561 }
11562 // ASD
11563 if (debug_params->asd_debug_params_valid == TRUE) {
11564 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11565 debug_params->asd_debug_params);
11566 }
11567 // Stats
11568 if (debug_params->stats_debug_params_valid == TRUE) {
11569 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11570 debug_params->stats_debug_params);
11571 }
11572 // BE Stats
11573 if (debug_params->bestats_debug_params_valid == TRUE) {
11574 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11575 debug_params->bestats_debug_params);
11576 }
11577 // BHIST
11578 if (debug_params->bhist_debug_params_valid == TRUE) {
11579 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11580 debug_params->bhist_debug_params);
11581 }
11582 // 3A Tuning
11583 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11584 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11585 debug_params->q3a_tuning_debug_params);
11586 }
11587 }
11588
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011589 // Add metadata which reprocess needs
11590 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11591 cam_reprocess_info_t *repro_info =
11592 (cam_reprocess_info_t *)frame_settings.find
11593 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011594 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011595 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011596 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011597 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011598 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011599 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011600 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011601 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011602 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011603 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011604 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011605 repro_info->pipeline_flip);
11606 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11607 repro_info->af_roi);
11608 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11609 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011610        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11611            CAM_INTF_PARM_ROTATION metadata has already been added in
11612            translateToHalMetadata, and HAL needs to keep this new rotation
11613            metadata. Otherwise, the old rotation info saved in the vendor tag
11614            is used */
11615 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11616 CAM_INTF_PARM_ROTATION, reprocParam) {
11617 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11618 } else {
11619 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011620 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011621 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011622 }
11623
11624 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11625 to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11626 roi.width and roi.height are the final JPEG size.
11627 For now, the HAL only checks this for reprocess requests. */
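 /* Illustrative framework-side usage for a reprocess request (the tag names are the
  * ones checked below; the crop rectangle and output size values are hypothetical,
  * and "settings" stands for the request's CameraMetadata):
  *   uint8_t enable = 1;
  *   int32_t cropRect[4] = {0, 0, 3264, 2448};   // left, top, width, height
  *   int32_t cropRoi[4]  = {0, 0, 1920, 1080};   // [2],[3] = final JPEG size
  *   settings.update(QCAMERA3_JPEG_ENCODE_CROP_ENABLE, &enable, 1);
  *   settings.update(QCAMERA3_JPEG_ENCODE_CROP_RECT, cropRect, 4);
  *   settings.update(QCAMERA3_JPEG_ENCODE_CROP_ROI, cropRoi, 4);
  */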
11628 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11629 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11630 uint8_t *enable =
11631 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11632 if (*enable == TRUE) {
11633 int32_t *crop_data =
11634 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11635 cam_stream_crop_info_t crop_meta;
11636 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11637 crop_meta.stream_id = 0;
11638 crop_meta.crop.left = crop_data[0];
11639 crop_meta.crop.top = crop_data[1];
11640 crop_meta.crop.width = crop_data[2];
11641 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011642 // The JPEG crop roi should match cpp output size
11643 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11644 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11645 crop_meta.roi_map.left = 0;
11646 crop_meta.roi_map.top = 0;
11647 crop_meta.roi_map.width = cpp_crop->crop.width;
11648 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011649 }
11650 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11651 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011652 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011653 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011654 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11655 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011656 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011657 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11658
11659 // Add JPEG scale information
11660 cam_dimension_t scale_dim;
11661 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11662 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11663 int32_t *roi =
11664 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11665 scale_dim.width = roi[2];
11666 scale_dim.height = roi[3];
11667 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11668 scale_dim);
11669 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11670 scale_dim.width, scale_dim.height, mCameraId);
11671 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011672 }
11673 }
11674
11675 return rc;
11676}
11677
11678/*===========================================================================
11679 * FUNCTION : saveRequestSettings
11680 *
11681 * DESCRIPTION: Merge any settings that might have changed into the request settings
11682 * and save them to be applied to the frame
11683 *
11684 * PARAMETERS :
11685 * @jpegMetadata : the extracted and/or modified jpeg metadata
11686 * @request : request with initial settings
11687 *
11688 * RETURN :
11689 * camera_metadata_t* : pointer to the saved request settings
11690 *==========================================================================*/
11691camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11692 const CameraMetadata &jpegMetadata,
11693 camera3_capture_request_t *request)
11694{
11695 camera_metadata_t *resultMetadata;
11696 CameraMetadata camMetadata;
11697 camMetadata = request->settings;
11698
11699 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11700 int32_t thumbnail_size[2];
11701 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11702 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11703 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11704 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11705 }
11706
11707 if (request->input_buffer != NULL) {
11708 uint8_t reprocessFlags = 1;
11709 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11710 (uint8_t*)&reprocessFlags,
11711 sizeof(reprocessFlags));
11712 }
11713
11714 resultMetadata = camMetadata.release();
11715 return resultMetadata;
11716}
11717
11718/*===========================================================================
11719 * FUNCTION : setHalFpsRange
11720 *
11721 * DESCRIPTION: set FPS range parameter
11722 *
11723 *
11724 * PARAMETERS :
11725 * @settings : Metadata from framework
11726 * @hal_metadata: Metadata buffer
11727 *
11728 *
11729 * RETURN : success: NO_ERROR
11730 *              failure: BAD_VALUE
11731 *==========================================================================*/
11732int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11733 metadata_buffer_t *hal_metadata)
11734{
11735 int32_t rc = NO_ERROR;
11736 cam_fps_range_t fps_range;
11737 fps_range.min_fps = (float)
11738 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11739 fps_range.max_fps = (float)
11740 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11741 fps_range.video_min_fps = fps_range.min_fps;
11742 fps_range.video_max_fps = fps_range.max_fps;
11743
11744 LOGD("aeTargetFpsRange fps: [%f %f]",
11745 fps_range.min_fps, fps_range.max_fps);
11746 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11747 * follows:
11748 * ---------------------------------------------------------------|
11749 * Video stream is absent in configure_streams |
11750 * (Camcorder preview before the first video record) |
11751 * ---------------------------------------------------------------|
11752 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11753 * | | | vid_min/max_fps|
11754 * ---------------------------------------------------------------|
11755 * NO | [ 30, 240] | 240 | [240, 240] |
11756 * |-------------|-------------|----------------|
11757 * | [240, 240] | 240 | [240, 240] |
11758 * ---------------------------------------------------------------|
11759 * Video stream is present in configure_streams |
11760 * ---------------------------------------------------------------|
11761 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11762 * | | | vid_min/max_fps|
11763 * ---------------------------------------------------------------|
11764 * NO | [ 30, 240] | 240 | [240, 240] |
11765 * (camcorder prev |-------------|-------------|----------------|
11766 * after video rec | [240, 240] | 240 | [240, 240] |
11767 * is stopped) | | | |
11768 * ---------------------------------------------------------------|
11769 * YES | [ 30, 240] | 240 | [240, 240] |
11770 * |-------------|-------------|----------------|
11771 * | [240, 240] | 240 | [240, 240] |
11772 * ---------------------------------------------------------------|
11773 * When Video stream is absent in configure_streams,
11774 * preview fps = sensor_fps / batchsize
11775 * Eg: for 240fps at batchSize 4, preview = 60fps
11776 * for 120fps at batchSize 4, preview = 30fps
11777 *
11778 * When video stream is present in configure_streams, preview fps is as per
11779 * the ratio of preview buffers to video buffers requested in process
11780 * capture request
11781 */
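 /* Rough numeric sketch of the batch-size computation below (PREVIEW_FPS_FOR_HFR and
  * MAX_HFR_BATCH_SIZE are defined elsewhere in this HAL, so the concrete values here
  * are assumptions): for a [240, 240] target range with PREVIEW_FPS_FOR_HFR of 30,
  * mBatchSize = 240 / 30 = 8, which is then clamped to MAX_HFR_BATCH_SIZE. */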
11782 mBatchSize = 0;
11783 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11784 fps_range.min_fps = fps_range.video_max_fps;
11785 fps_range.video_min_fps = fps_range.video_max_fps;
11786 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11787 fps_range.max_fps);
11788 if (NAME_NOT_FOUND != val) {
11789 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11790 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11791 return BAD_VALUE;
11792 }
11793
11794 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11795 /* If batchmode is currently in progress and the fps changes,
11796 * set the flag to restart the sensor */
11797 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11798 (mHFRVideoFps != fps_range.max_fps)) {
11799 mNeedSensorRestart = true;
11800 }
11801 mHFRVideoFps = fps_range.max_fps;
11802 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11803 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11804 mBatchSize = MAX_HFR_BATCH_SIZE;
11805 }
11806 }
11807 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11808
11809 }
11810 } else {
11811 /* HFR mode is session param in backend/ISP. This should be reset when
11812 * in non-HFR mode */
11813 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11814 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11815 return BAD_VALUE;
11816 }
11817 }
11818 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11819 return BAD_VALUE;
11820 }
11821 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11822 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11823 return rc;
11824}
11825
11826/*===========================================================================
11827 * FUNCTION : translateToHalMetadata
11828 *
11829 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11830 *
11831 *
11832 * PARAMETERS :
11833 * @request : request sent from framework
11834 *
11835 *
11836 * RETURN : success: NO_ERROR
11837 *              failure: BAD_VALUE
11838 *==========================================================================*/
11839int QCamera3HardwareInterface::translateToHalMetadata
11840 (const camera3_capture_request_t *request,
11841 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011842 uint32_t snapshotStreamId) {
11843 if (request == nullptr || hal_metadata == nullptr) {
11844 return BAD_VALUE;
11845 }
11846
11847 int64_t minFrameDuration = getMinFrameDuration(request);
11848
11849 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11850 minFrameDuration);
11851}
11852
11853int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11854 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11855 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11856
Thierry Strudel3d639192016-09-09 11:52:26 -070011857 int rc = 0;
11858 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011859 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011860
11861 /* Do not change the order of the following list unless you know what you are
11862 * doing.
11863 * The order is laid out in such a way that parameters in the front of the table
11864 * may be used to override the parameters later in the table. Examples are:
11865 * 1. META_MODE should precede AEC/AWB/AF MODE
11866 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11867 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11868 * 4. Any mode should precede its corresponding settings
11869 */
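 /* For example, ANDROID_CONTROL_AE_MODE handled near the top decides the LED/flash
  * policy, so the ANDROID_FLASH_MODE entry handled further down is deliberately
  * ignored when the AE mode already controls the flash (see respectFlashMode below). */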
11870 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11871 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11872 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11873 rc = BAD_VALUE;
11874 }
11875 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11876 if (rc != NO_ERROR) {
11877 LOGE("extractSceneMode failed");
11878 }
11879 }
11880
11881 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11882 uint8_t fwk_aeMode =
11883 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11884 uint8_t aeMode;
11885 int32_t redeye;
11886
11887 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11888 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011889 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11890 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011891 } else {
11892 aeMode = CAM_AE_MODE_ON;
11893 }
11894 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11895 redeye = 1;
11896 } else {
11897 redeye = 0;
11898 }
11899
11900 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11901 fwk_aeMode);
11902 if (NAME_NOT_FOUND != val) {
11903 int32_t flashMode = (int32_t)val;
11904 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11905 }
11906
11907 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11908 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11909 rc = BAD_VALUE;
11910 }
11911 }
11912
11913 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11914 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11915 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11916 fwk_whiteLevel);
11917 if (NAME_NOT_FOUND != val) {
11918 uint8_t whiteLevel = (uint8_t)val;
11919 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11920 rc = BAD_VALUE;
11921 }
11922 }
11923 }
11924
11925 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11926 uint8_t fwk_cacMode =
11927 frame_settings.find(
11928 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11929 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11930 fwk_cacMode);
11931 if (NAME_NOT_FOUND != val) {
11932 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11933 bool entryAvailable = FALSE;
11934 // Check whether Frameworks set CAC mode is supported in device or not
11935 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11936 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11937 entryAvailable = TRUE;
11938 break;
11939 }
11940 }
11941 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11942 // If the entry is not found, set a device-supported mode instead of the framework's mode, i.e.:
11943 // Only HW ISP CAC + no SW CAC : advertise all 3, with High doing the same as Fast in the ISP
11944 // No HW ISP CAC + only SW CAC : advertise all 3, with Fast doing the same as OFF
11945 if (entryAvailable == FALSE) {
11946 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11947 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11948 } else {
11949 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11950 // High is not supported, so set FAST, as the spec says the underlying
11951 // device implementation can be the same for both modes.
11952 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11953 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11954 // Fast is not supported, so we cannot set HIGH or FAST; choose OFF
11955 // in order to avoid the fps drop due to high quality
11956 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11957 } else {
11958 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11959 }
11960 }
11961 }
11962 LOGD("Final cacMode is %d", cacMode);
11963 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11964 rc = BAD_VALUE;
11965 }
11966 } else {
11967 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11968 }
11969 }
11970
Thierry Strudel2896d122017-02-23 19:18:03 -080011971 char af_value[PROPERTY_VALUE_MAX];
11972 property_get("persist.camera.af.infinity", af_value, "0");
11973
Jason Lee84ae9972017-02-24 13:24:24 -080011974 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011975 if (atoi(af_value) == 0) {
11976 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011977 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011978 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11979 fwk_focusMode);
11980 if (NAME_NOT_FOUND != val) {
11981 uint8_t focusMode = (uint8_t)val;
11982 LOGD("set focus mode %d", focusMode);
11983 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11984 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11985 rc = BAD_VALUE;
11986 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011987 }
11988 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011989 } else {
11990 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11991 LOGE("Focus forced to infinity %d", focusMode);
11992 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11993 rc = BAD_VALUE;
11994 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011995 }
11996
Jason Lee84ae9972017-02-24 13:24:24 -080011997 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11998 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011999 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12000 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12001 focalDistance)) {
12002 rc = BAD_VALUE;
12003 }
12004 }
12005
12006 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12007 uint8_t fwk_antibandingMode =
12008 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12009 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12010 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12011 if (NAME_NOT_FOUND != val) {
12012 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012013 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12014 if (m60HzZone) {
12015 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12016 } else {
12017 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12018 }
12019 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012020 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12021 hal_antibandingMode)) {
12022 rc = BAD_VALUE;
12023 }
12024 }
12025 }
12026
12027 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12028 int32_t expCompensation = frame_settings.find(
12029 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12030 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12031 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12032 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12033 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012034 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012035 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12036 expCompensation)) {
12037 rc = BAD_VALUE;
12038 }
12039 }
12040
12041 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12042 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12043 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12044 rc = BAD_VALUE;
12045 }
12046 }
12047 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12048 rc = setHalFpsRange(frame_settings, hal_metadata);
12049 if (rc != NO_ERROR) {
12050 LOGE("setHalFpsRange failed");
12051 }
12052 }
12053
12054 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12055 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12056 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12057 rc = BAD_VALUE;
12058 }
12059 }
12060
12061 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12062 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12063 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12064 fwk_effectMode);
12065 if (NAME_NOT_FOUND != val) {
12066 uint8_t effectMode = (uint8_t)val;
12067 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12068 rc = BAD_VALUE;
12069 }
12070 }
12071 }
12072
12073 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12074 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12075 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12076 colorCorrectMode)) {
12077 rc = BAD_VALUE;
12078 }
12079 }
12080
12081 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12082 cam_color_correct_gains_t colorCorrectGains;
12083 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12084 colorCorrectGains.gains[i] =
12085 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12086 }
12087 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12088 colorCorrectGains)) {
12089 rc = BAD_VALUE;
12090 }
12091 }
12092
12093 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12094 cam_color_correct_matrix_t colorCorrectTransform;
12095 cam_rational_type_t transform_elem;
12096 size_t num = 0;
12097 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12098 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12099 transform_elem.numerator =
12100 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12101 transform_elem.denominator =
12102 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12103 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12104 num++;
12105 }
12106 }
12107 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12108 colorCorrectTransform)) {
12109 rc = BAD_VALUE;
12110 }
12111 }
12112
12113 cam_trigger_t aecTrigger;
12114 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12115 aecTrigger.trigger_id = -1;
12116 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12117 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12118 aecTrigger.trigger =
12119 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12120 aecTrigger.trigger_id =
12121 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12122 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12123 aecTrigger)) {
12124 rc = BAD_VALUE;
12125 }
12126 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12127 aecTrigger.trigger, aecTrigger.trigger_id);
12128 }
12129
12130 /*af_trigger must come with a trigger id*/
12131 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12132 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12133 cam_trigger_t af_trigger;
12134 af_trigger.trigger =
12135 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12136 af_trigger.trigger_id =
12137 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12138 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12139 rc = BAD_VALUE;
12140 }
12141 LOGD("AfTrigger: %d AfTriggerID: %d",
12142 af_trigger.trigger, af_trigger.trigger_id);
12143 }
12144
12145 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12146 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12147 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12148 rc = BAD_VALUE;
12149 }
12150 }
12151 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12152 cam_edge_application_t edge_application;
12153 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012154
Thierry Strudel3d639192016-09-09 11:52:26 -070012155 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12156 edge_application.sharpness = 0;
12157 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012158 edge_application.sharpness =
12159 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12160 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12161 int32_t sharpness =
12162 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12163 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12164 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12165 LOGD("Setting edge mode sharpness %d", sharpness);
12166 edge_application.sharpness = sharpness;
12167 }
12168 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012169 }
12170 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12171 rc = BAD_VALUE;
12172 }
12173 }
12174
12175 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12176 int32_t respectFlashMode = 1;
12177 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12178 uint8_t fwk_aeMode =
12179 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012180 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12181 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12182 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012183 respectFlashMode = 0;
12184 LOGH("AE Mode controls flash, ignore android.flash.mode");
12185 }
12186 }
12187 if (respectFlashMode) {
12188 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12189 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12190 LOGH("flash mode after mapping %d", val);
12191 // To check: CAM_INTF_META_FLASH_MODE usage
12192 if (NAME_NOT_FOUND != val) {
12193 uint8_t flashMode = (uint8_t)val;
12194 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12195 rc = BAD_VALUE;
12196 }
12197 }
12198 }
12199 }
12200
12201 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12202 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12203 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12204 rc = BAD_VALUE;
12205 }
12206 }
12207
12208 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12209 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12210 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12211 flashFiringTime)) {
12212 rc = BAD_VALUE;
12213 }
12214 }
12215
12216 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12217 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12218 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12219 hotPixelMode)) {
12220 rc = BAD_VALUE;
12221 }
12222 }
12223
12224 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12225 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12226 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12227 lensAperture)) {
12228 rc = BAD_VALUE;
12229 }
12230 }
12231
12232 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12233 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12234 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12235 filterDensity)) {
12236 rc = BAD_VALUE;
12237 }
12238 }
12239
12240 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12241 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12242 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12243 focalLength)) {
12244 rc = BAD_VALUE;
12245 }
12246 }
12247
12248 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12249 uint8_t optStabMode =
12250 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12251 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12252 optStabMode)) {
12253 rc = BAD_VALUE;
12254 }
12255 }
12256
12257 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12258 uint8_t videoStabMode =
12259 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12260 LOGD("videoStabMode from APP = %d", videoStabMode);
12261 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12262 videoStabMode)) {
12263 rc = BAD_VALUE;
12264 }
12265 }
12266
12267
12268 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12269 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12270 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12271 noiseRedMode)) {
12272 rc = BAD_VALUE;
12273 }
12274 }
12275
12276 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12277 float reprocessEffectiveExposureFactor =
12278 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12279 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12280 reprocessEffectiveExposureFactor)) {
12281 rc = BAD_VALUE;
12282 }
12283 }
12284
12285 cam_crop_region_t scalerCropRegion;
12286 bool scalerCropSet = false;
12287 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12288 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12289 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12290 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12291 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12292
12293 // Map coordinate system from active array to sensor output.
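 // The mapper rescales active-array coordinates into the sensor output space; e.g. if
 // the sensor output is binned to half the active-array size, left/top/width/height
 // would all roughly halve (exact behavior is defined by mCropRegionMapper's implementation).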
12294 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12295 scalerCropRegion.width, scalerCropRegion.height);
12296
12297 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12298 scalerCropRegion)) {
12299 rc = BAD_VALUE;
12300 }
12301 scalerCropSet = true;
12302 }
12303
12304 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12305 int64_t sensorExpTime =
12306 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12307 LOGD("setting sensorExpTime %lld", sensorExpTime);
12308 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12309 sensorExpTime)) {
12310 rc = BAD_VALUE;
12311 }
12312 }
12313
12314 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12315 int64_t sensorFrameDuration =
12316 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012317 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12318 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12319 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12320 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12321 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12322 sensorFrameDuration)) {
12323 rc = BAD_VALUE;
12324 }
12325 }
12326
12327 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12328 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12329 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12330 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12331 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12332 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12333 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12334 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12335 sensorSensitivity)) {
12336 rc = BAD_VALUE;
12337 }
12338 }
12339
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012340#ifndef USE_HAL_3_3
12341 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12342 int32_t ispSensitivity =
12343 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12344 if (ispSensitivity <
12345 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12346 ispSensitivity =
12347 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12348 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12349 }
12350 if (ispSensitivity >
12351 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12352 ispSensitivity =
12353 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12354 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12355 }
12356 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12357 ispSensitivity)) {
12358 rc = BAD_VALUE;
12359 }
12360 }
12361#endif
12362
Thierry Strudel3d639192016-09-09 11:52:26 -070012363 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12364 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12365 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12366 rc = BAD_VALUE;
12367 }
12368 }
12369
12370 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12371 uint8_t fwk_facedetectMode =
12372 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12373
12374 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12375 fwk_facedetectMode);
12376
12377 if (NAME_NOT_FOUND != val) {
12378 uint8_t facedetectMode = (uint8_t)val;
12379 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12380 facedetectMode)) {
12381 rc = BAD_VALUE;
12382 }
12383 }
12384 }
12385
Thierry Strudel54dc9782017-02-15 12:12:10 -080012386 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012387 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012388 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012389 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12390 histogramMode)) {
12391 rc = BAD_VALUE;
12392 }
12393 }
12394
12395 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12396 uint8_t sharpnessMapMode =
12397 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12398 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12399 sharpnessMapMode)) {
12400 rc = BAD_VALUE;
12401 }
12402 }
12403
12404 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12405 uint8_t tonemapMode =
12406 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12407 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12408 rc = BAD_VALUE;
12409 }
12410 }
12411 /* Tonemap curve channels: ch0 = G, ch1 = B, ch2 = R */
12412 /* All tonemap channels will have the same number of points */
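 /* Each ANDROID_TONEMAP_CURVE_* entry is a flattened list of (Pin, Pout) pairs,
  * i.e. [P0in, P0out, P1in, P1out, ...], which is why the point count is
  * entry.count / 2 and each point below copies two consecutive floats. */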
12413 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12414 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12415 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12416 cam_rgb_tonemap_curves tonemapCurves;
12417 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12418 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12419 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12420 tonemapCurves.tonemap_points_cnt,
12421 CAM_MAX_TONEMAP_CURVE_SIZE);
12422 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12423 }
12424
12425 /* ch0 = G*/
12426 size_t point = 0;
12427 cam_tonemap_curve_t tonemapCurveGreen;
12428 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12429 for (size_t j = 0; j < 2; j++) {
12430 tonemapCurveGreen.tonemap_points[i][j] =
12431 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12432 point++;
12433 }
12434 }
12435 tonemapCurves.curves[0] = tonemapCurveGreen;
12436
12437 /* ch 1 = B */
12438 point = 0;
12439 cam_tonemap_curve_t tonemapCurveBlue;
12440 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12441 for (size_t j = 0; j < 2; j++) {
12442 tonemapCurveBlue.tonemap_points[i][j] =
12443 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12444 point++;
12445 }
12446 }
12447 tonemapCurves.curves[1] = tonemapCurveBlue;
12448
12449 /* ch 2 = R */
12450 point = 0;
12451 cam_tonemap_curve_t tonemapCurveRed;
12452 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12453 for (size_t j = 0; j < 2; j++) {
12454 tonemapCurveRed.tonemap_points[i][j] =
12455 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12456 point++;
12457 }
12458 }
12459 tonemapCurves.curves[2] = tonemapCurveRed;
12460
12461 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12462 tonemapCurves)) {
12463 rc = BAD_VALUE;
12464 }
12465 }
12466
12467 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12468 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12469 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12470 captureIntent)) {
12471 rc = BAD_VALUE;
12472 }
12473 }
12474
12475 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12476 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12477 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12478 blackLevelLock)) {
12479 rc = BAD_VALUE;
12480 }
12481 }
12482
12483 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12484 uint8_t lensShadingMapMode =
12485 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12486 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12487 lensShadingMapMode)) {
12488 rc = BAD_VALUE;
12489 }
12490 }
12491
12492 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12493 cam_area_t roi;
12494 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012495 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012496
12497 // Map coordinate system from active array to sensor output.
12498 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12499 roi.rect.height);
12500
12501 if (scalerCropSet) {
12502 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12503 }
12504 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12505 rc = BAD_VALUE;
12506 }
12507 }
12508
12509 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12510 cam_area_t roi;
12511 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012512 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012513
12514 // Map coordinate system from active array to sensor output.
12515 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12516 roi.rect.height);
12517
12518 if (scalerCropSet) {
12519 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12520 }
12521 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12522 rc = BAD_VALUE;
12523 }
12524 }
12525
12526 // CDS for non-HFR non-video mode
12527 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12528 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12529 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12530 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12531 LOGE("Invalid CDS mode %d!", *fwk_cds);
12532 } else {
12533 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12534 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12535 rc = BAD_VALUE;
12536 }
12537 }
12538 }
12539
Thierry Strudel04e026f2016-10-10 11:27:36 -070012540 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012541 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012542 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012543 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12544 }
12545 if (m_bVideoHdrEnabled)
12546 vhdr = CAM_VIDEO_HDR_MODE_ON;
12547
Thierry Strudel54dc9782017-02-15 12:12:10 -080012548 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12549
12550 if(vhdr != curr_hdr_state)
12551 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12552
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012553 rc = setVideoHdrMode(mParameters, vhdr);
12554 if (rc != NO_ERROR) {
12555 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012556 }
12557
12558 //IR
12559 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12560 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12561 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012562 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12563 uint8_t isIRon = 0;
12564
12565 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012566 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12567 LOGE("Invalid IR mode %d!", fwk_ir);
12568 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012569 if(isIRon != curr_ir_state )
12570 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12571
Thierry Strudel04e026f2016-10-10 11:27:36 -070012572 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12573 CAM_INTF_META_IR_MODE, fwk_ir)) {
12574 rc = BAD_VALUE;
12575 }
12576 }
12577 }
12578
Thierry Strudel54dc9782017-02-15 12:12:10 -080012579 //Binning Correction Mode
12580 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12581 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12582 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12583 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12584 || (0 > fwk_binning_correction)) {
12585 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12586 } else {
12587 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12588 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12589 rc = BAD_VALUE;
12590 }
12591 }
12592 }
12593
Thierry Strudel269c81a2016-10-12 12:13:59 -070012594 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12595 float aec_speed;
12596 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12597 LOGD("AEC Speed :%f", aec_speed);
12598 if ( aec_speed < 0 ) {
12599 LOGE("Invalid AEC mode %f!", aec_speed);
12600 } else {
12601 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12602 aec_speed)) {
12603 rc = BAD_VALUE;
12604 }
12605 }
12606 }
12607
12608 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12609 float awb_speed;
12610 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12611 LOGD("AWB Speed :%f", awb_speed);
12612 if ( awb_speed < 0 ) {
12613 LOGE("Invalid AWB mode %f!", awb_speed);
12614 } else {
12615 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12616 awb_speed)) {
12617 rc = BAD_VALUE;
12618 }
12619 }
12620 }
12621
Thierry Strudel3d639192016-09-09 11:52:26 -070012622 // TNR
12623 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12624 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12625 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012626 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012627 cam_denoise_param_t tnr;
12628 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12629 tnr.process_plates =
12630 (cam_denoise_process_type_t)frame_settings.find(
12631 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12632 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012633
12634 if(b_TnrRequested != curr_tnr_state)
12635 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12636
Thierry Strudel3d639192016-09-09 11:52:26 -070012637 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12638 rc = BAD_VALUE;
12639 }
12640 }
12641
Thierry Strudel54dc9782017-02-15 12:12:10 -080012642 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012643 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012644 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012645 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12646 *exposure_metering_mode)) {
12647 rc = BAD_VALUE;
12648 }
12649 }
12650
Thierry Strudel3d639192016-09-09 11:52:26 -070012651 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12652 int32_t fwk_testPatternMode =
12653 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12654 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12655 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12656
12657 if (NAME_NOT_FOUND != testPatternMode) {
12658 cam_test_pattern_data_t testPatternData;
12659 memset(&testPatternData, 0, sizeof(testPatternData));
12660 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12661 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12662 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12663 int32_t *fwk_testPatternData =
12664 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12665 testPatternData.r = fwk_testPatternData[0];
12666 testPatternData.b = fwk_testPatternData[3];
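 // ANDROID_SENSOR_TEST_PATTERN_DATA carries one value per Bayer channel:
 // [0] = R and [3] = B, with the two green samples in [1] and [2]; which of
 // those is Gr vs. Gb depends on the sensor color arrangement, handled below.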
12667 switch (gCamCapability[mCameraId]->color_arrangement) {
12668 case CAM_FILTER_ARRANGEMENT_RGGB:
12669 case CAM_FILTER_ARRANGEMENT_GRBG:
12670 testPatternData.gr = fwk_testPatternData[1];
12671 testPatternData.gb = fwk_testPatternData[2];
12672 break;
12673 case CAM_FILTER_ARRANGEMENT_GBRG:
12674 case CAM_FILTER_ARRANGEMENT_BGGR:
12675 testPatternData.gr = fwk_testPatternData[2];
12676 testPatternData.gb = fwk_testPatternData[1];
12677 break;
12678 default:
12679 LOGE("color arrangement %d is not supported",
12680 gCamCapability[mCameraId]->color_arrangement);
12681 break;
12682 }
12683 }
12684 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12685 testPatternData)) {
12686 rc = BAD_VALUE;
12687 }
12688 } else {
12689 LOGE("Invalid framework sensor test pattern mode %d",
12690 fwk_testPatternMode);
12691 }
12692 }
12693
12694 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12695 size_t count = 0;
12696 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12697 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12698 gps_coords.data.d, gps_coords.count, count);
12699 if (gps_coords.count != count) {
12700 rc = BAD_VALUE;
12701 }
12702 }
12703
12704 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12705 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12706 size_t count = 0;
12707 const char *gps_methods_src = (const char *)
12708 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12709 memset(gps_methods, '\0', sizeof(gps_methods));
12710 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12711 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12712 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12713 if (GPS_PROCESSING_METHOD_SIZE != count) {
12714 rc = BAD_VALUE;
12715 }
12716 }
12717
12718 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12719 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12720 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12721 gps_timestamp)) {
12722 rc = BAD_VALUE;
12723 }
12724 }
12725
12726 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12727 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12728 cam_rotation_info_t rotation_info;
12729 if (orientation == 0) {
12730 rotation_info.rotation = ROTATE_0;
12731 } else if (orientation == 90) {
12732 rotation_info.rotation = ROTATE_90;
12733 } else if (orientation == 180) {
12734 rotation_info.rotation = ROTATE_180;
12735 } else if (orientation == 270) {
12736 rotation_info.rotation = ROTATE_270;
12737 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012738 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012739 rotation_info.streamId = snapshotStreamId;
12740 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12741 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12742 rc = BAD_VALUE;
12743 }
12744 }
12745
12746 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12747 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12748 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12749 rc = BAD_VALUE;
12750 }
12751 }
12752
12753 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12754 uint32_t thumb_quality = (uint32_t)
12755 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12756 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12757 thumb_quality)) {
12758 rc = BAD_VALUE;
12759 }
12760 }
12761
12762 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12763 cam_dimension_t dim;
12764 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12765 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12766 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12767 rc = BAD_VALUE;
12768 }
12769 }
12770
12771 // Internal metadata
12772 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12773 size_t count = 0;
12774 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12775 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12776 privatedata.data.i32, privatedata.count, count);
12777 if (privatedata.count != count) {
12778 rc = BAD_VALUE;
12779 }
12780 }
12781
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012782 // ISO/Exposure Priority
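 /* Illustrative framework-side usage (tag names are the ones checked below; the
  * priority enum value and ISO number are assumptions, not values taken from this
  * file, and "settings" stands for the request's CameraMetadata):
  *   int32_t priority = 0;   // assumed to select ISO priority
  *   int64_t iso = 800;
  *   settings.update(QCAMERA3_SELECT_PRIORITY, &priority, 1);
  *   settings.update(QCAMERA3_USE_ISO_EXP_PRIORITY, &iso, 1);
  */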
12783 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12784 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12785 cam_priority_mode_t mode =
12786 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12787 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12788 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12789 use_iso_exp_pty.previewOnly = FALSE;
12790 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12791 use_iso_exp_pty.value = *ptr;
12792
12793 if(CAM_ISO_PRIORITY == mode) {
12794 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12795 use_iso_exp_pty)) {
12796 rc = BAD_VALUE;
12797 }
12798 }
12799 else {
12800 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12801 use_iso_exp_pty)) {
12802 rc = BAD_VALUE;
12803 }
12804 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012805
12806 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12807 rc = BAD_VALUE;
12808 }
12809 }
12810 } else {
12811 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12812 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012813 }
12814 }
12815
12816 // Saturation
12817 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12818 int32_t* use_saturation =
12819 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12820 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12821 rc = BAD_VALUE;
12822 }
12823 }
12824
Thierry Strudel3d639192016-09-09 11:52:26 -070012825 // EV step
12826 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12827 gCamCapability[mCameraId]->exp_compensation_step)) {
12828 rc = BAD_VALUE;
12829 }
12830
12831 // CDS info
12832 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12833 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12834 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12835
12836 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12837 CAM_INTF_META_CDS_DATA, *cdsData)) {
12838 rc = BAD_VALUE;
12839 }
12840 }
12841
Shuzhen Wang19463d72016-03-08 11:09:52 -080012842 // Hybrid AE
12843 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12844 uint8_t *hybrid_ae = (uint8_t *)
12845 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12846
12847 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12848 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12849 rc = BAD_VALUE;
12850 }
12851 }
12852
Shuzhen Wang14415f52016-11-16 18:26:18 -080012853 // Histogram
12854 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12855 uint8_t histogramMode =
12856 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12857 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12858 histogramMode)) {
12859 rc = BAD_VALUE;
12860 }
12861 }
12862
12863 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12864 int32_t histogramBins =
12865 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12866 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12867 histogramBins)) {
12868 rc = BAD_VALUE;
12869 }
12870 }
12871
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012872 // Tracking AF
12873 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12874 uint8_t trackingAfTrigger =
12875 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12876 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12877 trackingAfTrigger)) {
12878 rc = BAD_VALUE;
12879 }
12880 }
12881
Thierry Strudel3d639192016-09-09 11:52:26 -070012882 return rc;
12883}
12884
12885/*===========================================================================
12886 * FUNCTION : captureResultCb
12887 *
12888 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12889 *
12890 * PARAMETERS :
12891 * @frame : frame information from mm-camera-interface
12892 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12893 * @userdata: userdata
12894 *
12895 * RETURN : NONE
12896 *==========================================================================*/
12897void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12898 camera3_stream_buffer_t *buffer,
12899 uint32_t frame_number, bool isInputBuffer, void *userdata)
12900{
12901 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12902 if (hw == NULL) {
12903 LOGE("Invalid hw %p", hw);
12904 return;
12905 }
12906
12907 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12908 return;
12909}
12910
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012911/*===========================================================================
12912 * FUNCTION : setBufferErrorStatus
12913 *
12914 * DESCRIPTION: Callback handler for channels to report any buffer errors
12915 *
12916 * PARAMETERS :
12917 * @ch : Channel on which buffer error is reported from
12918 * @frame_number : frame number on which buffer error is reported on
12919 * @buffer_status : buffer error status
12920 * @userdata: userdata
12921 *
12922 * RETURN : NONE
12923 *==========================================================================*/
12924void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12925 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12926{
12927 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12928 if (hw == NULL) {
12929 LOGE("Invalid hw %p", hw);
12930 return;
12931 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012932
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012933 hw->setBufferErrorStatus(ch, frame_number, err);
12934 return;
12935}
12936
12937void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12938 uint32_t frameNumber, camera3_buffer_status_t err)
12939{
12940 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12941 pthread_mutex_lock(&mMutex);
12942
12943 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12944 if (req.frame_number != frameNumber)
12945 continue;
12946 for (auto& k : req.mPendingBufferList) {
12947 if(k.stream->priv == ch) {
12948 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12949 }
12950 }
12951 }
12952
12953 pthread_mutex_unlock(&mMutex);
12954 return;
12955}
Thierry Strudel3d639192016-09-09 11:52:26 -070012956/*===========================================================================
12957 * FUNCTION : initialize
12958 *
12959 * DESCRIPTION: Pass framework callback pointers to HAL
12960 *
12961 * PARAMETERS :
12962 *
12963 *
12964 * RETURN : Success : 0
12965 * Failure: -ENODEV
12966 *==========================================================================*/
12967
12968int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12969 const camera3_callback_ops_t *callback_ops)
12970{
12971 LOGD("E");
12972 QCamera3HardwareInterface *hw =
12973 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12974 if (!hw) {
12975 LOGE("NULL camera device");
12976 return -ENODEV;
12977 }
12978
12979 int rc = hw->initialize(callback_ops);
12980 LOGD("X");
12981 return rc;
12982}
12983
12984/*===========================================================================
12985 * FUNCTION : configure_streams
12986 *
12987 * DESCRIPTION: Configure the set of output streams requested by the framework
12988 *
12989 * PARAMETERS :
12990 * @device : camera3 device handle
12991 * @stream_list : list of streams to be configured
12992 * RETURN : Success: 0
12993 * Failure: -EINVAL (if stream configuration is invalid)
12994 * -ENODEV (fatal error)
12995 *==========================================================================*/
12996
12997int QCamera3HardwareInterface::configure_streams(
12998 const struct camera3_device *device,
12999 camera3_stream_configuration_t *stream_list)
13000{
13001 LOGD("E");
13002 QCamera3HardwareInterface *hw =
13003 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13004 if (!hw) {
13005 LOGE("NULL camera device");
13006 return -ENODEV;
13007 }
13008 int rc = hw->configureStreams(stream_list);
13009 LOGD("X");
13010 return rc;
13011}
13012
13013/*===========================================================================
13014 * FUNCTION : construct_default_request_settings
13015 *
13016 * DESCRIPTION: Configure a settings buffer to meet the required use case
13017 *
13018 * PARAMETERS :
13019 * @device : camera3 device handle
13020 * @type : CAMERA3_TEMPLATE_* request template type
13021 * RETURN : Success: Return valid metadata
13022 * Failure: Return NULL
13023 *==========================================================================*/
13024const camera_metadata_t* QCamera3HardwareInterface::
13025 construct_default_request_settings(const struct camera3_device *device,
13026 int type)
13027{
13028
13029 LOGD("E");
13030 camera_metadata_t* fwk_metadata = NULL;
13031 QCamera3HardwareInterface *hw =
13032 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13033 if (!hw) {
13034 LOGE("NULL camera device");
13035 return NULL;
13036 }
13037
13038 fwk_metadata = hw->translateCapabilityToMetadata(type);
13039
13040 LOGD("X");
13041 return fwk_metadata;
13042}
13043
13044/*===========================================================================
13045 * FUNCTION : process_capture_request
13046 *
13047 * DESCRIPTION: Process (orchestrate) a capture request from the framework
13048 *
13049 * PARAMETERS :
13050 * @device : camera3 device handle
13051 * @request : capture request to be processed
13052 * RETURN : 0 on success, -EINVAL on invalid device or request
13053 *==========================================================================*/
13054int QCamera3HardwareInterface::process_capture_request(
13055 const struct camera3_device *device,
13056 camera3_capture_request_t *request)
13057{
13058 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013059 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013060 QCamera3HardwareInterface *hw =
13061 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13062 if (!hw) {
13063 LOGE("NULL camera device");
13064 return -EINVAL;
13065 }
13066
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013067 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013068 LOGD("X");
13069 return rc;
13070}
13071
13072/*===========================================================================
13073 * FUNCTION : dump
13074 *
13075 * DESCRIPTION: Dump HAL debug state to the given file descriptor
13076 *
13077 * PARAMETERS :
13078 * @device : camera3 device handle
13079 * @fd : file descriptor to write the dump to
13080 * RETURN : None
13081 *==========================================================================*/
13082
13083void QCamera3HardwareInterface::dump(
13084 const struct camera3_device *device, int fd)
13085{
13086 /* Log level property is read when "adb shell dumpsys media.camera" is
13087 called so that the log level can be controlled without restarting
13088 the media server */
13089 getLogLevel();
13090
13091 LOGD("E");
13092 QCamera3HardwareInterface *hw =
13093 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13094 if (!hw) {
13095 LOGE("NULL camera device");
13096 return;
13097 }
13098
13099 hw->dump(fd);
13100 LOGD("X");
13101 return;
13102}
13103
13104/*===========================================================================
13105 * FUNCTION : flush
13106 *
13107 * DESCRIPTION: Flush all in-flight requests and return pending buffers/results
13108 *
13109 * PARAMETERS :
13110 * @device : camera3 device handle
13111 *
13112 * RETURN : 0 on success, -EINVAL/-ENODEV on failure
13113 *==========================================================================*/
13114
13115int QCamera3HardwareInterface::flush(
13116 const struct camera3_device *device)
13117{
13118 int rc;
13119 LOGD("E");
13120 QCamera3HardwareInterface *hw =
13121 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13122 if (!hw) {
13123 LOGE("NULL camera device");
13124 return -EINVAL;
13125 }
13126
13127 pthread_mutex_lock(&hw->mMutex);
13128 // Validate current state
13129 switch (hw->mState) {
13130 case STARTED:
13131 /* valid state */
13132 break;
13133
13134 case ERROR:
13135 pthread_mutex_unlock(&hw->mMutex);
13136 hw->handleCameraDeviceError();
13137 return -ENODEV;
13138
13139 default:
13140 LOGI("Flush returned during state %d", hw->mState);
13141 pthread_mutex_unlock(&hw->mMutex);
13142 return 0;
13143 }
13144 pthread_mutex_unlock(&hw->mMutex);
13145
13146 rc = hw->flush(true /* restart channels */ );
13147 LOGD("X");
13148 return rc;
13149}
13150
13151/*===========================================================================
13152 * FUNCTION : close_camera_device
13153 *
13154 * DESCRIPTION: Close the camera device and free the HAL instance
13155 *
13156 * PARAMETERS :
13157 * @device : camera device handle to be closed
13158 *
13159 * RETURN : NO_ERROR on success, BAD_VALUE on NULL device
13160 *==========================================================================*/
13161int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13162{
13163 int ret = NO_ERROR;
13164 QCamera3HardwareInterface *hw =
13165 reinterpret_cast<QCamera3HardwareInterface *>(
13166 reinterpret_cast<camera3_device_t *>(device)->priv);
13167 if (!hw) {
13168 LOGE("NULL camera device");
13169 return BAD_VALUE;
13170 }
13171
13172 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13173 delete hw;
13174 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013175 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013176 return ret;
13177}
13178
13179/*===========================================================================
13180 * FUNCTION : getWaveletDenoiseProcessPlate
13181 *
13182 * DESCRIPTION: query wavelet denoise process plate
13183 *
13184 * PARAMETERS : None
13185 *
13186 * RETURN : WNR process plate value
13187 *==========================================================================*/
13188cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13189{
13190 char prop[PROPERTY_VALUE_MAX];
13191 memset(prop, 0, sizeof(prop));
13192 property_get("persist.denoise.process.plates", prop, "0");
13193 int processPlate = atoi(prop);
13194 switch(processPlate) {
13195 case 0:
13196 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13197 case 1:
13198 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13199 case 2:
13200 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13201 case 3:
13202 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13203 default:
13204 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13205 }
13206}
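// Usage note (illustrative, based on the switch above): the WNR plate comes from
// the persist.denoise.process.plates property, e.g.
//   adb shell setprop persist.denoise.process.plates 1   -> CAM_WAVELET_DENOISE_CBCR_ONLY
//   adb shell setprop persist.denoise.process.plates 2   -> CAM_WAVELET_DENOISE_STREAMLINE_YCBCR
// An unset property defaults to "0" (CAM_WAVELET_DENOISE_YCBCR_PLANE), and any
// out-of-range value falls back to CAM_WAVELET_DENOISE_STREAMLINE_YCBCR.
// getTemporalDenoiseProcessPlate() below applies the same mapping to
// persist.tnr.process.plates.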
13207
13208
13209/*===========================================================================
13210 * FUNCTION : getTemporalDenoiseProcessPlate
13211 *
13212 * DESCRIPTION: query temporal denoise process plate
13213 *
13214 * PARAMETERS : None
13215 *
13216 * RETURN : TNR process plate value
13217 *==========================================================================*/
13218cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13219{
13220 char prop[PROPERTY_VALUE_MAX];
13221 memset(prop, 0, sizeof(prop));
13222 property_get("persist.tnr.process.plates", prop, "0");
13223 int processPlate = atoi(prop);
13224 switch(processPlate) {
13225 case 0:
13226 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13227 case 1:
13228 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13229 case 2:
13230 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13231 case 3:
13232 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13233 default:
13234 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13235 }
13236}
13237
13238
13239/*===========================================================================
13240 * FUNCTION : extractSceneMode
13241 *
13242 * DESCRIPTION: Extract scene mode from frameworks set metadata
13243 *
13244 * PARAMETERS :
13245 * @frame_settings: CameraMetadata reference
13246 * @metaMode: ANDROID_CONTROL_MODE value
13247 * @hal_metadata: hal metadata structure
13248 *
13249 * RETURN : int32_t type of status, NO_ERROR on success
13250 *==========================================================================*/
13251int32_t QCamera3HardwareInterface::extractSceneMode(
13252 const CameraMetadata &frame_settings, uint8_t metaMode,
13253 metadata_buffer_t *hal_metadata)
13254{
13255 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013256 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13257
13258 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13259 LOGD("Ignoring control mode OFF_KEEP_STATE");
13260 return NO_ERROR;
13261 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013262
13263 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13264 camera_metadata_ro_entry entry =
13265 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13266 if (0 == entry.count)
13267 return rc;
13268
13269 uint8_t fwk_sceneMode = entry.data.u8[0];
13270
13271 int val = lookupHalName(SCENE_MODES_MAP,
13272 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13273 fwk_sceneMode);
13274 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013275 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013276 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013277 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013278 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013279
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013280 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13281 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13282 }
13283
13284 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13285 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013286 cam_hdr_param_t hdr_params;
13287 hdr_params.hdr_enable = 1;
13288 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13289 hdr_params.hdr_need_1x = false;
13290 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13291 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13292 rc = BAD_VALUE;
13293 }
13294 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013295
Thierry Strudel3d639192016-09-09 11:52:26 -070013296 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13297 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13298 rc = BAD_VALUE;
13299 }
13300 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013301
13302 if (mForceHdrSnapshot) {
13303 cam_hdr_param_t hdr_params;
13304 hdr_params.hdr_enable = 1;
13305 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13306 hdr_params.hdr_need_1x = false;
13307 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13308 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13309 rc = BAD_VALUE;
13310 }
13311 }
13312
Thierry Strudel3d639192016-09-09 11:52:26 -070013313 return rc;
13314}
13315
13316/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013317 * FUNCTION : setVideoHdrMode
13318 *
13319 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13320 *
13321 * PARAMETERS :
13322 * @hal_metadata: hal metadata structure
13323 * @vhdr : video HDR mode (QCAMERA3_VIDEO_HDR_MODE) to be set
13324 *
13325 * RETURN : NO_ERROR on success, BAD_VALUE for an invalid mode
13326 *==========================================================================*/
13327int32_t QCamera3HardwareInterface::setVideoHdrMode(
13328 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13329{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013330 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13331 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13332 }
13333
13334 LOGE("Invalid Video HDR mode %d!", vhdr);
13335 return BAD_VALUE;
13336}
13337
13338/*===========================================================================
13339 * FUNCTION : setSensorHDR
13340 *
13341 * DESCRIPTION: Enable/disable sensor HDR.
13342 *
13343 * PARAMETERS :
13344 * @hal_metadata: hal metadata structure
13345 * @enable: boolean whether to enable/disable sensor HDR
13346 * @isVideoHdrEnable: true when invoked for video HDR mode
13347 * RETURN : int32_t type of status, NO_ERROR on success
13348 *==========================================================================*/
13349int32_t QCamera3HardwareInterface::setSensorHDR(
13350 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13351{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013352 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013353 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13354
13355 if (enable) {
13356 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13357 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13358 #ifdef _LE_CAMERA_
13359 //Default to staggered HDR for IOT
13360 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13361 #else
13362 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13363 #endif
13364 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13365 }
13366
13367 bool isSupported = false;
13368 switch (sensor_hdr) {
13369 case CAM_SENSOR_HDR_IN_SENSOR:
13370 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13371 CAM_QCOM_FEATURE_SENSOR_HDR) {
13372 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013373 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013374 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013375 break;
13376 case CAM_SENSOR_HDR_ZIGZAG:
13377 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13378 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13379 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013380 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013381 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013382 break;
13383 case CAM_SENSOR_HDR_STAGGERED:
13384 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13385 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13386 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013387 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013388 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013389 break;
13390 case CAM_SENSOR_HDR_OFF:
13391 isSupported = true;
13392 LOGD("Turning off sensor HDR");
13393 break;
13394 default:
13395 LOGE("HDR mode %d not supported", sensor_hdr);
13396 rc = BAD_VALUE;
13397 break;
13398 }
13399
13400 if(isSupported) {
13401 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13402 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13403 rc = BAD_VALUE;
13404 } else {
13405 if(!isVideoHdrEnable)
13406 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013407 }
13408 }
13409 return rc;
13410}
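// Usage note: the sensor HDR type is selected via persist.camera.sensor.hdr when
// HDR is enabled, e.g. (assuming the cam_sensor_hdr_type_t ordering implied by the
// "3" staggered default used for _LE_CAMERA_ above; the exact numeric mapping of
// the other modes is an assumption here):
//   adb shell setprop persist.camera.sensor.hdr 3   # request staggered HDR
// The requested type is only applied if the matching CAM_QCOM_FEATURE_* bit is
// present in qcom_supported_feature_mask, as checked in the switch above.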
13411
13412/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013413 * FUNCTION : needRotationReprocess
13414 *
13415 * DESCRIPTION: Check whether rotation needs to be done by reprocess in pp
13416 *
13417 * PARAMETERS : none
13418 *
13419 * RETURN : true: needed
13420 * false: no need
13421 *==========================================================================*/
13422bool QCamera3HardwareInterface::needRotationReprocess()
13423{
13424 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13425 // current rotation is not zero, and pp has the capability to process rotation
13426 LOGH("need do reprocess for rotation");
13427 return true;
13428 }
13429
13430 return false;
13431}
13432
13433/*===========================================================================
13434 * FUNCTION : needReprocess
13435 *
13436 * DESCRIPTION: Check whether reprocess is needed
13437 *
13438 * PARAMETERS : none
13439 *
13440 * RETURN : true: needed
13441 * false: no need
13442 *==========================================================================*/
13443bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13444{
13445 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13446 // TODO: add for ZSL HDR later
13447 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13448 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13449 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13450 return true;
13451 } else {
13452 LOGH("already post processed frame");
13453 return false;
13454 }
13455 }
13456 return needRotationReprocess();
13457}
13458
13459/*===========================================================================
13460 * FUNCTION : needJpegExifRotation
13461 *
13462 * DESCRIPTION: Check whether JPEG EXIF rotation is needed
13463 *
13464 * PARAMETERS : none
13465 *
13466 * RETURN : true: needed
13467 * false: no need
13468 *==========================================================================*/
13469bool QCamera3HardwareInterface::needJpegExifRotation()
13470{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013471 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013472 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13473 LOGD("Need use Jpeg EXIF Rotation");
13474 return true;
13475 }
13476 return false;
13477}
13478
13479/*===========================================================================
13480 * FUNCTION : addOfflineReprocChannel
13481 *
13482 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13483 * coming from input channel
13484 *
13485 * PARAMETERS :
13486 * @config : reprocess configuration
13487 * @inputChHandle : pointer to the input (source) channel
13488 *
13489 *
13490 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13491 *==========================================================================*/
13492QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13493 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13494{
13495 int32_t rc = NO_ERROR;
13496 QCamera3ReprocessChannel *pChannel = NULL;
13497
13498 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013499 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13500 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013501 if (NULL == pChannel) {
13502 LOGE("no mem for reprocess channel");
13503 return NULL;
13504 }
13505
13506 rc = pChannel->initialize(IS_TYPE_NONE);
13507 if (rc != NO_ERROR) {
13508 LOGE("init reprocess channel failed, ret = %d", rc);
13509 delete pChannel;
13510 return NULL;
13511 }
13512
13513 // pp feature config
13514 cam_pp_feature_config_t pp_config;
13515 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13516
13517 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13518 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13519 & CAM_QCOM_FEATURE_DSDN) {
13520 // Use CPP CDS in case h/w supports it.
13521 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13522 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13523 }
13524 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13525 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13526 }
13527
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013528 if (config.hdr_param.hdr_enable) {
13529 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13530 pp_config.hdr_param = config.hdr_param;
13531 }
13532
13533 if (mForceHdrSnapshot) {
13534 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13535 pp_config.hdr_param.hdr_enable = 1;
13536 pp_config.hdr_param.hdr_need_1x = 0;
13537 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13538 }
13539
Thierry Strudel3d639192016-09-09 11:52:26 -070013540 rc = pChannel->addReprocStreamsFromSource(pp_config,
13541 config,
13542 IS_TYPE_NONE,
13543 mMetadataChannel);
13544
13545 if (rc != NO_ERROR) {
13546 delete pChannel;
13547 return NULL;
13548 }
13549 return pChannel;
13550}
13551
13552/*===========================================================================
13553 * FUNCTION : getMobicatMask
13554 *
13555 * DESCRIPTION: returns mobicat mask
13556 *
13557 * PARAMETERS : none
13558 *
13559 * RETURN : mobicat mask
13560 *
13561 *==========================================================================*/
13562uint8_t QCamera3HardwareInterface::getMobicatMask()
13563{
13564 return m_MobicatMask;
13565}
13566
13567/*===========================================================================
13568 * FUNCTION : setMobicat
13569 *
13570 * DESCRIPTION: set Mobicat on/off.
13571 *
13572 * PARAMETERS :
13573 * @params : none
13574 *
13575 * RETURN : int32_t type of status
13576 * NO_ERROR -- success
13577 * non-zero failure code
13578 *==========================================================================*/
13579int32_t QCamera3HardwareInterface::setMobicat()
13580{
13581 char value [PROPERTY_VALUE_MAX];
13582 property_get("persist.camera.mobicat", value, "0");
13583 int32_t ret = NO_ERROR;
13584 uint8_t enableMobi = (uint8_t)atoi(value);
13585
13586 if (enableMobi) {
13587 tune_cmd_t tune_cmd;
13588 tune_cmd.type = SET_RELOAD_CHROMATIX;
13589 tune_cmd.module = MODULE_ALL;
13590 tune_cmd.value = TRUE;
13591 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13592 CAM_INTF_PARM_SET_VFE_COMMAND,
13593 tune_cmd);
13594
13595 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13596 CAM_INTF_PARM_SET_PP_COMMAND,
13597 tune_cmd);
13598 }
13599 m_MobicatMask = enableMobi;
13600
13601 return ret;
13602}
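// Usage note: Mobicat tuning metadata is gated purely by the persist.camera.mobicat
// property read above, e.g.
//   adb shell setprop persist.camera.mobicat 1
// which takes effect the next time setMobicat() runs (typically during camera open)
// and batches the SET_RELOAD_CHROMATIX tune commands for VFE and PP into mParameters.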
13603
13604/*===========================================================================
13605* FUNCTION : getLogLevel
13606*
13607* DESCRIPTION: Reads the log level property into a variable
13608*
13609* PARAMETERS :
13610* None
13611*
13612* RETURN :
13613* None
13614*==========================================================================*/
13615void QCamera3HardwareInterface::getLogLevel()
13616{
13617 char prop[PROPERTY_VALUE_MAX];
13618 uint32_t globalLogLevel = 0;
13619
13620 property_get("persist.camera.hal.debug", prop, "0");
13621 int val = atoi(prop);
13622 if (0 <= val) {
13623 gCamHal3LogLevel = (uint32_t)val;
13624 }
13625
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013626 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013627 gKpiDebugLevel = atoi(prop);
13628
13629 property_get("persist.camera.global.debug", prop, "0");
13630 val = atoi(prop);
13631 if (0 <= val) {
13632 globalLogLevel = (uint32_t)val;
13633 }
13634
13635 /* Highest log level among hal.logs and global.logs is selected */
13636 if (gCamHal3LogLevel < globalLogLevel)
13637 gCamHal3LogLevel = globalLogLevel;
13638
13639 return;
13640}
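// Usage note: log verbosity can be raised at runtime without rebuilding, e.g.
//   adb shell setprop persist.camera.hal.debug 4
//   adb shell setprop persist.camera.global.debug 2
// The effective gCamHal3LogLevel is the higher of the two values and is re-read on
// every call to getLogLevel(), e.g. from dump() when "adb shell dumpsys media.camera"
// is invoked; persist.camera.kpi.debug independently controls gKpiDebugLevel.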
13641
13642/*===========================================================================
13643 * FUNCTION : validateStreamRotations
13644 *
13645 * DESCRIPTION: Check if the rotations requested are supported
13646 *
13647 * PARAMETERS :
13648 * @stream_list : streams to be configured
13649 *
13650 * RETURN : NO_ERROR on success
13651 * -EINVAL on failure
13652 *
13653 *==========================================================================*/
13654int QCamera3HardwareInterface::validateStreamRotations(
13655 camera3_stream_configuration_t *streamList)
13656{
13657 int rc = NO_ERROR;
13658
13659 /*
13660 * Loop through all streams requested in configuration
13661 * Check if unsupported rotations have been requested on any of them
13662 */
13663 for (size_t j = 0; j < streamList->num_streams; j++){
13664 camera3_stream_t *newStream = streamList->streams[j];
13665
13666 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13667 bool isImplDef = (newStream->format ==
13668 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13669 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13670 isImplDef);
13671
13672 if (isRotated && (!isImplDef || isZsl)) {
13673 LOGE("Error: Unsupported rotation of %d requested for stream"
13674 "type:%d and stream format:%d",
13675 newStream->rotation, newStream->stream_type,
13676 newStream->format);
13677 rc = -EINVAL;
13678 break;
13679 }
13680 }
13681
13682 return rc;
13683}
13684
13685/*===========================================================================
13686* FUNCTION : getFlashInfo
13687*
13688* DESCRIPTION: Retrieve information about whether the device has a flash.
13689*
13690* PARAMETERS :
13691* @cameraId : Camera id to query
13692* @hasFlash : Boolean indicating whether there is a flash device
13693* associated with given camera
13694* @flashNode : If a flash device exists, this will be its device node.
13695*
13696* RETURN :
13697* None
13698*==========================================================================*/
13699void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13700 bool& hasFlash,
13701 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13702{
13703 cam_capability_t* camCapability = gCamCapability[cameraId];
13704 if (NULL == camCapability) {
13705 hasFlash = false;
13706 flashNode[0] = '\0';
13707 } else {
13708 hasFlash = camCapability->flash_available;
13709 strlcpy(flashNode,
13710 (char*)camCapability->flash_dev_name,
13711 QCAMERA_MAX_FILEPATH_LENGTH);
13712 }
13713}
13714
13715/*===========================================================================
13716* FUNCTION : getEepromVersionInfo
13717*
13718* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13719*
13720* PARAMETERS : None
13721*
13722* RETURN : string describing EEPROM version
13723* "\0" if no such info available
13724*==========================================================================*/
13725const char *QCamera3HardwareInterface::getEepromVersionInfo()
13726{
13727 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13728}
13729
13730/*===========================================================================
13731* FUNCTION : getLdafCalib
13732*
13733* DESCRIPTION: Retrieve Laser AF calibration data
13734*
13735* PARAMETERS : None
13736*
13737* RETURN : Two uint32_t describing laser AF calibration data
13738* NULL if none is available.
13739*==========================================================================*/
13740const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13741{
13742 if (mLdafCalibExist) {
13743 return &mLdafCalib[0];
13744 } else {
13745 return NULL;
13746 }
13747}
13748
13749/*===========================================================================
13750 * FUNCTION : dynamicUpdateMetaStreamInfo
13751 *
13752 * DESCRIPTION: This function:
13753 * (1) stops all the channels
13754 * (2) returns error on pending requests and buffers
13755 * (3) sends metastream_info in setparams
13756 * (4) starts all channels
13757 * This is useful when sensor has to be restarted to apply any
13758 * settings such as frame rate from a different sensor mode
13759 *
13760 * PARAMETERS : None
13761 *
13762 * RETURN : NO_ERROR on success
13763 * Error codes on failure
13764 *
13765 *==========================================================================*/
13766int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13767{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013768 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013769 int rc = NO_ERROR;
13770
13771 LOGD("E");
13772
13773 rc = stopAllChannels();
13774 if (rc < 0) {
13775 LOGE("stopAllChannels failed");
13776 return rc;
13777 }
13778
13779 rc = notifyErrorForPendingRequests();
13780 if (rc < 0) {
13781 LOGE("notifyErrorForPendingRequests failed");
13782 return rc;
13783 }
13784
13785 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13786 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13787 "Format:%d",
13788 mStreamConfigInfo.type[i],
13789 mStreamConfigInfo.stream_sizes[i].width,
13790 mStreamConfigInfo.stream_sizes[i].height,
13791 mStreamConfigInfo.postprocess_mask[i],
13792 mStreamConfigInfo.format[i]);
13793 }
13794
13795 /* Send meta stream info once again so that ISP can start */
13796 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13797 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13798 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13799 mParameters);
13800 if (rc < 0) {
13801 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13802 }
13803
13804 rc = startAllChannels();
13805 if (rc < 0) {
13806 LOGE("startAllChannels failed");
13807 return rc;
13808 }
13809
13810 LOGD("X");
13811 return rc;
13812}
13813
13814/*===========================================================================
13815 * FUNCTION : stopAllChannels
13816 *
13817 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13818 *
13819 * PARAMETERS : None
13820 *
13821 * RETURN : NO_ERROR on success
13822 * Error codes on failure
13823 *
13824 *==========================================================================*/
13825int32_t QCamera3HardwareInterface::stopAllChannels()
13826{
13827 int32_t rc = NO_ERROR;
13828
13829 LOGD("Stopping all channels");
13830 // Stop the Streams/Channels
13831 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13832 it != mStreamInfo.end(); it++) {
13833 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13834 if (channel) {
13835 channel->stop();
13836 }
13837 (*it)->status = INVALID;
13838 }
13839
13840 if (mSupportChannel) {
13841 mSupportChannel->stop();
13842 }
13843 if (mAnalysisChannel) {
13844 mAnalysisChannel->stop();
13845 }
13846 if (mRawDumpChannel) {
13847 mRawDumpChannel->stop();
13848 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013849 if (mHdrPlusRawSrcChannel) {
13850 mHdrPlusRawSrcChannel->stop();
13851 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013852 if (mMetadataChannel) {
13853 /* If content of mStreamInfo is not 0, there is metadata stream */
13854 mMetadataChannel->stop();
13855 }
13856
13857 LOGD("All channels stopped");
13858 return rc;
13859}
13860
13861/*===========================================================================
13862 * FUNCTION : startAllChannels
13863 *
13864 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13865 *
13866 * PARAMETERS : None
13867 *
13868 * RETURN : NO_ERROR on success
13869 * Error codes on failure
13870 *
13871 *==========================================================================*/
13872int32_t QCamera3HardwareInterface::startAllChannels()
13873{
13874 int32_t rc = NO_ERROR;
13875
13876 LOGD("Start all channels ");
13877 // Start the Streams/Channels
13878 if (mMetadataChannel) {
13879 /* If content of mStreamInfo is not 0, there is metadata stream */
13880 rc = mMetadataChannel->start();
13881 if (rc < 0) {
13882 LOGE("META channel start failed");
13883 return rc;
13884 }
13885 }
13886 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13887 it != mStreamInfo.end(); it++) {
13888 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13889 if (channel) {
13890 rc = channel->start();
13891 if (rc < 0) {
13892 LOGE("channel start failed");
13893 return rc;
13894 }
13895 }
13896 }
13897 if (mAnalysisChannel) {
13898 mAnalysisChannel->start();
13899 }
13900 if (mSupportChannel) {
13901 rc = mSupportChannel->start();
13902 if (rc < 0) {
13903 LOGE("Support channel start failed");
13904 return rc;
13905 }
13906 }
13907 if (mRawDumpChannel) {
13908 rc = mRawDumpChannel->start();
13909 if (rc < 0) {
13910 LOGE("RAW dump channel start failed");
13911 return rc;
13912 }
13913 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013914 if (mHdrPlusRawSrcChannel) {
13915 rc = mHdrPlusRawSrcChannel->start();
13916 if (rc < 0) {
13917 LOGE("HDR+ RAW channel start failed");
13918 return rc;
13919 }
13920 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013921
13922 LOGD("All channels started");
13923 return rc;
13924}
13925
13926/*===========================================================================
13927 * FUNCTION : notifyErrorForPendingRequests
13928 *
13929 * DESCRIPTION: This function sends error for all the pending requests/buffers
13930 *
13931 * PARAMETERS : None
13932 *
13933 * RETURN : Error codes
13934 * NO_ERROR on success
13935 *
13936 *==========================================================================*/
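// Implementation note: requests are walked in frame-number order and mapped to one
// of three error notifications, mirroring the branches below:
//   result metadata already sent, buffers pending   -> CAMERA3_MSG_ERROR_BUFFER
//   buffers already sent, result metadata pending   -> CAMERA3_MSG_ERROR_RESULT
//   neither sent yet                                 -> CAMERA3_MSG_ERROR_REQUEST
// Pending buffers, where present, are returned through mOutputBufferDispatcher with
// CAMERA3_BUFFER_STATUS_ERROR; pending input buffers are returned via orchestrateResult().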
13937int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13938{
Emilian Peev7650c122017-01-19 08:24:33 -080013939 notifyErrorFoPendingDepthData(mDepthChannel);
13940
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013941 auto pendingRequest = mPendingRequestsList.begin();
13942 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070013943
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013944 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
13945 // buffers (for which buffers aren't sent yet).
13946 while (pendingRequest != mPendingRequestsList.end() ||
13947 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
13948 if (pendingRequest == mPendingRequestsList.end() ||
13949 pendingBuffer->frame_number < pendingRequest->frame_number) {
13950 // If metadata for this frame was sent, notify about a buffer error and return buffers
13951 // with error.
13952 for (auto &info : pendingBuffer->mPendingBufferList) {
13953 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070013954 camera3_notify_msg_t notify_msg;
13955 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13956 notify_msg.type = CAMERA3_MSG_ERROR;
13957 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013958 notify_msg.message.error.error_stream = info.stream;
13959 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013960 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013961
13962 camera3_stream_buffer_t buffer = {};
13963 buffer.acquire_fence = -1;
13964 buffer.release_fence = -1;
13965 buffer.buffer = info.buffer;
13966 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
13967 buffer.stream = info.stream;
13968 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070013969 }
13970
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013971 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
13972 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
13973 pendingBuffer->frame_number > pendingRequest->frame_number) {
13974 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070013975 camera3_notify_msg_t notify_msg;
13976 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13977 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013978 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
13979 notify_msg.message.error.error_stream = nullptr;
13980 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013981 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013982
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013983 if (pendingRequest->input_buffer != nullptr) {
13984 camera3_capture_result result = {};
13985 result.frame_number = pendingRequest->frame_number;
13986 result.result = nullptr;
13987 result.input_buffer = pendingRequest->input_buffer;
13988 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013989 }
13990
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013991 mShutterDispatcher.clear(pendingRequest->frame_number);
13992 pendingRequest = mPendingRequestsList.erase(pendingRequest);
13993 } else {
13994 // If both buffers and result metadata weren't sent yet, notify about a request error
13995 // and return buffers with error.
13996 for (auto &info : pendingBuffer->mPendingBufferList) {
13997 camera3_notify_msg_t notify_msg;
13998 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13999 notify_msg.type = CAMERA3_MSG_ERROR;
14000 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14001 notify_msg.message.error.error_stream = info.stream;
14002 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14003 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014004
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014005 camera3_stream_buffer_t buffer = {};
14006 buffer.acquire_fence = -1;
14007 buffer.release_fence = -1;
14008 buffer.buffer = info.buffer;
14009 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14010 buffer.stream = info.stream;
14011 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14012 }
14013
14014 if (pendingRequest->input_buffer != nullptr) {
14015 camera3_capture_result result = {};
14016 result.frame_number = pendingRequest->frame_number;
14017 result.result = nullptr;
14018 result.input_buffer = pendingRequest->input_buffer;
14019 orchestrateResult(&result);
14020 }
14021
14022 mShutterDispatcher.clear(pendingRequest->frame_number);
14023 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14024 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014025 }
14026 }
14027
14028 /* Reset pending frame Drop list and requests list */
14029 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014030 mShutterDispatcher.clear();
14031 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014032 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014033 LOGH("Cleared all the pending buffers ");
14034
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014035 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014036}
14037
14038bool QCamera3HardwareInterface::isOnEncoder(
14039 const cam_dimension_t max_viewfinder_size,
14040 uint32_t width, uint32_t height)
14041{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014042 return ((width > (uint32_t)max_viewfinder_size.width) ||
14043 (height > (uint32_t)max_viewfinder_size.height) ||
14044 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14045 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014046}
14047
14048/*===========================================================================
14049 * FUNCTION : setBundleInfo
14050 *
14051 * DESCRIPTION: Set bundle info for all streams that are bundle.
14052 *
14053 * PARAMETERS : None
14054 *
14055 * RETURN : NO_ERROR on success
14056 * Error codes on failure
14057 *==========================================================================*/
14058int32_t QCamera3HardwareInterface::setBundleInfo()
14059{
14060 int32_t rc = NO_ERROR;
14061
14062 if (mChannelHandle) {
14063 cam_bundle_config_t bundleInfo;
14064 memset(&bundleInfo, 0, sizeof(bundleInfo));
14065 rc = mCameraHandle->ops->get_bundle_info(
14066 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14067 if (rc != NO_ERROR) {
14068 LOGE("get_bundle_info failed");
14069 return rc;
14070 }
14071 if (mAnalysisChannel) {
14072 mAnalysisChannel->setBundleInfo(bundleInfo);
14073 }
14074 if (mSupportChannel) {
14075 mSupportChannel->setBundleInfo(bundleInfo);
14076 }
14077 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14078 it != mStreamInfo.end(); it++) {
14079 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14080 channel->setBundleInfo(bundleInfo);
14081 }
14082 if (mRawDumpChannel) {
14083 mRawDumpChannel->setBundleInfo(bundleInfo);
14084 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014085 if (mHdrPlusRawSrcChannel) {
14086 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14087 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014088 }
14089
14090 return rc;
14091}
14092
14093/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014094 * FUNCTION : setInstantAEC
14095 *
14096 * DESCRIPTION: Set Instant AEC related params.
14097 *
14098 * PARAMETERS :
14099 * @meta: CameraMetadata reference
14100 *
14101 * RETURN : NO_ERROR on success
14102 * Error codes on failure
14103 *==========================================================================*/
14104int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14105{
14106 int32_t rc = NO_ERROR;
14107 uint8_t val = 0;
14108 char prop[PROPERTY_VALUE_MAX];
14109
14110 // First try to configure instant AEC from framework metadata
14111 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14112 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14113 }
14114
14115 // If framework did not set this value, try to read from set prop.
14116 if (val == 0) {
14117 memset(prop, 0, sizeof(prop));
14118 property_get("persist.camera.instant.aec", prop, "0");
14119 val = (uint8_t)atoi(prop);
14120 }
14121
14122 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14123 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14124 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14125 mInstantAEC = val;
14126 mInstantAECSettledFrameNumber = 0;
14127 mInstantAecFrameIdxCount = 0;
14128 LOGH("instantAEC value set %d",val);
14129 if (mInstantAEC) {
14130 memset(prop, 0, sizeof(prop));
14131 property_get("persist.camera.ae.instant.bound", prop, "10");
14132 int32_t aec_frame_skip_cnt = atoi(prop);
14133 if (aec_frame_skip_cnt >= 0) {
14134 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14135 } else {
14136 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14137 rc = BAD_VALUE;
14138 }
14139 }
14140 } else {
14141 LOGE("Bad instant aec value set %d", val);
14142 rc = BAD_VALUE;
14143 }
14144 return rc;
14145}
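// Usage note: instant AEC can also be enabled without framework support through the
// properties read above (the meaning of the numeric value follows the
// cam_instant_aec_mode ordering, which is not spelled out here), e.g.
//   adb shell setprop persist.camera.instant.aec 1
//   adb shell setprop persist.camera.ae.instant.bound 10   # display-skip frame bound
// A non-zero QCAMERA3_INSTANT_AEC_MODE in the framework metadata takes precedence
// over the property.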
14146
14147/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014148 * FUNCTION : get_num_overall_buffers
14149 *
14150 * DESCRIPTION: Estimate number of pending buffers across all requests.
14151 *
14152 * PARAMETERS : None
14153 *
14154 * RETURN : Number of overall pending buffers
14155 *
14156 *==========================================================================*/
14157uint32_t PendingBuffersMap::get_num_overall_buffers()
14158{
14159 uint32_t sum_buffers = 0;
14160 for (auto &req : mPendingBuffersInRequest) {
14161 sum_buffers += req.mPendingBufferList.size();
14162 }
14163 return sum_buffers;
14164}
14165
14166/*===========================================================================
14167 * FUNCTION : removeBuf
14168 *
14169 * DESCRIPTION: Remove a matching buffer from tracker.
14170 *
14171 * PARAMETERS : @buffer: image buffer for the callback
14172 *
14173 * RETURN : None
14174 *
14175 *==========================================================================*/
14176void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14177{
14178 bool buffer_found = false;
14179 for (auto req = mPendingBuffersInRequest.begin();
14180 req != mPendingBuffersInRequest.end(); req++) {
14181 for (auto k = req->mPendingBufferList.begin();
14182 k != req->mPendingBufferList.end(); k++ ) {
14183 if (k->buffer == buffer) {
14184 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14185 req->frame_number, buffer);
14186 k = req->mPendingBufferList.erase(k);
14187 if (req->mPendingBufferList.empty()) {
14188 // Remove this request from Map
14189 req = mPendingBuffersInRequest.erase(req);
14190 }
14191 buffer_found = true;
14192 break;
14193 }
14194 }
14195 if (buffer_found) {
14196 break;
14197 }
14198 }
14199 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14200 get_num_overall_buffers());
14201}
14202
14203/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014204 * FUNCTION : getBufErrStatus
14205 *
14206 * DESCRIPTION: get buffer error status
14207 *
14208 * PARAMETERS : @buffer: buffer handle
14209 *
14210 * RETURN : Error status
14211 *
14212 *==========================================================================*/
14213int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14214{
14215 for (auto& req : mPendingBuffersInRequest) {
14216 for (auto& k : req.mPendingBufferList) {
14217 if (k.buffer == buffer)
14218 return k.bufStatus;
14219 }
14220 }
14221 return CAMERA3_BUFFER_STATUS_OK;
14222}
14223
14224/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014225 * FUNCTION : setPAAFSupport
14226 *
14227 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14228 * feature mask according to stream type and filter
14229 * arrangement
14230 *
14231 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14232 * @stream_type: stream type
14233 * @filter_arrangement: filter arrangement
14234 *
14235 * RETURN : None
14236 *==========================================================================*/
14237void QCamera3HardwareInterface::setPAAFSupport(
14238 cam_feature_mask_t& feature_mask,
14239 cam_stream_type_t stream_type,
14240 cam_color_filter_arrangement_t filter_arrangement)
14241{
Thierry Strudel3d639192016-09-09 11:52:26 -070014242 switch (filter_arrangement) {
14243 case CAM_FILTER_ARRANGEMENT_RGGB:
14244 case CAM_FILTER_ARRANGEMENT_GRBG:
14245 case CAM_FILTER_ARRANGEMENT_GBRG:
14246 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014247 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14248 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014249 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014250 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14251 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014252 }
14253 break;
14254 case CAM_FILTER_ARRANGEMENT_Y:
14255 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14256 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14257 }
14258 break;
14259 default:
14260 break;
14261 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014262 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14263 feature_mask, stream_type, filter_arrangement);
14264
14265
Thierry Strudel3d639192016-09-09 11:52:26 -070014266}
14267
14268/*===========================================================================
14269* FUNCTION : getSensorMountAngle
14270*
14271* DESCRIPTION: Retrieve sensor mount angle
14272*
14273* PARAMETERS : None
14274*
14275* RETURN : sensor mount angle in uint32_t
14276*==========================================================================*/
14277uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14278{
14279 return gCamCapability[mCameraId]->sensor_mount_angle;
14280}
14281
14282/*===========================================================================
14283* FUNCTION : getRelatedCalibrationData
14284*
14285* DESCRIPTION: Retrieve related system calibration data
14286*
14287* PARAMETERS : None
14288*
14289* RETURN : Pointer of related system calibration data
14290*==========================================================================*/
14291const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14292{
14293 return (const cam_related_system_calibration_data_t *)
14294 &(gCamCapability[mCameraId]->related_cam_calibration);
14295}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014296
14297/*===========================================================================
14298 * FUNCTION : is60HzZone
14299 *
14300 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
14301 *
14302 * PARAMETERS : None
14303 *
14304 * RETURN : True if in 60Hz zone, False otherwise
14305 *==========================================================================*/
14306bool QCamera3HardwareInterface::is60HzZone()
14307{
14308 time_t t = time(NULL);
14309 struct tm lt;
14310
14311 struct tm* r = localtime_r(&t, &lt);
14312
14313 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14314 return true;
14315 else
14316 return false;
14317}
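// Worked example of the heuristic above (a rough sketch, not a mains-frequency
// database), with tm_gmtoff expressed in seconds:
//   UTC-05:00 -> -18000 <= -7200             -> true  (60 Hz assumed)
//   UTC+01:00 ->   3600 within (-7200,28800) -> false (50 Hz assumed)
//   UTC+09:00 ->  32400 >= 28800             -> true  (60 Hz assumed)
// A localtime_r() failure also defaults to 60 Hz.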
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014318
14319/*===========================================================================
14320 * FUNCTION : adjustBlackLevelForCFA
14321 *
14322 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14323 * of bayer CFA (Color Filter Array).
14324 *
14325 * PARAMETERS : @input: black level pattern in the order of RGGB
14326 * @output: black level pattern in the order of CFA
14327 * @color_arrangement: CFA color arrangement
14328 *
14329 * RETURN : None
14330 *==========================================================================*/
14331template<typename T>
14332void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14333 T input[BLACK_LEVEL_PATTERN_CNT],
14334 T output[BLACK_LEVEL_PATTERN_CNT],
14335 cam_color_filter_arrangement_t color_arrangement)
14336{
14337 switch (color_arrangement) {
14338 case CAM_FILTER_ARRANGEMENT_GRBG:
14339 output[0] = input[1];
14340 output[1] = input[0];
14341 output[2] = input[3];
14342 output[3] = input[2];
14343 break;
14344 case CAM_FILTER_ARRANGEMENT_GBRG:
14345 output[0] = input[2];
14346 output[1] = input[3];
14347 output[2] = input[0];
14348 output[3] = input[1];
14349 break;
14350 case CAM_FILTER_ARRANGEMENT_BGGR:
14351 output[0] = input[3];
14352 output[1] = input[2];
14353 output[2] = input[1];
14354 output[3] = input[0];
14355 break;
14356 case CAM_FILTER_ARRANGEMENT_RGGB:
14357 output[0] = input[0];
14358 output[1] = input[1];
14359 output[2] = input[2];
14360 output[3] = input[3];
14361 break;
14362 default:
14363 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14364 break;
14365 }
14366}
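// Worked example of the reordering above: with an RGGB-ordered input {R, Gr, Gb, B}
// and a GRBG sensor, the CFA-ordered output becomes {Gr, R, B, Gb}, i.e.
// output[] = {input[1], input[0], input[3], input[2]}. For an RGGB sensor the
// pattern is copied through unchanged.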
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014367
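/*===========================================================================
 * FUNCTION   : updateHdrPlusResultMetadata
 *
 * DESCRIPTION: Copy JPEG and capture-intent related entries (GPS info,
 *              orientation, quality, thumbnail settings) from the HAL metadata
 *              captured with an HDR+ request into the framework result metadata.
 *
 * PARAMETERS :
 *   @resultMetadata: framework result metadata to update
 *   @settings      : HAL metadata buffer associated with the request
 *
 * RETURN     : None
 *==========================================================================*/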
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014368void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14369 CameraMetadata &resultMetadata,
14370 std::shared_ptr<metadata_buffer_t> settings)
14371{
14372 if (settings == nullptr) {
14373 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14374 return;
14375 }
14376
14377 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14378 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14379 }
14380
14381 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14382 String8 str((const char *)gps_methods);
14383 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14384 }
14385
14386 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14387 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14388 }
14389
14390 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14391 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14392 }
14393
14394 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14395 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14396 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14397 }
14398
14399 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14400 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14401 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14402 }
14403
14404 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14405 int32_t fwk_thumb_size[2];
14406 fwk_thumb_size[0] = thumb_size->width;
14407 fwk_thumb_size[1] = thumb_size->height;
14408 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14409 }
14410
14411 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14412 uint8_t fwk_intent = intent[0];
14413 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14414 }
14415}
14416
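/*===========================================================================
 * FUNCTION   : trySubmittingHdrPlusRequestLocked
 *
 * DESCRIPTION: Submit a capture request to the HDR+ service if it qualifies:
 *              noise reduction and edge modes must be HIGH_QUALITY and the
 *              single output buffer must be a BLOB (JPEG) stream. A YUV buffer
 *              is borrowed from the pic channel as the HDR+ output target.
 *
 * PARAMETERS :
 *   @hdrPlusRequest: bookkeeping for the pending HDR+ request (output)
 *   @request       : framework capture request
 *   @metadata      : framework capture settings
 *
 * RETURN     : true if the request was submitted to the HDR+ service
 *              false otherwise
 *==========================================================================*/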
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014417bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14418 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14419 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014420{
14421 if (hdrPlusRequest == nullptr) return false;
14422
14423 // Check noise reduction mode is high quality.
14424 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14425 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14426 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014427 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14428 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014429 return false;
14430 }
14431
14432 // Check edge mode is high quality.
14433 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14434 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14435 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14436 return false;
14437 }
14438
14439 if (request.num_output_buffers != 1 ||
14440 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14441 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014442 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14443 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14444 request.output_buffers[i].stream->width,
14445 request.output_buffers[i].stream->height,
14446 request.output_buffers[i].stream->format);
14447 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014448 return false;
14449 }
14450
14451 // Get a YUV buffer from pic channel.
14452 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14453 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14454 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14455 if (res != OK) {
14456 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14457 __FUNCTION__, strerror(-res), res);
14458 return false;
14459 }
14460
14461 pbcamera::StreamBuffer buffer;
14462 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014463 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014464 buffer.data = yuvBuffer->buffer;
14465 buffer.dataSize = yuvBuffer->frame_len;
14466
14467 pbcamera::CaptureRequest pbRequest;
14468 pbRequest.id = request.frame_number;
14469 pbRequest.outputBuffers.push_back(buffer);
14470
14471 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014472 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014473 if (res != OK) {
14474 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14475 strerror(-res), res);
14476 return false;
14477 }
14478
14479 hdrPlusRequest->yuvBuffer = yuvBuffer;
14480 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14481
14482 return true;
14483}
14484
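/*===========================================================================
 * FUNCTION   : openHdrPlusClientAsyncLocked
 *
 * DESCRIPTION: Kick off an asynchronous open of the HDR+ client through the
 *              Easel manager client, unless a client is already open or an
 *              open is in flight. Completion is reported back to this object.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success or when already open/opening
 *              error code otherwise
 *==========================================================================*/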
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014485status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14486{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014487 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14488 return OK;
14489 }
14490
14491 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14492 if (res != OK) {
14493 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14494 strerror(-res), res);
14495 return res;
14496 }
14497 gHdrPlusClientOpening = true;
14498
14499 return OK;
14500}
14501
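/*===========================================================================
 * FUNCTION   : enableHdrPlusModeLocked
 *
 * DESCRIPTION: Enable ZSL HDR+ mode. If the HDR+ client is not open yet, only
 *              the asynchronous open is started (or awaited) and enabling is
 *              deferred to the open-completion path; otherwise the HDR+ streams
 *              are configured and ZSL HDR+ mode is enabled so Easel starts
 *              capturing ZSL raw buffers.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success, error code otherwise
 *==========================================================================*/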
Chien-Yu Chenee335912017-02-09 17:53:20 -080014502status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14503{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014504 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014505
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014506 if (mHdrPlusModeEnabled) {
14507 return OK;
14508 }
14509
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014510 // Check if gHdrPlusClient is opened or being opened.
14511 if (gHdrPlusClient == nullptr) {
14512 if (gHdrPlusClientOpening) {
14513 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14514 return OK;
14515 }
14516
14517 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014518 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014519 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14520 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014521 return res;
14522 }
14523
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014524 // When opening HDR+ client completes, HDR+ mode will be enabled.
14525 return OK;
14526
Chien-Yu Chenee335912017-02-09 17:53:20 -080014527 }
14528
14529 // Configure streams for HDR+.
14530 res = configureHdrPlusStreamsLocked();
14531 if (res != OK) {
14532 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014533 return res;
14534 }
14535
14536 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14537 res = gHdrPlusClient->setZslHdrPlusMode(true);
14538 if (res != OK) {
14539 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014540 return res;
14541 }
14542
14543 mHdrPlusModeEnabled = true;
14544 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14545
14546 return OK;
14547}
14548
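// Disables HDR+ (ZSL) mode, closes the HDR+ client so Easel can enter low power
// mode, and cancels any in-flight asynchronous client open.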
14549void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14550{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014551 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014552 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014553 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14554 if (res != OK) {
14555 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14556 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014557
14558 // Close HDR+ client so Easel can enter low power mode.
14559 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14560 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014561 }
14562
14563 mHdrPlusModeEnabled = false;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014564 gHdrPlusClientOpening = false;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014565 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14566}
14567
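// Builds the HDR+ client stream configuration: the input is either the HAL's RAW10
// source channel or the sensor MIPI output sent directly to Easel, and the output is
// currently a single YUV stream backed by the picture channel.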
14568status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014569{
14570 pbcamera::InputConfiguration inputConfig;
14571 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14572 status_t res = OK;
14573
14574 // Configure HDR+ client streams.
14575 // Get input config.
14576 if (mHdrPlusRawSrcChannel) {
14577 // HDR+ input buffers will be provided by HAL.
14578 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14579 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14580 if (res != OK) {
14581 LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
14582 __FUNCTION__, strerror(-res), res);
14583 return res;
14584 }
14585
14586 inputConfig.isSensorInput = false;
14587 } else {
14588 // Sensor MIPI will send data to Easel.
14589 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014590 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014591 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14592 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14593 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14594 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14595 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
Yin-Chia Yeheeb10422017-05-23 11:37:46 -070014596 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014597 if (mSensorModeInfo.num_raw_bits != 10) {
14598 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14599 mSensorModeInfo.num_raw_bits);
14600 return BAD_VALUE;
14601 }
14602
14603 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014604 }
14605
14606 // Get output configurations.
14607 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014608 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014609
14610 // Easel may need to output YUV output buffers if mPictureChannel was created.
14611 pbcamera::StreamConfiguration yuvOutputConfig;
14612 if (mPictureChannel != nullptr) {
14613 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14614 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14615 if (res != OK) {
14616 LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14617 __FUNCTION__, strerror(-res), res);
14618
14619 return res;
14620 }
14621
14622 outputStreamConfigs.push_back(yuvOutputConfig);
14623 }
14624
14625 // TODO: consider other channels for YUV output buffers.
14626
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014627 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014628 if (res != OK) {
14629 LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14630 strerror(-res), res);
14631 return res;
14632 }
14633
14634 return OK;
14635}
14636
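// HdrPlusClient listener callback, invoked when the asynchronous client open
// completes. Takes ownership of the client, pushes the static metadata to it, and
// enables HDR+ mode. If HDR+ was disabled while the open was in flight, the newly
// opened client is discarded.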
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014637void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
14638{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014639 if (client == nullptr) {
14640 ALOGE("%s: Opened client is null.", __FUNCTION__);
14641 return;
14642 }
14643
Chien-Yu Chene96475e2017-04-11 11:53:26 -070014644 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014645 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14646
14647 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014648 if (!gHdrPlusClientOpening) {
14649 ALOGW("%s: HDR+ was disabled while the HDR+ client was being opened.", __FUNCTION__);
14650 return;
14651 }
14652
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014653 gHdrPlusClient = std::move(client);
14654 gHdrPlusClientOpening = false;
14655
14656 // Set static metadata.
14657 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14658 if (res != OK) {
14659 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14660 __FUNCTION__, strerror(-res), res);
14661 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14662 gHdrPlusClient = nullptr;
14663 return;
14664 }
14665
14666 // Enable HDR+ mode.
14667 res = enableHdrPlusModeLocked();
14668 if (res != OK) {
14669 LOGE("%s: Failed to enable HDR+ mode: %s (%d).", __FUNCTION__, strerror(-res), res);
14670 }
14671}
14672
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014673void QCamera3HardwareInterface::onOpenFailed(status_t err)
14674{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014675 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14676 Mutex::Autolock l(gHdrPlusClientLock);
14677 gHdrPlusClientOpening = false;
14678}
14679
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014680void QCamera3HardwareInterface::onFatalError()
14681{
14682 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
14683
14684 // Set HAL state to error.
14685 pthread_mutex_lock(&mMutex);
14686 mState = ERROR;
14687 pthread_mutex_unlock(&mMutex);
14688
14689 handleCameraDeviceError();
14690}
14691
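// HdrPlusClient listener callback, invoked when an HDR+ capture result is ready.
// The YUV output buffer is handed back to the picture channel for JPEG encoding,
// the result metadata is merged with the original request settings, and the shutter
// and result metadata are dispatched to the framework.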
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014692void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014693 const camera_metadata_t &resultMetadata)
14694{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014695 if (result != nullptr) {
14696 if (result->outputBuffers.size() != 1) {
14697 ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
14698 result->outputBuffers.size());
14699 return;
14700 }
14701
14702 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14703 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14704 result->outputBuffers[0].streamId);
14705 return;
14706 }
14707
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014708 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014709 HdrPlusPendingRequest pendingRequest;
14710 {
14711 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14712 auto req = mHdrPlusPendingRequests.find(result->requestId);
 if (req == mHdrPlusPendingRequests.end()) {
 ALOGE("%s: Cannot find a pending HDR+ request for request %d.", __FUNCTION__,
 result->requestId);
 return;
 }
14713 pendingRequest = req->second;
14714 }
14715
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014716 // Update the result metadata with the settings of the HDR+ still capture request because
14717 // the result metadata belongs to a ZSL buffer.
14718 CameraMetadata metadata;
14719 metadata = &resultMetadata;
14720 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14721 camera_metadata_t* updatedResultMetadata = metadata.release();
14722
14723 QCamera3PicChannel *picChannel =
14724 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14725
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014726 // Check if dumping HDR+ YUV output is enabled.
14727 char prop[PROPERTY_VALUE_MAX];
14728 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14729 bool dumpYuvOutput = atoi(prop);
14730
14731 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014732 // Dump yuv buffer to a ppm file.
14733 pbcamera::StreamConfiguration outputConfig;
14734 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14735 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14736 if (rc == OK) {
14737 char buf[FILENAME_MAX] = {};
14738 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14739 result->requestId, result->outputBuffers[0].streamId,
14740 outputConfig.image.width, outputConfig.image.height);
14741
14742 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14743 } else {
14744 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14745 __FUNCTION__, strerror(-rc), rc);
14746 }
14747 }
14748
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014749 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14750 auto halMetadata = std::make_shared<metadata_buffer_t>();
14751 clear_metadata_buffer(halMetadata.get());
14752
14753 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14754 // encoding.
14755 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14756 halStreamId, /*minFrameDuration*/0);
14757 if (res == OK) {
14758 // Return the buffer to pic channel for encoding.
14759 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14760 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14761 halMetadata);
14762 } else {
14763 // Return the buffer without encoding.
14764 // TODO: This should not happen but we may want to report an error buffer to camera
14765 // service.
14766 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14767 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14768 strerror(-res), res);
14769 }
14770
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014771 // Find the timestamp
14772 camera_metadata_ro_entry_t entry;
14773 res = find_camera_metadata_ro_entry(updatedResultMetadata,
14774 ANDROID_SENSOR_TIMESTAMP, &entry);
14775 if (res != OK) {
14776 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
14777 __FUNCTION__, result->requestId, strerror(-res), res);
14778 } else {
14779 mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
14780 }
14781
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014782 // Send HDR+ metadata to framework.
14783 {
14784 pthread_mutex_lock(&mMutex);
14785
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014786 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
14787 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014788 pthread_mutex_unlock(&mMutex);
14789 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014790
14791 // Remove the HDR+ pending request.
14792 {
14793 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14794 auto req = mHdrPlusPendingRequests.find(result->requestId);
14795 mHdrPlusPendingRequests.erase(req);
14796 }
14797 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014798}
14799
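// HdrPlusClient listener callback, invoked when an HDR+ capture request fails.
// Returns the YUV buffer to the picture channel and reports buffer errors to the
// framework for all pending output buffers of that frame number.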
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014800void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
14801{
14802 if (failedResult == nullptr) {
14803 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
14804 return;
14805 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014806
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014807 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014808
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014809 // Remove the pending HDR+ request.
14810 {
14811 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14812 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14813
14814 // Return the buffer to pic channel.
14815 QCamera3PicChannel *picChannel =
14816 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14817 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14818
14819 mHdrPlusPendingRequests.erase(pendingRequest);
14820 }
14821
14822 pthread_mutex_lock(&mMutex);
14823
14824 // Find the pending buffers.
14825 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
14826 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14827 if (pendingBuffers->frame_number == failedResult->requestId) {
14828 break;
14829 }
14830 pendingBuffers++;
14831 }
14832
14833 // Send out buffer errors for the pending buffers.
14834 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14835 std::vector<camera3_stream_buffer_t> streamBuffers;
14836 for (auto &buffer : pendingBuffers->mPendingBufferList) {
14837 // Prepare a stream buffer.
14838 camera3_stream_buffer_t streamBuffer = {};
14839 streamBuffer.stream = buffer.stream;
14840 streamBuffer.buffer = buffer.buffer;
14841 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14842 streamBuffer.acquire_fence = -1;
14843 streamBuffer.release_fence = -1;
14844
14845 streamBuffers.push_back(streamBuffer);
14846
14847 // Send out error buffer event.
14848 camera3_notify_msg_t notify_msg = {};
14849 notify_msg.type = CAMERA3_MSG_ERROR;
14850 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
14851 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
14852 notify_msg.message.error.error_stream = buffer.stream;
14853
14854 orchestrateNotify(&notify_msg);
14855 }
14856
14857 camera3_capture_result_t result = {};
14858 result.frame_number = pendingBuffers->frame_number;
14859 result.num_output_buffers = streamBuffers.size();
14860 result.output_buffers = &streamBuffers[0];
14861
14862 // Send out result with buffer errors.
14863 orchestrateResult(&result);
14864
14865 // Remove pending buffers.
14866 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
14867 }
14868
14869 // Remove pending request.
14870 auto halRequest = mPendingRequestsList.begin();
14871 while (halRequest != mPendingRequestsList.end()) {
14872 if (halRequest->frame_number == failedResult->requestId) {
14873 mPendingRequestsList.erase(halRequest);
14874 break;
14875 }
14876 halRequest++;
14877 }
14878
14879 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014880}
14881
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014882
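// ShutterDispatcher keeps shutter notifications in frame-number order: a shutter is
// registered with expectShutter(), marked ready with markShutterReady(), and only
// sent to the framework once the shutters of all earlier frames have been sent.
// Typical flow (a sketch, based on how the dispatcher is used in this file):
//   expectShutter(frameNumber);               // when a capture request is queued
//   markShutterReady(frameNumber, timestamp); // when the sensor timestamp is known
//   clear();                                  // on flush or teardown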
14883ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
14884 mParent(parent) {}
14885
14886void ShutterDispatcher::expectShutter(uint32_t frameNumber)
14887{
14888 std::lock_guard<std::mutex> lock(mLock);
14889 mShutters.emplace(frameNumber, Shutter());
14890}
14891
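// Marks the shutter for frameNumber as ready, then flushes, in order, every leading
// shutter that is ready. Shutters queued behind a frame whose timestamp has not
// arrived yet are held back so notifications stay in frame-number order.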
14892void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
14893{
14894 std::lock_guard<std::mutex> lock(mLock);
14895
14896 // Make this frame's shutter ready.
14897 auto shutter = mShutters.find(frameNumber);
14898 if (shutter == mShutters.end()) {
14899 // Shutter was already sent.
14900 return;
14901 }
14902
14903 shutter->second.ready = true;
14904 shutter->second.timestamp = timestamp;
14905
14906 // Iterate through the shutters in order and send them out until reaching one that is not ready yet.
14907 shutter = mShutters.begin();
14908 while (shutter != mShutters.end()) {
14909 if (!shutter->second.ready) {
14910 // If this shutter is not ready, the following shutters can't be sent.
14911 break;
14912 }
14913
14914 camera3_notify_msg_t msg = {};
14915 msg.type = CAMERA3_MSG_SHUTTER;
14916 msg.message.shutter.frame_number = shutter->first;
14917 msg.message.shutter.timestamp = shutter->second.timestamp;
14918 mParent->orchestrateNotify(&msg);
14919
14920 shutter = mShutters.erase(shutter);
14921 }
14922}
14923
14924void ShutterDispatcher::clear(uint32_t frameNumber)
14925{
14926 std::lock_guard<std::mutex> lock(mLock);
14927 mShutters.erase(frameNumber);
14928}
14929
14930void ShutterDispatcher::clear()
14931{
14932 std::lock_guard<std::mutex> lock(mLock);
14933
14934 // Log errors for stale shutters.
14935 for (auto &shutter : mShutters) {
14936 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
14937 __FUNCTION__, shutter.first, shutter.second.ready,
14938 shutter.second.timestamp);
14939 }
14940 mShutters.clear();
14941}
14942
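// OutputBufferDispatcher does for output buffers what ShutterDispatcher does for
// shutters: for each configured stream, buffers are registered with expectBuffer()
// and returned to the framework in frame-number order once marked ready, so a stream
// never delivers frame N+1 before frame N.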
14943OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
14944 mParent(parent) {}
14945
14946status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
14947{
14948 std::lock_guard<std::mutex> lock(mLock);
14949 mStreamBuffers.clear();
14950 if (!streamList) {
14951 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
14952 return -EINVAL;
14953 }
14954
14955 // Create a "frame-number -> buffer" map for each stream.
14956 for (uint32_t i = 0; i < streamList->num_streams; i++) {
14957 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
14958 }
14959
14960 return OK;
14961}
14962
14963status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
14964{
14965 std::lock_guard<std::mutex> lock(mLock);
14966
14967 // Find the "frame-number -> buffer" map for the stream.
14968 auto buffers = mStreamBuffers.find(stream);
14969 if (buffers == mStreamBuffers.end()) {
14970 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
14971 return -EINVAL;
14972 }
14973
14974 // Create an unready buffer for this frame number.
14975 buffers->second.emplace(frameNumber, Buffer());
14976 return OK;
14977}
14978
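// Marks the buffer for frameNumber on this stream as ready, then sends out every
// leading ready buffer of the stream as an individual capture result, stopping at
// the first buffer that is still outstanding.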
14979void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
14980 const camera3_stream_buffer_t &buffer)
14981{
14982 std::lock_guard<std::mutex> lock(mLock);
14983
14984 // Find the frame number -> buffer map for the stream.
14985 auto buffers = mStreamBuffers.find(buffer.stream);
14986 if (buffers == mStreamBuffers.end()) {
14987 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
14988 return;
14989 }
14990
14991 // Find the unready buffer for this frame number and mark it ready.
14992 auto pendingBuffer = buffers->second.find(frameNumber);
14993 if (pendingBuffer == buffers->second.end()) {
14994 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
14995 return;
14996 }
14997
14998 pendingBuffer->second.ready = true;
14999 pendingBuffer->second.buffer = buffer;
15000
15001 // Iterate through the buffers in order and send them out until reaching one that is not ready yet.
15002 pendingBuffer = buffers->second.begin();
15003 while (pendingBuffer != buffers->second.end()) {
15004 if (!pendingBuffer->second.ready) {
15005 // If this buffer is not ready, the following buffers can't be sent.
15006 break;
15007 }
15008
15009 camera3_capture_result_t result = {};
15010 result.frame_number = pendingBuffer->first;
15011 result.num_output_buffers = 1;
15012 result.output_buffers = &pendingBuffer->second.buffer;
15013
15014 // Send out the capture result containing this ready buffer.
15015 mParent->orchestrateResult(&result);
15016
15017 pendingBuffer = buffers->second.erase(pendingBuffer);
15018 }
15019}
15020
15021void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15022{
15023 std::lock_guard<std::mutex> lock(mLock);
15024
15025 // Log errors for stale buffers.
15026 for (auto &buffers : mStreamBuffers) {
15027 for (auto &buffer : buffers.second) {
15028 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15029 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15030 }
15031 buffers.second.clear();
15032 }
15033
15034 if (clearConfiguredStreams) {
15035 mStreamBuffers.clear();
15036 }
15037}
15038
Thierry Strudel3d639192016-09-09 11:52:26 -070015039}; //end namespace qcamera