blob: 84ccdbef1534dc081c974b39940d8c78fd528621 [file] [log] [blame]
Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
Shuzhen Wangf6890e02016-08-12 14:28:54 -070045#include <time.h>
Thierry Strudel3d639192016-09-09 11:52:26 -070046#include <sync/sync.h>
47#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080048#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070049
50// Display dependencies
51#include "qdMetaData.h"
52
53// Camera dependencies
54#include "android/QCamera3External.h"
55#include "util/QCameraFlash.h"
56#include "QCamera3HWI.h"
57#include "QCamera3VendorTags.h"
58#include "QCameraTrace.h"
59
Chien-Yu Chene687bd02016-12-07 18:30:26 -080060#include "HdrPlusClientUtils.h"
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070061#include "EaselManagerClient.h"
Chien-Yu Chene687bd02016-12-07 18:30:26 -080062
Thierry Strudel3d639192016-09-09 11:52:26 -070063extern "C" {
64#include "mm_camera_dbg.h"
65}
Shuzhen Wangfb961e52016-11-28 11:48:02 -080066#include "cam_cond.h"
Thierry Strudel3d639192016-09-09 11:52:26 -070067
Jiyong Parkd4caeb72017-06-12 17:16:36 +090068using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070069using namespace android;
70
71namespace qcamera {
72
73#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
74
75#define EMPTY_PIPELINE_DELAY 2
76#define PARTIAL_RESULT_COUNT 2
77#define FRAME_SKIP_DELAY 0
78
79#define MAX_VALUE_8BIT ((1<<8)-1)
80#define MAX_VALUE_10BIT ((1<<10)-1)
81#define MAX_VALUE_12BIT ((1<<12)-1)
82
83#define VIDEO_4K_WIDTH 3840
84#define VIDEO_4K_HEIGHT 2160
85
Jason Leeb9e76432017-03-10 17:14:19 -080086#define MAX_EIS_WIDTH 3840
87#define MAX_EIS_HEIGHT 2160
Thierry Strudel3d639192016-09-09 11:52:26 -070088
89#define MAX_RAW_STREAMS 1
90#define MAX_STALLING_STREAMS 1
91#define MAX_PROCESSED_STREAMS 3
92/* Batch mode is enabled only if FPS set is equal to or greater than this */
93#define MIN_FPS_FOR_BATCH_MODE (120)
94#define PREVIEW_FPS_FOR_HFR (30)
95#define DEFAULT_VIDEO_FPS (30.0)
Thierry Strudele80ad7c2016-12-06 10:16:27 -080096#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
Thierry Strudel3d639192016-09-09 11:52:26 -070097#define MAX_HFR_BATCH_SIZE (8)
98#define REGIONS_TUPLE_COUNT 5
99#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
Thierry Strudel3d639192016-09-09 11:52:26 -0700100// Set a threshold for detection of missing buffers //seconds
101#define MISSING_REQUEST_BUF_TIMEOUT 3
Chien-Yu Chene687bd02016-12-07 18:30:26 -0800102#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
Thierry Strudel3d639192016-09-09 11:52:26 -0700103#define FLUSH_TIMEOUT 3
104#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
105
106#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
107 CAM_QCOM_FEATURE_CROP |\
108 CAM_QCOM_FEATURE_ROTATION |\
109 CAM_QCOM_FEATURE_SHARPNESS |\
110 CAM_QCOM_FEATURE_SCALE |\
111 CAM_QCOM_FEATURE_CAC |\
112 CAM_QCOM_FEATURE_CDS )
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700113/* Per configuration size for static metadata length*/
114#define PER_CONFIGURATION_SIZE_3 (3)
Thierry Strudel3d639192016-09-09 11:52:26 -0700115
116#define TIMEOUT_NEVER -1
117
Jason Lee8ce36fa2017-04-19 19:40:37 -0700118/* Face rect indices */
119#define FACE_LEFT 0
120#define FACE_TOP 1
121#define FACE_RIGHT 2
122#define FACE_BOTTOM 3
123#define FACE_WEIGHT 4
124
Thierry Strudel04e026f2016-10-10 11:27:36 -0700125/* Face landmarks indices */
126#define LEFT_EYE_X 0
127#define LEFT_EYE_Y 1
128#define RIGHT_EYE_X 2
129#define RIGHT_EYE_Y 3
130#define MOUTH_X 4
131#define MOUTH_Y 5
132#define TOTAL_LANDMARK_INDICES 6
133
Zhijun He2a5df222017-04-04 18:20:38 -0700134// Max preferred zoom
Zhijun He76870072017-05-08 17:13:17 -0700135#define MAX_PREFERRED_ZOOM_RATIO 7.0
Zhijun He2a5df222017-04-04 18:20:38 -0700136
Chien-Yu Chen3b630e52017-06-02 15:39:47 -0700137// TODO: Enable HDR+ for front camera after it's supported. b/37100623.
138#define ENABLE_HDRPLUS_FOR_FRONT_CAMERA 0
139
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700140// Whether to check for the GPU stride padding, or use the default
141//#define CHECK_GPU_PIXEL_ALIGNMENT
142
Thierry Strudel3d639192016-09-09 11:52:26 -0700143cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
144const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
145extern pthread_mutex_t gCamLock;
146volatile uint32_t gCamHal3LogLevel = 1;
147extern uint8_t gNumCameraSessions;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700148
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800149// Note that this doesn't support concurrent front and back camera b/35960155.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700150// The following Easel related variables must be protected by gHdrPlusClientLock.
151EaselManagerClient gEaselManagerClient;
152bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
153std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
154bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
Chien-Yu Chen509314b2017-04-07 15:27:55 -0700155bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -0700156bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700157
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800158// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
159bool gEaselBypassOnly;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700160
161Mutex gHdrPlusClientLock; // Protect above Easel related variables.
162
Thierry Strudel3d639192016-09-09 11:52:26 -0700163
164const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
165 {"On", CAM_CDS_MODE_ON},
166 {"Off", CAM_CDS_MODE_OFF},
167 {"Auto",CAM_CDS_MODE_AUTO}
168};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700169const QCamera3HardwareInterface::QCameraMap<
170 camera_metadata_enum_android_video_hdr_mode_t,
171 cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
172 { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
173 { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
174};
175
Thierry Strudel54dc9782017-02-15 12:12:10 -0800176const QCamera3HardwareInterface::QCameraMap<
177 camera_metadata_enum_android_binning_correction_mode_t,
178 cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
179 { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
180 { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
181};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700182
183const QCamera3HardwareInterface::QCameraMap<
184 camera_metadata_enum_android_ir_mode_t,
185 cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
186 {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
187 {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
188 {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
189};
Thierry Strudel3d639192016-09-09 11:52:26 -0700190
191const QCamera3HardwareInterface::QCameraMap<
192 camera_metadata_enum_android_control_effect_mode_t,
193 cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
194 { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
195 { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
196 { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
197 { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
198 { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
199 { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
200 { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
201 { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
202 { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
203};
204
205const QCamera3HardwareInterface::QCameraMap<
206 camera_metadata_enum_android_control_awb_mode_t,
207 cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
208 { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
209 { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
210 { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
211 { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
212 { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
213 { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
214 { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
215 { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
216 { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
217};
218
219const QCamera3HardwareInterface::QCameraMap<
220 camera_metadata_enum_android_control_scene_mode_t,
221 cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
222 { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
223 { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
224 { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
225 { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
226 { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
227 { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
228 { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
229 { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
230 { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
231 { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
232 { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
233 { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
234 { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
235 { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
236 { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800237 { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
238 { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
Thierry Strudel3d639192016-09-09 11:52:26 -0700239};
240
241const QCamera3HardwareInterface::QCameraMap<
242 camera_metadata_enum_android_control_af_mode_t,
243 cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
244 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
245 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
246 { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
247 { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
248 { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
249 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
250 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
251};
252
253const QCamera3HardwareInterface::QCameraMap<
254 camera_metadata_enum_android_color_correction_aberration_mode_t,
255 cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
256 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
257 CAM_COLOR_CORRECTION_ABERRATION_OFF },
258 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
259 CAM_COLOR_CORRECTION_ABERRATION_FAST },
260 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
261 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
262};
263
264const QCamera3HardwareInterface::QCameraMap<
265 camera_metadata_enum_android_control_ae_antibanding_mode_t,
266 cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
267 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
268 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
269 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
270 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
271};
272
273const QCamera3HardwareInterface::QCameraMap<
274 camera_metadata_enum_android_control_ae_mode_t,
275 cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
276 { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
277 { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
278 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
279 { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
280 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
281};
282
283const QCamera3HardwareInterface::QCameraMap<
284 camera_metadata_enum_android_flash_mode_t,
285 cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
286 { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
287 { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
288 { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
289};
290
291const QCamera3HardwareInterface::QCameraMap<
292 camera_metadata_enum_android_statistics_face_detect_mode_t,
293 cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
294 { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
295 { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
296 { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
297};
298
299const QCamera3HardwareInterface::QCameraMap<
300 camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
301 cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
302 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
303 CAM_FOCUS_UNCALIBRATED },
304 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
305 CAM_FOCUS_APPROXIMATE },
306 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
307 CAM_FOCUS_CALIBRATED }
308};
309
310const QCamera3HardwareInterface::QCameraMap<
311 camera_metadata_enum_android_lens_state_t,
312 cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
313 { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
314 { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
315};
316
317const int32_t available_thumbnail_sizes[] = {0, 0,
318 176, 144,
319 240, 144,
320 256, 144,
321 240, 160,
322 256, 154,
323 240, 240,
324 320, 240};
325
326const QCamera3HardwareInterface::QCameraMap<
327 camera_metadata_enum_android_sensor_test_pattern_mode_t,
328 cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
329 { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
330 { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
331 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
332 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
333 { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
334 { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
335};
336
337/* Since there is no mapping for all the options some Android enum are not listed.
338 * Also, the order in this list is important because while mapping from HAL to Android it will
339 * traverse from lower to higher index which means that for HAL values that are map to different
340 * Android values, the traverse logic will select the first one found.
341 */
342const QCamera3HardwareInterface::QCameraMap<
343 camera_metadata_enum_android_sensor_reference_illuminant1_t,
344 cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
345 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
346 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
347 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
348 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
349 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
350 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
351 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
352 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
353 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
354 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
355 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
356 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
357 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
358 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
359 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
360 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
361};
362
363const QCamera3HardwareInterface::QCameraMap<
364 int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
365 { 60, CAM_HFR_MODE_60FPS},
366 { 90, CAM_HFR_MODE_90FPS},
367 { 120, CAM_HFR_MODE_120FPS},
368 { 150, CAM_HFR_MODE_150FPS},
369 { 180, CAM_HFR_MODE_180FPS},
370 { 210, CAM_HFR_MODE_210FPS},
371 { 240, CAM_HFR_MODE_240FPS},
372 { 480, CAM_HFR_MODE_480FPS},
373};
374
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700375const QCamera3HardwareInterface::QCameraMap<
376 qcamera3_ext_instant_aec_mode_t,
377 cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
378 { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
379 { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
380 { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
381};
Thierry Strudel54dc9782017-02-15 12:12:10 -0800382
383const QCamera3HardwareInterface::QCameraMap<
384 qcamera3_ext_exposure_meter_mode_t,
385 cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
386 { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
387 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
388 { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
389 { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
390 { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
391 { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
392 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
393};
394
395const QCamera3HardwareInterface::QCameraMap<
396 qcamera3_ext_iso_mode_t,
397 cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
398 { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
399 { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
400 { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
401 { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
402 { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
403 { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
404 { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
405 { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
406};
407
Thierry Strudel3d639192016-09-09 11:52:26 -0700408camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
409 .initialize = QCamera3HardwareInterface::initialize,
410 .configure_streams = QCamera3HardwareInterface::configure_streams,
411 .register_stream_buffers = NULL,
412 .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
413 .process_capture_request = QCamera3HardwareInterface::process_capture_request,
414 .get_metadata_vendor_tag_ops = NULL,
415 .dump = QCamera3HardwareInterface::dump,
416 .flush = QCamera3HardwareInterface::flush,
417 .reserved = {0},
418};
419
420// initialise to some default value
421uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
422
Chien-Yu Chen509314b2017-04-07 15:27:55 -0700423static inline void logEaselEvent(const char *tag, const char *event) {
424 if (CC_UNLIKELY(gEaselProfilingEnabled)) {
425 struct timespec ts = {};
426 static int64_t kMsPerSec = 1000;
427 static int64_t kNsPerMs = 1000000;
428 status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
429 if (res != OK) {
430 ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
431 } else {
432 int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
433 ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
434 }
435 }
436}
437
Thierry Strudel3d639192016-09-09 11:52:26 -0700438/*===========================================================================
439 * FUNCTION : QCamera3HardwareInterface
440 *
441 * DESCRIPTION: constructor of QCamera3HardwareInterface
442 *
443 * PARAMETERS :
444 * @cameraId : camera ID
445 *
446 * RETURN : none
447 *==========================================================================*/
448QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
449 const camera_module_callbacks_t *callbacks)
450 : mCameraId(cameraId),
451 mCameraHandle(NULL),
452 mCameraInitialized(false),
453 mCallbackOps(NULL),
454 mMetadataChannel(NULL),
455 mPictureChannel(NULL),
456 mRawChannel(NULL),
457 mSupportChannel(NULL),
458 mAnalysisChannel(NULL),
459 mRawDumpChannel(NULL),
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700460 mHdrPlusRawSrcChannel(NULL),
Thierry Strudel3d639192016-09-09 11:52:26 -0700461 mDummyBatchChannel(NULL),
Emilian Peev7650c122017-01-19 08:24:33 -0800462 mDepthChannel(NULL),
Emilian Peev656e4fa2017-06-02 16:47:04 +0100463 mDepthCloudMode(CAM_PD_DATA_SKIP),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800464 mPerfLockMgr(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700465 mChannelHandle(0),
466 mFirstConfiguration(true),
467 mFlush(false),
468 mFlushPerf(false),
469 mParamHeap(NULL),
470 mParameters(NULL),
471 mPrevParameters(NULL),
472 m_bIsVideo(false),
473 m_bIs4KVideo(false),
474 m_bEisSupportedSize(false),
475 m_bEisEnable(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800476 m_bEis3PropertyEnabled(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700477 m_MobicatMask(0),
Chien-Yu Chen3f303522017-05-19 15:21:45 -0700478 mShutterDispatcher(this),
479 mOutputBufferDispatcher(this),
Thierry Strudel3d639192016-09-09 11:52:26 -0700480 mMinProcessedFrameDuration(0),
481 mMinJpegFrameDuration(0),
482 mMinRawFrameDuration(0),
483 mMetaFrameCount(0U),
484 mUpdateDebugLevel(false),
485 mCallbacks(callbacks),
486 mCaptureIntent(0),
487 mCacMode(0),
Shuzhen Wang2abea3d2016-03-31 11:09:27 -0700488 mHybridAeEnable(0),
Samuel Ha68ba5172016-12-15 18:41:12 -0800489 /* DevCamDebug metadata internal m control*/
490 mDevCamDebugMetaEnable(0),
491 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -0700492 mBatchSize(0),
493 mToBeQueuedVidBufs(0),
494 mHFRVideoFps(DEFAULT_VIDEO_FPS),
495 mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800496 mStreamConfig(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800497 mCommon(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700498 mFirstFrameNumberInBatch(0),
499 mNeedSensorRestart(false),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800500 mPreviewStarted(false),
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700501 mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
502 mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
Emilian Peev0f3c3162017-03-15 12:57:46 +0000503 mPDSupported(false),
504 mPDIndex(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700505 mInstantAEC(false),
506 mResetInstantAEC(false),
507 mInstantAECSettledFrameNumber(0),
508 mAecSkipDisplayFrameBound(0),
509 mInstantAecFrameIdxCount(0),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800510 mCurrFeatureState(0),
Thierry Strudel3d639192016-09-09 11:52:26 -0700511 mLdafCalibExist(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700512 mLastCustIntentFrmNum(-1),
Shuzhen Wang3c077d72017-04-20 22:48:59 -0700513 mFirstMetadataCallback(true),
Thierry Strudel3d639192016-09-09 11:52:26 -0700514 mState(CLOSED),
515 mIsDeviceLinked(false),
516 mIsMainCamera(true),
517 mLinkedCameraId(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700518 m_pDualCamCmdHeap(NULL),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800519 m_pDualCamCmdPtr(NULL),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800520 mHdrPlusModeEnabled(false),
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -0700521 mZslEnabled(false),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800522 mIsApInputUsedForHdrPlus(false),
523 mFirstPreviewIntentSeen(false),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800524 m_bSensorHDREnabled(false)
Thierry Strudel3d639192016-09-09 11:52:26 -0700525{
526 getLogLevel();
Thierry Strudel3d639192016-09-09 11:52:26 -0700527 mCommon.init(gCamCapability[cameraId]);
528 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700529#ifndef USE_HAL_3_3
530 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
531#else
Thierry Strudel3d639192016-09-09 11:52:26 -0700532 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700533#endif
Thierry Strudel3d639192016-09-09 11:52:26 -0700534 mCameraDevice.common.close = close_camera_device;
535 mCameraDevice.ops = &mCameraOps;
536 mCameraDevice.priv = this;
537 gCamCapability[cameraId]->version = CAM_HAL_V3;
538 // TODO: hardcode for now until mctl add support for min_num_pp_bufs
539 //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
540 gCamCapability[cameraId]->min_num_pp_bufs = 3;
541
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800542 PTHREAD_COND_INIT(&mBuffersCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700543
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800544 PTHREAD_COND_INIT(&mRequestCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700545 mPendingLiveRequest = 0;
546 mCurrentRequestId = -1;
547 pthread_mutex_init(&mMutex, NULL);
548
549 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
550 mDefaultMetadata[i] = NULL;
551
552 // Getting system props of different kinds
553 char prop[PROPERTY_VALUE_MAX];
554 memset(prop, 0, sizeof(prop));
555 property_get("persist.camera.raw.dump", prop, "0");
556 mEnableRawDump = atoi(prop);
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800557 property_get("persist.camera.hal3.force.hdr", prop, "0");
558 mForceHdrSnapshot = atoi(prop);
559
Thierry Strudel3d639192016-09-09 11:52:26 -0700560 if (mEnableRawDump)
561 LOGD("Raw dump from Camera HAL enabled");
562
563 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
564 memset(mLdafCalib, 0, sizeof(mLdafCalib));
565
566 memset(prop, 0, sizeof(prop));
567 property_get("persist.camera.tnr.preview", prop, "0");
568 m_bTnrPreview = (uint8_t)atoi(prop);
569
570 memset(prop, 0, sizeof(prop));
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800571 property_get("persist.camera.swtnr.preview", prop, "1");
572 m_bSwTnrPreview = (uint8_t)atoi(prop);
573
574 memset(prop, 0, sizeof(prop));
Binhao Lincdb362a2017-04-20 13:31:54 -0700575 property_get("persist.camera.tnr.video", prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -0700576 m_bTnrVideo = (uint8_t)atoi(prop);
577
578 memset(prop, 0, sizeof(prop));
579 property_get("persist.camera.avtimer.debug", prop, "0");
580 m_debug_avtimer = (uint8_t)atoi(prop);
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800581 LOGI("AV timer enabled: %d", m_debug_avtimer);
Thierry Strudel3d639192016-09-09 11:52:26 -0700582
Thierry Strudel54dc9782017-02-15 12:12:10 -0800583 memset(prop, 0, sizeof(prop));
584 property_get("persist.camera.cacmode.disable", prop, "0");
585 m_cacModeDisabled = (uint8_t)atoi(prop);
586
Thierry Strudel3d639192016-09-09 11:52:26 -0700587 //Load and read GPU library.
588 lib_surface_utils = NULL;
589 LINK_get_surface_pixel_alignment = NULL;
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700590 mSurfaceStridePadding = CAM_PAD_TO_64;
591#ifdef CHECK_GPU_PIXEL_ALIGNMENT
Thierry Strudel3d639192016-09-09 11:52:26 -0700592 lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
593 if (lib_surface_utils) {
594 *(void **)&LINK_get_surface_pixel_alignment =
595 dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
596 if (LINK_get_surface_pixel_alignment) {
597 mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
598 }
599 dlclose(lib_surface_utils);
600 }
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700601#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +0000602 mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
603 mPDSupported = (0 <= mPDIndex) ? true : false;
604
Shuzhen Wangf6890e02016-08-12 14:28:54 -0700605 m60HzZone = is60HzZone();
Thierry Strudel3d639192016-09-09 11:52:26 -0700606}
607
608/*===========================================================================
609 * FUNCTION : ~QCamera3HardwareInterface
610 *
611 * DESCRIPTION: destructor of QCamera3HardwareInterface
612 *
613 * PARAMETERS : none
614 *
615 * RETURN : none
616 *==========================================================================*/
617QCamera3HardwareInterface::~QCamera3HardwareInterface()
618{
619 LOGD("E");
620
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800621 int32_t rc = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -0700622
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800623 // Disable power hint and enable the perf lock for close camera
624 mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
625 mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
626
627 // unlink of dualcam during close camera
628 if (mIsDeviceLinked) {
629 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
630 &m_pDualCamCmdPtr->bundle_info;
631 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
632 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
633 pthread_mutex_lock(&gCamLock);
634
635 if (mIsMainCamera == 1) {
636 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
637 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
638 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
639 // related session id should be session id of linked session
640 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
641 } else {
642 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
643 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
644 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
645 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
646 }
Thierry Strudel2896d122017-02-23 19:18:03 -0800647 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800648 pthread_mutex_unlock(&gCamLock);
649
650 rc = mCameraHandle->ops->set_dual_cam_cmd(
651 mCameraHandle->camera_handle);
652 if (rc < 0) {
653 LOGE("Dualcam: Unlink failed, but still proceed to close");
654 }
655 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700656
657 /* We need to stop all streams before deleting any stream */
658 if (mRawDumpChannel) {
659 mRawDumpChannel->stop();
660 }
661
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700662 if (mHdrPlusRawSrcChannel) {
663 mHdrPlusRawSrcChannel->stop();
664 }
665
Thierry Strudel3d639192016-09-09 11:52:26 -0700666 // NOTE: 'camera3_stream_t *' objects are already freed at
667 // this stage by the framework
668 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
669 it != mStreamInfo.end(); it++) {
670 QCamera3ProcessingChannel *channel = (*it)->channel;
671 if (channel) {
672 channel->stop();
673 }
674 }
675 if (mSupportChannel)
676 mSupportChannel->stop();
677
678 if (mAnalysisChannel) {
679 mAnalysisChannel->stop();
680 }
681 if (mMetadataChannel) {
682 mMetadataChannel->stop();
683 }
684 if (mChannelHandle) {
685 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
686 mChannelHandle);
687 LOGD("stopping channel %d", mChannelHandle);
688 }
689
690 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
691 it != mStreamInfo.end(); it++) {
692 QCamera3ProcessingChannel *channel = (*it)->channel;
693 if (channel)
694 delete channel;
695 free (*it);
696 }
697 if (mSupportChannel) {
698 delete mSupportChannel;
699 mSupportChannel = NULL;
700 }
701
702 if (mAnalysisChannel) {
703 delete mAnalysisChannel;
704 mAnalysisChannel = NULL;
705 }
706 if (mRawDumpChannel) {
707 delete mRawDumpChannel;
708 mRawDumpChannel = NULL;
709 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700710 if (mHdrPlusRawSrcChannel) {
711 delete mHdrPlusRawSrcChannel;
712 mHdrPlusRawSrcChannel = NULL;
713 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700714 if (mDummyBatchChannel) {
715 delete mDummyBatchChannel;
716 mDummyBatchChannel = NULL;
717 }
718
719 mPictureChannel = NULL;
Emilian Peev7650c122017-01-19 08:24:33 -0800720 mDepthChannel = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -0700721
722 if (mMetadataChannel) {
723 delete mMetadataChannel;
724 mMetadataChannel = NULL;
725 }
726
727 /* Clean up all channels */
728 if (mCameraInitialized) {
729 if(!mFirstConfiguration){
730 //send the last unconfigure
731 cam_stream_size_info_t stream_config_info;
732 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
733 stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
734 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -0800735 m_bIs4KVideo ? 0 :
736 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700737 clear_metadata_buffer(mParameters);
Thierry Strudel3d639192016-09-09 11:52:26 -0700738 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
739 stream_config_info);
740 int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
741 if (rc < 0) {
742 LOGE("set_parms failed for unconfigure");
743 }
744 }
745 deinitParameters();
746 }
747
748 if (mChannelHandle) {
749 mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
750 mChannelHandle);
751 LOGH("deleting channel %d", mChannelHandle);
752 mChannelHandle = 0;
753 }
754
755 if (mState != CLOSED)
756 closeCamera();
757
758 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
759 req.mPendingBufferList.clear();
760 }
761 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -0700762 for (pendingRequestIterator i = mPendingRequestsList.begin();
763 i != mPendingRequestsList.end();) {
764 i = erasePendingRequest(i);
765 }
766 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
767 if (mDefaultMetadata[i])
768 free_camera_metadata(mDefaultMetadata[i]);
769
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800770 mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700771
772 pthread_cond_destroy(&mRequestCond);
773
774 pthread_cond_destroy(&mBuffersCond);
775
776 pthread_mutex_destroy(&mMutex);
777 LOGD("X");
778}
779
780/*===========================================================================
781 * FUNCTION : erasePendingRequest
782 *
783 * DESCRIPTION: function to erase a desired pending request after freeing any
784 * allocated memory
785 *
786 * PARAMETERS :
787 * @i : iterator pointing to pending request to be erased
788 *
789 * RETURN : iterator pointing to the next request
790 *==========================================================================*/
791QCamera3HardwareInterface::pendingRequestIterator
792 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
793{
794 if (i->input_buffer != NULL) {
795 free(i->input_buffer);
796 i->input_buffer = NULL;
797 }
798 if (i->settings != NULL)
799 free_camera_metadata((camera_metadata_t*)i->settings);
800 return mPendingRequestsList.erase(i);
801}
802
803/*===========================================================================
804 * FUNCTION : camEvtHandle
805 *
806 * DESCRIPTION: Function registered to mm-camera-interface to handle events
807 *
808 * PARAMETERS :
809 * @camera_handle : interface layer camera handle
810 * @evt : ptr to event
811 * @user_data : user data ptr
812 *
813 * RETURN : none
814 *==========================================================================*/
815void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
816 mm_camera_event_t *evt,
817 void *user_data)
818{
819 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
820 if (obj && evt) {
821 switch(evt->server_event_type) {
822 case CAM_EVENT_TYPE_DAEMON_DIED:
823 pthread_mutex_lock(&obj->mMutex);
824 obj->mState = ERROR;
825 pthread_mutex_unlock(&obj->mMutex);
826 LOGE("Fatal, camera daemon died");
827 break;
828
829 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
830 LOGD("HAL got request pull from Daemon");
831 pthread_mutex_lock(&obj->mMutex);
832 obj->mWokenUpByDaemon = true;
833 obj->unblockRequestIfNecessary();
834 pthread_mutex_unlock(&obj->mMutex);
835 break;
836
837 default:
838 LOGW("Warning: Unhandled event %d",
839 evt->server_event_type);
840 break;
841 }
842 } else {
843 LOGE("NULL user_data/evt");
844 }
845}
846
847/*===========================================================================
848 * FUNCTION : openCamera
849 *
850 * DESCRIPTION: open camera
851 *
852 * PARAMETERS :
853 * @hw_device : double ptr for camera device struct
854 *
855 * RETURN : int32_t type of status
856 * NO_ERROR -- success
857 * none-zero failure code
858 *==========================================================================*/
859int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
860{
861 int rc = 0;
862 if (mState != CLOSED) {
863 *hw_device = NULL;
864 return PERMISSION_DENIED;
865 }
866
Chien-Yu Chene96475e2017-04-11 11:53:26 -0700867 logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800868 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700869 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
870 mCameraId);
871
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700872 if (mCameraHandle) {
873 LOGE("Failure: Camera already opened");
874 return ALREADY_EXISTS;
875 }
876
877 {
878 Mutex::Autolock l(gHdrPlusClientLock);
879 if (gEaselManagerClient.isEaselPresentOnDevice()) {
880 logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
881 rc = gEaselManagerClient.resume();
882 if (rc != 0) {
883 ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
884 return rc;
885 }
886 }
887 }
888
Thierry Strudel3d639192016-09-09 11:52:26 -0700889 rc = openCamera();
890 if (rc == 0) {
891 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800892 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700893 *hw_device = NULL;
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700894
895 // Suspend Easel because opening camera failed.
896 {
897 Mutex::Autolock l(gHdrPlusClientLock);
898 if (gEaselManagerClient.isEaselPresentOnDevice()) {
899 status_t suspendErr = gEaselManagerClient.suspend();
900 if (suspendErr != 0) {
901 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
902 strerror(-suspendErr), suspendErr);
903 }
904 }
905 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800906 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700907
Thierry Strudel3d639192016-09-09 11:52:26 -0700908 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
909 mCameraId, rc);
910
911 if (rc == NO_ERROR) {
912 mState = OPENED;
913 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800914
Thierry Strudel3d639192016-09-09 11:52:26 -0700915 return rc;
916}
917
918/*===========================================================================
919 * FUNCTION : openCamera
920 *
921 * DESCRIPTION: open camera
922 *
923 * PARAMETERS : none
924 *
925 * RETURN : int32_t type of status
926 * NO_ERROR -- success
927 * none-zero failure code
928 *==========================================================================*/
929int QCamera3HardwareInterface::openCamera()
930{
931 int rc = 0;
932 char value[PROPERTY_VALUE_MAX];
933
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800934 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800935
Thierry Strudel3d639192016-09-09 11:52:26 -0700936 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
937 if (rc < 0) {
938 LOGE("Failed to reserve flash for camera id: %d",
939 mCameraId);
940 return UNKNOWN_ERROR;
941 }
942
943 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
944 if (rc) {
945 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
946 return rc;
947 }
948
949 if (!mCameraHandle) {
950 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
951 return -ENODEV;
952 }
953
954 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
955 camEvtHandle, (void *)this);
956
957 if (rc < 0) {
958 LOGE("Error, failed to register event callback");
959 /* Not closing camera here since it is already handled in destructor */
960 return FAILED_TRANSACTION;
961 }
962
963 mExifParams.debug_params =
964 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
965 if (mExifParams.debug_params) {
966 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
967 } else {
968 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
969 return NO_MEMORY;
970 }
971 mFirstConfiguration = true;
972
973 //Notify display HAL that a camera session is active.
974 //But avoid calling the same during bootup because camera service might open/close
975 //cameras at boot time during its initialization and display service will also internally
976 //wait for camera service to initialize first while calling this display API, resulting in a
977 //deadlock situation. Since boot time camera open/close calls are made only to fetch
978 //capabilities, no need of this display bw optimization.
979 //Use "service.bootanim.exit" property to know boot status.
980 property_get("service.bootanim.exit", value, "0");
981 if (atoi(value) == 1) {
982 pthread_mutex_lock(&gCamLock);
983 if (gNumCameraSessions++ == 0) {
984 setCameraLaunchStatus(true);
985 }
986 pthread_mutex_unlock(&gCamLock);
987 }
988
989 //fill the session id needed while linking dual cam
990 pthread_mutex_lock(&gCamLock);
991 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
992 &sessionId[mCameraId]);
993 pthread_mutex_unlock(&gCamLock);
994
995 if (rc < 0) {
996 LOGE("Error, failed to get sessiion id");
997 return UNKNOWN_ERROR;
998 } else {
999 //Allocate related cam sync buffer
1000 //this is needed for the payload that goes along with bundling cmd for related
1001 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001002 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
1003 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07001004 if(rc != OK) {
1005 rc = NO_MEMORY;
1006 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
1007 return NO_MEMORY;
1008 }
1009
1010 //Map memory for related cam sync buffer
1011 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001012 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
1013 m_pDualCamCmdHeap->getFd(0),
1014 sizeof(cam_dual_camera_cmd_info_t),
1015 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -07001016 if(rc < 0) {
1017 LOGE("Dualcam: failed to map Related cam sync buffer");
1018 rc = FAILED_TRANSACTION;
1019 return NO_MEMORY;
1020 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001021 m_pDualCamCmdPtr =
1022 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -07001023 }
1024
1025 LOGH("mCameraId=%d",mCameraId);
1026
1027 return NO_ERROR;
1028}
1029
1030/*===========================================================================
1031 * FUNCTION : closeCamera
1032 *
1033 * DESCRIPTION: close camera
1034 *
1035 * PARAMETERS : none
1036 *
1037 * RETURN : int32_t type of status
1038 * NO_ERROR -- success
1039 * none-zero failure code
1040 *==========================================================================*/
1041int QCamera3HardwareInterface::closeCamera()
1042{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001043 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -07001044 int rc = NO_ERROR;
1045 char value[PROPERTY_VALUE_MAX];
1046
1047 LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
1048 mCameraId);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001049
1050 // unmap memory for related cam sync buffer
1051 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001052 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001053 if (NULL != m_pDualCamCmdHeap) {
1054 m_pDualCamCmdHeap->deallocate();
1055 delete m_pDualCamCmdHeap;
1056 m_pDualCamCmdHeap = NULL;
1057 m_pDualCamCmdPtr = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001058 }
1059
Thierry Strudel3d639192016-09-09 11:52:26 -07001060 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
1061 mCameraHandle = NULL;
1062
1063 //reset session id to some invalid id
1064 pthread_mutex_lock(&gCamLock);
1065 sessionId[mCameraId] = 0xDEADBEEF;
1066 pthread_mutex_unlock(&gCamLock);
1067
1068 //Notify display HAL that there is no active camera session
1069 //but avoid calling the same during bootup. Refer to openCamera
1070 //for more details.
1071 property_get("service.bootanim.exit", value, "0");
1072 if (atoi(value) == 1) {
1073 pthread_mutex_lock(&gCamLock);
1074 if (--gNumCameraSessions == 0) {
1075 setCameraLaunchStatus(false);
1076 }
1077 pthread_mutex_unlock(&gCamLock);
1078 }
1079
Thierry Strudel3d639192016-09-09 11:52:26 -07001080 if (mExifParams.debug_params) {
1081 free(mExifParams.debug_params);
1082 mExifParams.debug_params = NULL;
1083 }
1084 if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
1085 LOGW("Failed to release flash for camera id: %d",
1086 mCameraId);
1087 }
1088 mState = CLOSED;
1089 LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
1090 mCameraId, rc);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001091
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001092 {
1093 Mutex::Autolock l(gHdrPlusClientLock);
1094 if (gHdrPlusClient != nullptr) {
1095 // Disable HDR+ mode.
1096 disableHdrPlusModeLocked();
1097 // Disconnect Easel if it's connected.
1098 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
1099 gHdrPlusClient = nullptr;
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001100 }
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -07001101
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001102 if (EaselManagerClientOpened) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001103 rc = gEaselManagerClient.stopMipi(mCameraId);
1104 if (rc != 0) {
1105 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1106 }
1107
1108 rc = gEaselManagerClient.suspend();
1109 if (rc != 0) {
1110 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1111 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001112 }
1113 }
1114
Thierry Strudel3d639192016-09-09 11:52:26 -07001115 return rc;
1116}
1117
1118/*===========================================================================
1119 * FUNCTION : initialize
1120 *
1121 * DESCRIPTION: Initialize frameworks callback functions
1122 *
1123 * PARAMETERS :
1124 * @callback_ops : callback function to frameworks
1125 *
1126 * RETURN :
1127 *
1128 *==========================================================================*/
1129int QCamera3HardwareInterface::initialize(
1130 const struct camera3_callback_ops *callback_ops)
1131{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001132 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -07001133 int rc;
1134
1135 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
1136 pthread_mutex_lock(&mMutex);
1137
1138 // Validate current state
1139 switch (mState) {
1140 case OPENED:
1141 /* valid state */
1142 break;
1143 default:
1144 LOGE("Invalid state %d", mState);
1145 rc = -ENODEV;
1146 goto err1;
1147 }
1148
1149 rc = initParameters();
1150 if (rc < 0) {
1151 LOGE("initParamters failed %d", rc);
1152 goto err1;
1153 }
1154 mCallbackOps = callback_ops;
1155
1156 mChannelHandle = mCameraHandle->ops->add_channel(
1157 mCameraHandle->camera_handle, NULL, NULL, this);
1158 if (mChannelHandle == 0) {
1159 LOGE("add_channel failed");
1160 rc = -ENOMEM;
1161 pthread_mutex_unlock(&mMutex);
1162 return rc;
1163 }
1164
1165 pthread_mutex_unlock(&mMutex);
1166 mCameraInitialized = true;
1167 mState = INITIALIZED;
1168 LOGI("X");
1169 return 0;
1170
1171err1:
1172 pthread_mutex_unlock(&mMutex);
1173 return rc;
1174}
1175
1176/*===========================================================================
1177 * FUNCTION : validateStreamDimensions
1178 *
1179 * DESCRIPTION: Check if the configuration requested are those advertised
1180 *
1181 * PARAMETERS :
1182 * @stream_list : streams to be configured
1183 *
1184 * RETURN :
1185 *
1186 *==========================================================================*/
1187int QCamera3HardwareInterface::validateStreamDimensions(
1188 camera3_stream_configuration_t *streamList)
1189{
1190 int rc = NO_ERROR;
1191 size_t count = 0;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001192 uint32_t depthWidth = 0;
1193 uint32_t depthHeight = 0;
1194 if (mPDSupported) {
1195 depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
1196 depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
1197 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001198
1199 camera3_stream_t *inputStream = NULL;
1200 /*
1201 * Loop through all streams to find input stream if it exists*
1202 */
1203 for (size_t i = 0; i< streamList->num_streams; i++) {
1204 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1205 if (inputStream != NULL) {
1206 LOGE("Error, Multiple input streams requested");
1207 return -EINVAL;
1208 }
1209 inputStream = streamList->streams[i];
1210 }
1211 }
1212 /*
1213 * Loop through all streams requested in configuration
1214 * Check if unsupported sizes have been requested on any of them
1215 */
1216 for (size_t j = 0; j < streamList->num_streams; j++) {
1217 bool sizeFound = false;
1218 camera3_stream_t *newStream = streamList->streams[j];
1219
1220 uint32_t rotatedHeight = newStream->height;
1221 uint32_t rotatedWidth = newStream->width;
1222 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1223 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1224 rotatedHeight = newStream->width;
1225 rotatedWidth = newStream->height;
1226 }
1227
1228 /*
1229 * Sizes are different for each type of stream format check against
1230 * appropriate table.
1231 */
1232 switch (newStream->format) {
1233 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1234 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1235 case HAL_PIXEL_FORMAT_RAW10:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001236 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1237 (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
1238 mPDSupported) {
1239 if ((depthWidth == newStream->width) &&
1240 (depthHeight == newStream->height)) {
1241 sizeFound = true;
1242 }
1243 break;
1244 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001245 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1246 for (size_t i = 0; i < count; i++) {
1247 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1248 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1249 sizeFound = true;
1250 break;
1251 }
1252 }
1253 break;
1254 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001255 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1256 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001257 //As per spec. depth cloud should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001258 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
Emilian Peev7650c122017-01-19 08:24:33 -08001259 if ((depthSamplesCount == newStream->width) &&
1260 (1 == newStream->height)) {
1261 sizeFound = true;
1262 }
1263 break;
1264 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001265 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1266 /* Verify set size against generated sizes table */
1267 for (size_t i = 0; i < count; i++) {
1268 if (((int32_t)rotatedWidth ==
1269 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1270 ((int32_t)rotatedHeight ==
1271 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1272 sizeFound = true;
1273 break;
1274 }
1275 }
1276 break;
1277 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1278 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1279 default:
1280 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1281 || newStream->stream_type == CAMERA3_STREAM_INPUT
1282 || IS_USAGE_ZSL(newStream->usage)) {
1283 if (((int32_t)rotatedWidth ==
1284 gCamCapability[mCameraId]->active_array_size.width) &&
1285 ((int32_t)rotatedHeight ==
1286 gCamCapability[mCameraId]->active_array_size.height)) {
1287 sizeFound = true;
1288 break;
1289 }
1290 /* We could potentially break here to enforce ZSL stream
1291 * set from frameworks always is full active array size
1292 * but it is not clear from the spc if framework will always
1293 * follow that, also we have logic to override to full array
1294 * size, so keeping the logic lenient at the moment
1295 */
1296 }
1297 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1298 MAX_SIZES_CNT);
1299 for (size_t i = 0; i < count; i++) {
1300 if (((int32_t)rotatedWidth ==
1301 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1302 ((int32_t)rotatedHeight ==
1303 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1304 sizeFound = true;
1305 break;
1306 }
1307 }
1308 break;
1309 } /* End of switch(newStream->format) */
1310
1311 /* We error out even if a single stream has unsupported size set */
1312 if (!sizeFound) {
1313 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1314 rotatedWidth, rotatedHeight, newStream->format,
1315 gCamCapability[mCameraId]->active_array_size.width,
1316 gCamCapability[mCameraId]->active_array_size.height);
1317 rc = -EINVAL;
1318 break;
1319 }
1320 } /* End of for each stream */
1321 return rc;
1322}
1323
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001324/*===========================================================================
1325 * FUNCTION : validateUsageFlags
1326 *
1327 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1328 *
1329 * PARAMETERS :
1330 * @stream_list : streams to be configured
1331 *
1332 * RETURN :
1333 * NO_ERROR if the usage flags are supported
1334 * error code if usage flags are not supported
1335 *
1336 *==========================================================================*/
1337int QCamera3HardwareInterface::validateUsageFlags(
1338 const camera3_stream_configuration_t* streamList)
1339{
1340 for (size_t j = 0; j < streamList->num_streams; j++) {
1341 const camera3_stream_t *newStream = streamList->streams[j];
1342
1343 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1344 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1345 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1346 continue;
1347 }
1348
Jason Leec4cf5032017-05-24 18:31:41 -07001349 // Here we only care whether it's EIS3 or not
1350 char is_type_value[PROPERTY_VALUE_MAX];
1351 property_get("persist.camera.is_type", is_type_value, "4");
1352 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1353 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1354 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1355 isType = IS_TYPE_NONE;
1356
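        // Resolve the default internal formats that would be chosen for video,
        // preview and snapshot (ZSL) at this stream size; shared-usage surfaces
        // are only allowed below when these formats match. The IS type and UBWC
        // settings both feed into getStreamDefaultFormat()'s choice.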
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001357 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1358 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1359 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1360 bool forcePreviewUBWC = true;
1361 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1362 forcePreviewUBWC = false;
1363 }
1364 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001365 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001366 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001367 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001368 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001369 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001370
1371 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1372 // So color spaces will always match.
1373
1374 // Check whether underlying formats of shared streams match.
1375 if (isVideo && isPreview && videoFormat != previewFormat) {
1376 LOGE("Combined video and preview usage flag is not supported");
1377 return -EINVAL;
1378 }
1379 if (isPreview && isZSL && previewFormat != zslFormat) {
1380 LOGE("Combined preview and zsl usage flag is not supported");
1381 return -EINVAL;
1382 }
1383 if (isVideo && isZSL && videoFormat != zslFormat) {
1384 LOGE("Combined video and zsl usage flag is not supported");
1385 return -EINVAL;
1386 }
1387 }
1388 return NO_ERROR;
1389}
1390
1391/*===========================================================================
1392 * FUNCTION : validateUsageFlagsForEis
1393 *
1394 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1395 *
1396 * PARAMETERS :
1397 * @stream_list : streams to be configured
1398 *
1399 * RETURN :
1400 * NO_ERROR if the usage flags are supported
1401 * error code if usage flags are not supported
1402 *
1403 *==========================================================================*/
1404int QCamera3HardwareInterface::validateUsageFlagsForEis(
1405 const camera3_stream_configuration_t* streamList)
1406{
1407 for (size_t j = 0; j < streamList->num_streams; j++) {
1408 const camera3_stream_t *newStream = streamList->streams[j];
1409
1410 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1411 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1412
1413        // Because EIS is "hard-coded" for certain use cases, and the current
1414        // implementation doesn't support shared preview and video on the same
1415        // stream, return failure if EIS is forced on.
1416 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1417 LOGE("Combined video and preview usage flag is not supported due to EIS");
1418 return -EINVAL;
1419 }
1420 }
1421 return NO_ERROR;
1422}
1423
Thierry Strudel3d639192016-09-09 11:52:26 -07001424/*==============================================================================
1425 * FUNCTION : isSupportChannelNeeded
1426 *
1427 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1428 *
1429 * PARAMETERS :
1430 * @stream_list : streams to be configured
1431 * @stream_config_info : the config info for streams to be configured
1432 *
1433 * RETURN : Boolean true/false decision
1434 *
1435 *==========================================================================*/
1436bool QCamera3HardwareInterface::isSupportChannelNeeded(
1437 camera3_stream_configuration_t *streamList,
1438 cam_stream_size_info_t stream_config_info)
1439{
1440 uint32_t i;
1441 bool pprocRequested = false;
1442 /* Check for conditions where PProc pipeline does not have any streams*/
1443    /* Check for conditions where the PProc pipeline does not have any streams */
1444 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1445 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1446 pprocRequested = true;
1447 break;
1448 }
1449 }
1450
1451 if (pprocRequested == false )
1452 return true;
1453
1454 /* Dummy stream needed if only raw or jpeg streams present */
1455 for (i = 0; i < streamList->num_streams; i++) {
1456 switch(streamList->streams[i]->format) {
1457 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1458 case HAL_PIXEL_FORMAT_RAW10:
1459 case HAL_PIXEL_FORMAT_RAW16:
1460 case HAL_PIXEL_FORMAT_BLOB:
1461 break;
1462 default:
1463 return false;
1464 }
1465 }
1466 return true;
1467}
1468
1469/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001470 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001471 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001472 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001473 *
1474 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001475 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001476 *
1477 * RETURN : int32_t type of status
1478 * NO_ERROR -- success
1479 *              non-zero failure code
1480 *
1481 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001482int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001483{
1484 int32_t rc = NO_ERROR;
1485
1486 cam_dimension_t max_dim = {0, 0};
1487 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1488 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1489 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1490 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1491 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1492 }
1493
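    // Publish the largest requested stream dimension to the backend so it can
    // select a matching sensor mode, then read back the resulting mode info
    // via CAM_INTF_PARM_SENSOR_MODE_INFO.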
1494 clear_metadata_buffer(mParameters);
1495
1496 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1497 max_dim);
1498 if (rc != NO_ERROR) {
1499 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1500 return rc;
1501 }
1502
1503 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1504 if (rc != NO_ERROR) {
1505 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1506 return rc;
1507 }
1508
1509 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001510 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001511
1512 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1513 mParameters);
1514 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001515 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001516 return rc;
1517 }
1518
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001519 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001520 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1521 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1522 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1523 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1524 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001525
1526 return rc;
1527}
1528
1529/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001530 * FUNCTION : getCurrentSensorModeInfo
1531 *
1532 * DESCRIPTION: Get sensor mode information that is currently selected.
1533 *
1534 * PARAMETERS :
1535 * @sensorModeInfo : sensor mode information (output)
1536 *
1537 * RETURN : int32_t type of status
1538 * NO_ERROR -- success
1539 *              non-zero failure code
1540 *
1541 *==========================================================================*/
1542int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1543{
1544 int32_t rc = NO_ERROR;
1545
1546 clear_metadata_buffer(mParameters);
1547 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1548
1549 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1550 mParameters);
1551 if (rc != NO_ERROR) {
1552        LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1553 return rc;
1554 }
1555
1556 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1557 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1558 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1559 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1560 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1561 sensorModeInfo.num_raw_bits);
1562
1563 return rc;
1564}
1565
1566/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001567 * FUNCTION : addToPPFeatureMask
1568 *
1569 * DESCRIPTION: add additional features to pp feature mask based on
1570 * stream type and usecase
1571 *
1572 * PARAMETERS :
1573 * @stream_format : stream type for feature mask
1574 * @stream_idx : stream idx within postprocess_mask list to change
1575 *
1576 * RETURN : None
1577 *
1578 *==========================================================================*/
1579void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1580 uint32_t stream_idx)
1581{
1582 char feature_mask_value[PROPERTY_VALUE_MAX];
1583 cam_feature_mask_t feature_mask;
1584 int args_converted;
1585 int property_len;
1586
1587 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001588#ifdef _LE_CAMERA_
1589 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1590 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1591 property_len = property_get("persist.camera.hal3.feature",
1592 feature_mask_value, swtnr_feature_mask_value);
1593#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001594 property_len = property_get("persist.camera.hal3.feature",
1595 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001596#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001597 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1598 (feature_mask_value[1] == 'x')) {
1599 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1600 } else {
1601 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1602 }
1603 if (1 != args_converted) {
1604 feature_mask = 0;
1605 LOGE("Wrong feature mask %s", feature_mask_value);
1606 return;
1607 }
1608
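    // Only IMPLEMENTATION_DEFINED (preview/video) streams receive the extra
    // feature-mask bits below; other formats keep their base mask.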
1609 switch (stream_format) {
1610 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1611 /* Add LLVD to pp feature mask only if video hint is enabled */
1612 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1613 mStreamConfigInfo.postprocess_mask[stream_idx]
1614 |= CAM_QTI_FEATURE_SW_TNR;
1615 LOGH("Added SW TNR to pp feature mask");
1616 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1617 mStreamConfigInfo.postprocess_mask[stream_idx]
1618 |= CAM_QCOM_FEATURE_LLVD;
1619 LOGH("Added LLVD SeeMore to pp feature mask");
1620 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001621 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1622 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1623 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1624 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001625 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1626 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1627 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1628 CAM_QTI_FEATURE_BINNING_CORRECTION;
1629 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001630 break;
1631 }
1632 default:
1633 break;
1634 }
1635 LOGD("PP feature mask %llx",
1636 mStreamConfigInfo.postprocess_mask[stream_idx]);
1637}
1638
1639/*==============================================================================
1640 * FUNCTION : updateFpsInPreviewBuffer
1641 *
1642 * DESCRIPTION: update FPS information in preview buffer.
1643 *
1644 * PARAMETERS :
1645 * @metadata : pointer to metadata buffer
1646 * @frame_number: frame_number to look for in pending buffer list
1647 *
1648 * RETURN : None
1649 *
1650 *==========================================================================*/
1651void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1652 uint32_t frame_number)
1653{
1654 // Mark all pending buffers for this particular request
1655 // with corresponding framerate information
1656 for (List<PendingBuffersInRequest>::iterator req =
1657 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1658 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1659 for(List<PendingBufferInfo>::iterator j =
1660 req->mPendingBufferList.begin();
1661 j != req->mPendingBufferList.end(); j++) {
1662 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1663 if ((req->frame_number == frame_number) &&
1664 (channel->getStreamTypeMask() &
1665 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1666 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1667 CAM_INTF_PARM_FPS_RANGE, metadata) {
1668 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1669 struct private_handle_t *priv_handle =
1670 (struct private_handle_t *)(*(j->buffer));
1671 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1672 }
1673 }
1674 }
1675 }
1676}
1677
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001678/*==============================================================================
1679 * FUNCTION : updateTimeStampInPendingBuffers
1680 *
1681 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1682 * of a frame number
1683 *
1684 * PARAMETERS :
1685 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1686 * @timestamp : timestamp to be set
1687 *
1688 * RETURN : None
1689 *
1690 *==========================================================================*/
1691void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1692 uint32_t frameNumber, nsecs_t timestamp)
1693{
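    // Walk every buffer pending for this frame number and stamp the timestamp
    // into its gralloc private handle via setMetaData(SET_VT_TIMESTAMP).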
1694 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1695 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1696 if (req->frame_number != frameNumber)
1697 continue;
1698
1699 for (auto k = req->mPendingBufferList.begin();
1700 k != req->mPendingBufferList.end(); k++ ) {
1701 struct private_handle_t *priv_handle =
1702 (struct private_handle_t *) (*(k->buffer));
1703 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1704 }
1705 }
1706 return;
1707}
1708
Thierry Strudel3d639192016-09-09 11:52:26 -07001709/*===========================================================================
1710 * FUNCTION : configureStreams
1711 *
1712 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1713 * and output streams.
1714 *
1715 * PARAMETERS :
1716 * @stream_list : streams to be configured
1717 *
1718 * RETURN :
1719 *
1720 *==========================================================================*/
1721int QCamera3HardwareInterface::configureStreams(
1722 camera3_stream_configuration_t *streamList)
1723{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001724 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001725 int rc = 0;
1726
1727 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001728 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001729 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001730 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001731
1732 return rc;
1733}
1734
1735/*===========================================================================
1736 * FUNCTION : configureStreamsPerfLocked
1737 *
1738 * DESCRIPTION: configureStreams while perfLock is held.
1739 *
1740 * PARAMETERS :
1741 * @stream_list : streams to be configured
1742 *
1743 * RETURN : int32_t type of status
1744 * NO_ERROR -- success
1745 * none-zero failure code
1746 *==========================================================================*/
1747int QCamera3HardwareInterface::configureStreamsPerfLocked(
1748 camera3_stream_configuration_t *streamList)
1749{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001750 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001751 int rc = 0;
1752
1753 // Sanity check stream_list
1754 if (streamList == NULL) {
1755 LOGE("NULL stream configuration");
1756 return BAD_VALUE;
1757 }
1758 if (streamList->streams == NULL) {
1759 LOGE("NULL stream list");
1760 return BAD_VALUE;
1761 }
1762
1763 if (streamList->num_streams < 1) {
1764 LOGE("Bad number of streams requested: %d",
1765 streamList->num_streams);
1766 return BAD_VALUE;
1767 }
1768
1769 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1770 LOGE("Maximum number of streams %d exceeded: %d",
1771 MAX_NUM_STREAMS, streamList->num_streams);
1772 return BAD_VALUE;
1773 }
1774
Jason Leec4cf5032017-05-24 18:31:41 -07001775 mOpMode = streamList->operation_mode;
1776 LOGD("mOpMode: %d", mOpMode);
1777
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001778 rc = validateUsageFlags(streamList);
1779 if (rc != NO_ERROR) {
1780 return rc;
1781 }
1782
Thierry Strudel3d639192016-09-09 11:52:26 -07001783    /* First invalidate all the streams in mStreamInfo;
1784     * if they appear again in the new list, they will be re-validated */
1785 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1786 it != mStreamInfo.end(); it++) {
1787 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1788 if (channel) {
1789 channel->stop();
1790 }
1791 (*it)->status = INVALID;
1792 }
1793
1794 if (mRawDumpChannel) {
1795 mRawDumpChannel->stop();
1796 delete mRawDumpChannel;
1797 mRawDumpChannel = NULL;
1798 }
1799
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001800 if (mHdrPlusRawSrcChannel) {
1801 mHdrPlusRawSrcChannel->stop();
1802 delete mHdrPlusRawSrcChannel;
1803 mHdrPlusRawSrcChannel = NULL;
1804 }
1805
Thierry Strudel3d639192016-09-09 11:52:26 -07001806 if (mSupportChannel)
1807 mSupportChannel->stop();
1808
1809 if (mAnalysisChannel) {
1810 mAnalysisChannel->stop();
1811 }
1812 if (mMetadataChannel) {
1813        /* If mStreamInfo is not empty, a metadata stream exists */
1814 mMetadataChannel->stop();
1815 }
1816 if (mChannelHandle) {
1817 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1818 mChannelHandle);
1819 LOGD("stopping channel %d", mChannelHandle);
1820 }
1821
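    // Channels above are stopped before mMutex is taken; the state check and
    // the construction of the new channel set below run under the lock.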
1822 pthread_mutex_lock(&mMutex);
1823
1824 // Check state
1825 switch (mState) {
1826 case INITIALIZED:
1827 case CONFIGURED:
1828 case STARTED:
1829 /* valid state */
1830 break;
1831 default:
1832 LOGE("Invalid state %d", mState);
1833 pthread_mutex_unlock(&mMutex);
1834 return -ENODEV;
1835 }
1836
1837 /* Check whether we have video stream */
1838 m_bIs4KVideo = false;
1839 m_bIsVideo = false;
1840 m_bEisSupportedSize = false;
1841 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001842 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001843 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001844 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001845 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001846 uint32_t videoWidth = 0U;
1847 uint32_t videoHeight = 0U;
1848 size_t rawStreamCnt = 0;
1849 size_t stallStreamCnt = 0;
1850 size_t processedStreamCnt = 0;
1851 // Number of streams on ISP encoder path
1852 size_t numStreamsOnEncoder = 0;
1853 size_t numYuv888OnEncoder = 0;
1854 bool bYuv888OverrideJpeg = false;
1855 cam_dimension_t largeYuv888Size = {0, 0};
1856 cam_dimension_t maxViewfinderSize = {0, 0};
1857 bool bJpegExceeds4K = false;
1858 bool bJpegOnEncoder = false;
1859 bool bUseCommonFeatureMask = false;
1860 cam_feature_mask_t commonFeatureMask = 0;
1861 bool bSmallJpegSize = false;
1862 uint32_t width_ratio;
1863 uint32_t height_ratio;
1864 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1865 camera3_stream_t *inputStream = NULL;
1866 bool isJpeg = false;
1867 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001868 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001869 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001870
1871 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1872
1873 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001874 uint8_t eis_prop_set;
1875 uint32_t maxEisWidth = 0;
1876 uint32_t maxEisHeight = 0;
1877
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001878 // Initialize all instant AEC related variables
1879 mInstantAEC = false;
1880 mResetInstantAEC = false;
1881 mInstantAECSettledFrameNumber = 0;
1882 mAecSkipDisplayFrameBound = 0;
1883 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001884 mCurrFeatureState = 0;
1885 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001886
Thierry Strudel3d639192016-09-09 11:52:26 -07001887 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1888
1889 size_t count = IS_TYPE_MAX;
1890 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1891 for (size_t i = 0; i < count; i++) {
1892 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001893 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1894 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001895 break;
1896 }
1897 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001898
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001899 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001900 maxEisWidth = MAX_EIS_WIDTH;
1901 maxEisHeight = MAX_EIS_HEIGHT;
1902 }
1903
1904 /* EIS setprop control */
1905 char eis_prop[PROPERTY_VALUE_MAX];
1906 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001907 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001908 eis_prop_set = (uint8_t)atoi(eis_prop);
1909
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001910 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001911 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1912
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001913 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1914 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001915
Thierry Strudel3d639192016-09-09 11:52:26 -07001916 /* stream configurations */
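    // First pass over the stream list: classify each stream (ZSL, preview,
    // video, JPEG, RAW, depth), count streams on the encoder path and record
    // the largest video/JPEG dimensions. Channels are created in a later pass.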
1917 for (size_t i = 0; i < streamList->num_streams; i++) {
1918 camera3_stream_t *newStream = streamList->streams[i];
1919 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1920 "height = %d, rotation = %d, usage = 0x%x",
1921 i, newStream->stream_type, newStream->format,
1922 newStream->width, newStream->height, newStream->rotation,
1923 newStream->usage);
1924 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1925 newStream->stream_type == CAMERA3_STREAM_INPUT){
1926 isZsl = true;
1927 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001928 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1929 IS_USAGE_PREVIEW(newStream->usage)) {
1930 isPreview = true;
1931 }
1932
Thierry Strudel3d639192016-09-09 11:52:26 -07001933 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1934 inputStream = newStream;
1935 }
1936
Emilian Peev7650c122017-01-19 08:24:33 -08001937 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1938 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001939 isJpeg = true;
1940 jpegSize.width = newStream->width;
1941 jpegSize.height = newStream->height;
1942 if (newStream->width > VIDEO_4K_WIDTH ||
1943 newStream->height > VIDEO_4K_HEIGHT)
1944 bJpegExceeds4K = true;
1945 }
1946
1947 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1948 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1949 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001950 // In HAL3 we can have multiple different video streams.
1951 // The variables video width and height are used below as
1952 // dimensions of the biggest of them
1953 if (videoWidth < newStream->width ||
1954 videoHeight < newStream->height) {
1955 videoWidth = newStream->width;
1956 videoHeight = newStream->height;
1957 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001958 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1959 (VIDEO_4K_HEIGHT <= newStream->height)) {
1960 m_bIs4KVideo = true;
1961 }
1962 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1963 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001964
Thierry Strudel3d639192016-09-09 11:52:26 -07001965 }
1966 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1967 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1968 switch (newStream->format) {
1969 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001970 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1971 depthPresent = true;
1972 break;
1973 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001974 stallStreamCnt++;
1975 if (isOnEncoder(maxViewfinderSize, newStream->width,
1976 newStream->height)) {
1977 numStreamsOnEncoder++;
1978 bJpegOnEncoder = true;
1979 }
1980 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1981 newStream->width);
1982 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1983                            newStream->height);
1984 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1985 "FATAL: max_downscale_factor cannot be zero and so assert");
1986 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1987 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1988 LOGH("Setting small jpeg size flag to true");
1989 bSmallJpegSize = true;
1990 }
1991 break;
1992 case HAL_PIXEL_FORMAT_RAW10:
1993 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1994 case HAL_PIXEL_FORMAT_RAW16:
1995 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001996 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1997 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1998 pdStatCount++;
1999 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002000 break;
2001 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2002 processedStreamCnt++;
2003 if (isOnEncoder(maxViewfinderSize, newStream->width,
2004 newStream->height)) {
2005 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2006 !IS_USAGE_ZSL(newStream->usage)) {
2007 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2008 }
2009 numStreamsOnEncoder++;
2010 }
2011 break;
2012 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2013 processedStreamCnt++;
2014 if (isOnEncoder(maxViewfinderSize, newStream->width,
2015 newStream->height)) {
2016 // If Yuv888 size is not greater than 4K, set feature mask
2017 // to SUPERSET so that it support concurrent request on
2018 // YUV and JPEG.
2019 if (newStream->width <= VIDEO_4K_WIDTH &&
2020 newStream->height <= VIDEO_4K_HEIGHT) {
2021 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2022 }
2023 numStreamsOnEncoder++;
2024 numYuv888OnEncoder++;
2025 largeYuv888Size.width = newStream->width;
2026 largeYuv888Size.height = newStream->height;
2027 }
2028 break;
2029 default:
2030 processedStreamCnt++;
2031 if (isOnEncoder(maxViewfinderSize, newStream->width,
2032 newStream->height)) {
2033 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2034 numStreamsOnEncoder++;
2035 }
2036 break;
2037 }
2038
2039 }
2040 }
2041
2042 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2043 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2044 !m_bIsVideo) {
2045 m_bEisEnable = false;
2046 }
2047
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002048 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2049 pthread_mutex_unlock(&mMutex);
2050 return -EINVAL;
2051 }
2052
Thierry Strudel54dc9782017-02-15 12:12:10 -08002053 uint8_t forceEnableTnr = 0;
2054 char tnr_prop[PROPERTY_VALUE_MAX];
2055 memset(tnr_prop, 0, sizeof(tnr_prop));
2056 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2057 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2058
Thierry Strudel3d639192016-09-09 11:52:26 -07002059 /* Logic to enable/disable TNR based on specific config size/etc.*/
2060 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002061 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2062 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002063 else if (forceEnableTnr)
2064 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002065
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002066 char videoHdrProp[PROPERTY_VALUE_MAX];
2067 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2068 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2069 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2070
2071 if (hdr_mode_prop == 1 && m_bIsVideo &&
2072 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2073 m_bVideoHdrEnabled = true;
2074 else
2075 m_bVideoHdrEnabled = false;
2076
2077
Thierry Strudel3d639192016-09-09 11:52:26 -07002078 /* Check if num_streams is sane */
2079 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2080 rawStreamCnt > MAX_RAW_STREAMS ||
2081 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2082        LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2083 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2084 pthread_mutex_unlock(&mMutex);
2085 return -EINVAL;
2086 }
2087 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002088 if (isZsl && m_bIs4KVideo) {
2089 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002090 pthread_mutex_unlock(&mMutex);
2091 return -EINVAL;
2092 }
2093 /* Check if stream sizes are sane */
2094 if (numStreamsOnEncoder > 2) {
2095 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2096 pthread_mutex_unlock(&mMutex);
2097 return -EINVAL;
2098 } else if (1 < numStreamsOnEncoder){
2099 bUseCommonFeatureMask = true;
2100 LOGH("Multiple streams above max viewfinder size, common mask needed");
2101 }
2102
2103 /* Check if BLOB size is greater than 4k in 4k recording case */
2104 if (m_bIs4KVideo && bJpegExceeds4K) {
2105 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2106 pthread_mutex_unlock(&mMutex);
2107 return -EINVAL;
2108 }
2109
Emilian Peev7650c122017-01-19 08:24:33 -08002110 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2111 depthPresent) {
2112 LOGE("HAL doesn't support depth streams in HFR mode!");
2113 pthread_mutex_unlock(&mMutex);
2114 return -EINVAL;
2115 }
2116
Thierry Strudel3d639192016-09-09 11:52:26 -07002117 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2118 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2119 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2120 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2121 // configurations:
2122 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2123 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2124 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2125 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2126 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2127 __func__);
2128 pthread_mutex_unlock(&mMutex);
2129 return -EINVAL;
2130 }
2131
2132 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2133 // the YUV stream's size is greater or equal to the JPEG size, set common
2134 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2135 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2136 jpegSize.width, jpegSize.height) &&
2137 largeYuv888Size.width > jpegSize.width &&
2138 largeYuv888Size.height > jpegSize.height) {
2139 bYuv888OverrideJpeg = true;
2140 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2141 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2142 }
2143
2144 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2145 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2146 commonFeatureMask);
2147 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2148 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2149
2150 rc = validateStreamDimensions(streamList);
2151 if (rc == NO_ERROR) {
2152 rc = validateStreamRotations(streamList);
2153 }
2154 if (rc != NO_ERROR) {
2155 LOGE("Invalid stream configuration requested!");
2156 pthread_mutex_unlock(&mMutex);
2157 return rc;
2158 }
2159
Emilian Peev0f3c3162017-03-15 12:57:46 +00002160 if (1 < pdStatCount) {
2161 LOGE("HAL doesn't support multiple PD streams");
2162 pthread_mutex_unlock(&mMutex);
2163 return -EINVAL;
2164 }
2165
2166 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2167 (1 == pdStatCount)) {
2168 LOGE("HAL doesn't support PD streams in HFR mode!");
2169 pthread_mutex_unlock(&mMutex);
2170 return -EINVAL;
2171 }
2172
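    // Second pass: re-validate streams that already exist in mStreamInfo,
    // register new ones, and identify the ZSL/reprocess stream whose size is
    // used to override the input stream configuration below.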
Thierry Strudel3d639192016-09-09 11:52:26 -07002173 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2174 for (size_t i = 0; i < streamList->num_streams; i++) {
2175 camera3_stream_t *newStream = streamList->streams[i];
2176 LOGH("newStream type = %d, stream format = %d "
2177 "stream size : %d x %d, stream rotation = %d",
2178 newStream->stream_type, newStream->format,
2179 newStream->width, newStream->height, newStream->rotation);
2180 //if the stream is in the mStreamList validate it
2181 bool stream_exists = false;
2182 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2183 it != mStreamInfo.end(); it++) {
2184 if ((*it)->stream == newStream) {
2185 QCamera3ProcessingChannel *channel =
2186 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2187 stream_exists = true;
2188 if (channel)
2189 delete channel;
2190 (*it)->status = VALID;
2191 (*it)->stream->priv = NULL;
2192 (*it)->channel = NULL;
2193 }
2194 }
2195 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2196 //new stream
2197 stream_info_t* stream_info;
2198 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2199 if (!stream_info) {
2200 LOGE("Could not allocate stream info");
2201 rc = -ENOMEM;
2202 pthread_mutex_unlock(&mMutex);
2203 return rc;
2204 }
2205 stream_info->stream = newStream;
2206 stream_info->status = VALID;
2207 stream_info->channel = NULL;
2208 mStreamInfo.push_back(stream_info);
2209 }
2210 /* Covers Opaque ZSL and API1 F/W ZSL */
2211 if (IS_USAGE_ZSL(newStream->usage)
2212 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2213 if (zslStream != NULL) {
2214 LOGE("Multiple input/reprocess streams requested!");
2215 pthread_mutex_unlock(&mMutex);
2216 return BAD_VALUE;
2217 }
2218 zslStream = newStream;
2219 }
2220 /* Covers YUV reprocess */
2221 if (inputStream != NULL) {
2222 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2223 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2224 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2225 && inputStream->width == newStream->width
2226 && inputStream->height == newStream->height) {
2227 if (zslStream != NULL) {
2228                    /* This scenario indicates that multiple YUV streams with the same
2229                     * size as the input stream have been requested. Since the zsl stream
2230                     * handle is used solely to override the size of streams that share
2231                     * h/w streams, we just make a guess here as to which stream is the
2232                     * ZSL stream. This will be refactored once there is generic logic
2233                     * for streams sharing encoder output.
2234                     */
2235 LOGH("Warning, Multiple ip/reprocess streams requested!");
2236 }
2237 zslStream = newStream;
2238 }
2239 }
2240 }
2241
2242 /* If a zsl stream is set, we know that we have configured at least one input or
2243 bidirectional stream */
2244 if (NULL != zslStream) {
2245 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2246 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2247 mInputStreamInfo.format = zslStream->format;
2248 mInputStreamInfo.usage = zslStream->usage;
2249 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2250 mInputStreamInfo.dim.width,
2251 mInputStreamInfo.dim.height,
2252 mInputStreamInfo.format, mInputStreamInfo.usage);
2253 }
2254
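    // Tear down the internal helper channels (metadata, support, analysis,
    // dummy batch, depth) left over from the previous configuration; they are
    // recreated below as required by the new stream set.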
2255 cleanAndSortStreamInfo();
2256 if (mMetadataChannel) {
2257 delete mMetadataChannel;
2258 mMetadataChannel = NULL;
2259 }
2260 if (mSupportChannel) {
2261 delete mSupportChannel;
2262 mSupportChannel = NULL;
2263 }
2264
2265 if (mAnalysisChannel) {
2266 delete mAnalysisChannel;
2267 mAnalysisChannel = NULL;
2268 }
2269
2270 if (mDummyBatchChannel) {
2271 delete mDummyBatchChannel;
2272 mDummyBatchChannel = NULL;
2273 }
2274
Emilian Peev7650c122017-01-19 08:24:33 -08002275 if (mDepthChannel) {
2276 mDepthChannel = NULL;
2277 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002278 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002279
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002280 mShutterDispatcher.clear();
2281 mOutputBufferDispatcher.clear();
2282
Thierry Strudel2896d122017-02-23 19:18:03 -08002283 char is_type_value[PROPERTY_VALUE_MAX];
2284 property_get("persist.camera.is_type", is_type_value, "4");
2285 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2286
Binhao Line406f062017-05-03 14:39:44 -07002287 char property_value[PROPERTY_VALUE_MAX];
2288 property_get("persist.camera.gzoom.at", property_value, "0");
2289 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002290 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2291 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2292 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2293 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002294
2295 property_get("persist.camera.gzoom.4k", property_value, "0");
2296 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2297
Thierry Strudel3d639192016-09-09 11:52:26 -07002298 //Create metadata channel and initialize it
2299 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2300 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2301 gCamCapability[mCameraId]->color_arrangement);
2302 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2303 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002304 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002305 if (mMetadataChannel == NULL) {
2306 LOGE("failed to allocate metadata channel");
2307 rc = -ENOMEM;
2308 pthread_mutex_unlock(&mMutex);
2309 return rc;
2310 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002311 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002312 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2313 if (rc < 0) {
2314 LOGE("metadata channel initialization failed");
2315 delete mMetadataChannel;
2316 mMetadataChannel = NULL;
2317 pthread_mutex_unlock(&mMutex);
2318 return rc;
2319 }
2320
Thierry Strudel2896d122017-02-23 19:18:03 -08002321 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002322 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002323 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002324 // Keep track of preview/video streams indices.
2325 // There could be more than one preview streams, but only one video stream.
2326 int32_t video_stream_idx = -1;
2327 int32_t preview_stream_idx[streamList->num_streams];
2328 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002329 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2330 /* Allocate channel objects for the requested streams */
2331 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002332
Thierry Strudel3d639192016-09-09 11:52:26 -07002333 camera3_stream_t *newStream = streamList->streams[i];
2334 uint32_t stream_usage = newStream->usage;
2335 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2336 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2337 struct camera_info *p_info = NULL;
2338 pthread_mutex_lock(&gCamLock);
2339 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2340 pthread_mutex_unlock(&gCamLock);
2341 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2342 || IS_USAGE_ZSL(newStream->usage)) &&
2343 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002344 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002345 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002346 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2347 if (bUseCommonFeatureMask)
2348 zsl_ppmask = commonFeatureMask;
2349 else
2350 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002351 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002352 if (numStreamsOnEncoder > 0)
2353 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2354 else
2355 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002356 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002357 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002358 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002359 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002360 LOGH("Input stream configured, reprocess config");
2361 } else {
2362 //for non zsl streams find out the format
2363 switch (newStream->format) {
2364 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2365 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002366 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002367 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2368 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2369 /* add additional features to pp feature mask */
2370 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2371 mStreamConfigInfo.num_streams);
2372
2373 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2374 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2375 CAM_STREAM_TYPE_VIDEO;
2376 if (m_bTnrEnabled && m_bTnrVideo) {
2377 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2378 CAM_QCOM_FEATURE_CPP_TNR;
2379 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2380 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2381 ~CAM_QCOM_FEATURE_CDS;
2382 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002383 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2384 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2385 CAM_QTI_FEATURE_PPEISCORE;
2386 }
Binhao Line406f062017-05-03 14:39:44 -07002387 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2388 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2389 CAM_QCOM_FEATURE_GOOG_ZOOM;
2390 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002391 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002392 } else {
2393 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2394 CAM_STREAM_TYPE_PREVIEW;
2395 if (m_bTnrEnabled && m_bTnrPreview) {
2396 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2397 CAM_QCOM_FEATURE_CPP_TNR;
2398 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2399 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2400 ~CAM_QCOM_FEATURE_CDS;
2401 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002402 if(!m_bSwTnrPreview) {
2403 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2404 ~CAM_QTI_FEATURE_SW_TNR;
2405 }
Binhao Line406f062017-05-03 14:39:44 -07002406 if (is_goog_zoom_preview_enabled) {
2407 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2408 CAM_QCOM_FEATURE_GOOG_ZOOM;
2409 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002410 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002411 padding_info.width_padding = mSurfaceStridePadding;
2412 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002413 previewSize.width = (int32_t)newStream->width;
2414 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002415 }
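                /* For 90/270 degree rotation, configure the backend stream with
                 * swapped dimensions so the buffer matches the rotated output. */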
2416 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2417 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2418 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2419 newStream->height;
2420 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2421 newStream->width;
2422 }
2423 }
2424 break;
2425 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002426 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002427 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2428 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2429 if (bUseCommonFeatureMask)
2430 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2431 commonFeatureMask;
2432 else
2433 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2434 CAM_QCOM_FEATURE_NONE;
2435 } else {
2436 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2437 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2438 }
2439 break;
2440 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002441 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002442 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2443 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2444 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2445 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2446 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002447 /* Remove rotation if it is not supported
2448 for 4K LiveVideo snapshot case (online processing) */
2449 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2450 CAM_QCOM_FEATURE_ROTATION)) {
2451 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2452 &= ~CAM_QCOM_FEATURE_ROTATION;
2453 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002454 } else {
2455 if (bUseCommonFeatureMask &&
2456 isOnEncoder(maxViewfinderSize, newStream->width,
2457 newStream->height)) {
2458 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2459 } else {
2460 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2461 }
2462 }
2463 if (isZsl) {
2464 if (zslStream) {
2465 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2466 (int32_t)zslStream->width;
2467 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2468 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002469 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2470 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002471 } else {
2472 LOGE("Error, No ZSL stream identified");
2473 pthread_mutex_unlock(&mMutex);
2474 return -EINVAL;
2475 }
2476 } else if (m_bIs4KVideo) {
2477 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2478 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2479 } else if (bYuv888OverrideJpeg) {
2480 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2481 (int32_t)largeYuv888Size.width;
2482 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2483 (int32_t)largeYuv888Size.height;
2484 }
2485 break;
2486 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2487 case HAL_PIXEL_FORMAT_RAW16:
2488 case HAL_PIXEL_FORMAT_RAW10:
2489 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2490 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2491 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002492 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2493 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2494 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2495 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2496 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2497 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2498 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2499 gCamCapability[mCameraId]->dt[mPDIndex];
2500 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2501 gCamCapability[mCameraId]->vc[mPDIndex];
2502 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002503 break;
2504 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002505 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002506 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2507 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2508 break;
2509 }
2510 }
2511
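        // Adjust the per-stream postprocess mask for PAAF support based on the
        // stream type and the sensor color arrangement.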
2512 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2513 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2514 gCamCapability[mCameraId]->color_arrangement);
2515
2516 if (newStream->priv == NULL) {
2517 //New stream, construct channel
2518 switch (newStream->stream_type) {
2519 case CAMERA3_STREAM_INPUT:
2520 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2521 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2522 break;
2523 case CAMERA3_STREAM_BIDIRECTIONAL:
2524 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2525 GRALLOC_USAGE_HW_CAMERA_WRITE;
2526 break;
2527 case CAMERA3_STREAM_OUTPUT:
2528                /* For video encoding streams, set the read/write rarely
2529                 * flags so that the buffers may be allocated un-cached */
2530 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2531 newStream->usage |=
2532 (GRALLOC_USAGE_SW_READ_RARELY |
2533 GRALLOC_USAGE_SW_WRITE_RARELY |
2534 GRALLOC_USAGE_HW_CAMERA_WRITE);
2535 else if (IS_USAGE_ZSL(newStream->usage))
2536 {
2537 LOGD("ZSL usage flag skipping");
2538 }
2539 else if (newStream == zslStream
2540 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2541 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2542 } else
2543 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2544 break;
2545 default:
2546 LOGE("Invalid stream_type %d", newStream->stream_type);
2547 break;
2548 }
2549
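            // Construct the channel object that backs this stream; the channel
            // pointer is stored in newStream->priv and its buffer count is
            // reported back to the framework through newStream->max_buffers.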
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002550 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002551 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2552 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2553 QCamera3ProcessingChannel *channel = NULL;
2554 switch (newStream->format) {
2555 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2556 if ((newStream->usage &
2557 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2558 (streamList->operation_mode ==
2559 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2560 ) {
2561 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2562 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002563 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002564 this,
2565 newStream,
2566 (cam_stream_type_t)
2567 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2568 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2569 mMetadataChannel,
2570 0); //heap buffers are not required for HFR video channel
2571 if (channel == NULL) {
2572 LOGE("allocation of channel failed");
2573 pthread_mutex_unlock(&mMutex);
2574 return -ENOMEM;
2575 }
2576 //channel->getNumBuffers() will return 0 here so use
2577                        //MAX_INFLIGHT_HFR_REQUESTS
2578 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2579 newStream->priv = channel;
2580 LOGI("num video buffers in HFR mode: %d",
2581 MAX_INFLIGHT_HFR_REQUESTS);
2582 } else {
2583                        /* In the HFR preview-only case, copy the stream contents to
2584                         * create a dummy batch channel so that sensor streaming stays
2585                         * in HFR mode */
2586 if (!m_bIsVideo && (streamList->operation_mode ==
2587 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2588 mDummyBatchStream = *newStream;
2589 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002590 int bufferCount = MAX_INFLIGHT_REQUESTS;
2591 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2592 CAM_STREAM_TYPE_VIDEO) {
2593 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2594 bufferCount = MAX_VIDEO_BUFFERS;
2595 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002596 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2597 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002598 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002599 this,
2600 newStream,
2601 (cam_stream_type_t)
2602 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2603 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2604 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002605 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002606 if (channel == NULL) {
2607 LOGE("allocation of channel failed");
2608 pthread_mutex_unlock(&mMutex);
2609 return -ENOMEM;
2610 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002611 /* disable UBWC for preview, though supported,
2612 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002613 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002614 (previewSize.width == (int32_t)videoWidth)&&
2615 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002616 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002617 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002618 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002619 /* When goog_zoom is linked to the preview or video stream,
2620 * disable ubwc to the linked stream */
2621 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2622 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2623 channel->setUBWCEnabled(false);
2624 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002625 newStream->max_buffers = channel->getNumBuffers();
2626 newStream->priv = channel;
2627 }
2628 break;
2629 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2630 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2631 mChannelHandle,
2632 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002633 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002634 this,
2635 newStream,
2636 (cam_stream_type_t)
2637 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2638 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2639 mMetadataChannel);
2640 if (channel == NULL) {
2641 LOGE("allocation of YUV channel failed");
2642 pthread_mutex_unlock(&mMutex);
2643 return -ENOMEM;
2644 }
2645 newStream->max_buffers = channel->getNumBuffers();
2646 newStream->priv = channel;
2647 break;
2648 }
2649 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2650 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002651 case HAL_PIXEL_FORMAT_RAW10: {
2652 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2653 (HAL_DATASPACE_DEPTH != newStream->data_space))
2654 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002655 mRawChannel = new QCamera3RawChannel(
2656 mCameraHandle->camera_handle, mChannelHandle,
2657 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002658 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002659 this, newStream,
2660 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002661 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002662 if (mRawChannel == NULL) {
2663 LOGE("allocation of raw channel failed");
2664 pthread_mutex_unlock(&mMutex);
2665 return -ENOMEM;
2666 }
2667 newStream->max_buffers = mRawChannel->getNumBuffers();
2668 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2669 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002670 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002671 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002672 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2673 mDepthChannel = new QCamera3DepthChannel(
2674 mCameraHandle->camera_handle, mChannelHandle,
2675 mCameraHandle->ops, NULL, NULL, &padding_info,
2676 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2677 mMetadataChannel);
2678 if (NULL == mDepthChannel) {
2679 LOGE("Allocation of depth channel failed");
2680 pthread_mutex_unlock(&mMutex);
2681 return NO_MEMORY;
2682 }
2683 newStream->priv = mDepthChannel;
2684 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2685 } else {
2686 // Max live snapshot inflight buffer is 1. This is to mitigate
2687 // frame drop issues for video snapshot. The more buffers being
2688 // allocated, the more frame drops there are.
2689 mPictureChannel = new QCamera3PicChannel(
2690 mCameraHandle->camera_handle, mChannelHandle,
2691 mCameraHandle->ops, captureResultCb,
2692 setBufferErrorStatus, &padding_info, this, newStream,
2693 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2694 m_bIs4KVideo, isZsl, mMetadataChannel,
2695 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2696 if (mPictureChannel == NULL) {
2697 LOGE("allocation of channel failed");
2698 pthread_mutex_unlock(&mMutex);
2699 return -ENOMEM;
2700 }
2701 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2702 newStream->max_buffers = mPictureChannel->getNumBuffers();
2703 mPictureChannel->overrideYuvSize(
2704 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2705 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002707 break;
2708
2709 default:
2710 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002711 pthread_mutex_unlock(&mMutex);
2712 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002713 }
2714 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2715 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2716 } else {
2717 LOGE("Error, Unknown stream type");
2718 pthread_mutex_unlock(&mMutex);
2719 return -EINVAL;
2720 }
2721
2722 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002723 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002724 // Here we only care whether it's EIS3 or not
2725 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2726 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2727 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2728 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002729 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002730 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002731 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002732 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2733 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2734 }
2735 }
2736
2737 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2738 it != mStreamInfo.end(); it++) {
2739 if ((*it)->stream == newStream) {
2740 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2741 break;
2742 }
2743 }
2744 } else {
2745 // Channel already exists for this stream
2746 // Do nothing for now
2747 }
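        // Reset padding_info to the camera's default padding before handling the
        // next stream in this loop; several of the channels constructed above are
        // given a pointer to this value.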
2748 padding_info = gCamCapability[mCameraId]->padding_info;
2749
Emilian Peev7650c122017-01-19 08:24:33 -08002750 /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002751 * since there is no real stream associated with them
2752 */
Emilian Peev7650c122017-01-19 08:24:33 -08002753 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002754 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2755 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002756 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002757 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002758 }
2759
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002760 // Let buffer dispatcher know the configured streams.
2761 mOutputBufferDispatcher.configureStreams(streamList);
2762
Binhao Lincdb362a2017-04-20 13:31:54 -07002763 // By default, preview stream TNR is disabled.
2764 // Enable TNR for the preview stream if all conditions below are satisfied:
2765 // 1. resolution <= 1080p.
2766 // 2. preview resolution == video resolution.
2767 // 3. video stream TNR is enabled.
2768 // 4. EIS2.0
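    // For example, a 1920x1080 preview paired with a 1920x1080 TNR-enabled video
    // stream under EIS 2.0 gets CAM_QCOM_FEATURE_CPP_TNR added to its postprocess
    // mask, while CAM_QCOM_FEATURE_CDS is cleared because TNR and CDS are
    // mutually exclusive.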
2769 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2770 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2771 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2772 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2773 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2774 video_stream->width == preview_stream->width &&
2775 video_stream->height == preview_stream->height) {
2776 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2777 CAM_QCOM_FEATURE_CPP_TNR;
2778 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2779 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2780 ~CAM_QCOM_FEATURE_CDS;
2781 }
2782 }
2783
Thierry Strudel2896d122017-02-23 19:18:03 -08002784 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2785 onlyRaw = false;
2786 }
2787
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002788 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002789 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002790 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002791 cam_analysis_info_t analysisInfo;
2792 int32_t ret = NO_ERROR;
2793 ret = mCommon.getAnalysisInfo(
2794 FALSE,
2795 analysisFeatureMask,
2796 &analysisInfo);
2797 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002798 cam_color_filter_arrangement_t analysis_color_arrangement =
2799 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2800 CAM_FILTER_ARRANGEMENT_Y :
2801 gCamCapability[mCameraId]->color_arrangement);
2802 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2803 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002804 cam_dimension_t analysisDim;
2805 analysisDim = mCommon.getMatchingDimension(previewSize,
2806 analysisInfo.analysis_recommended_res);
2807
2808 mAnalysisChannel = new QCamera3SupportChannel(
2809 mCameraHandle->camera_handle,
2810 mChannelHandle,
2811 mCameraHandle->ops,
2812 &analysisInfo.analysis_padding_info,
2813 analysisFeatureMask,
2814 CAM_STREAM_TYPE_ANALYSIS,
2815 &analysisDim,
2816 (analysisInfo.analysis_format
2817 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2818 : CAM_FORMAT_YUV_420_NV21),
2819 analysisInfo.hw_analysis_supported,
2820 gCamCapability[mCameraId]->color_arrangement,
2821 this,
2822 0); // force buffer count to 0
2823 } else {
2824 LOGW("getAnalysisInfo failed, ret = %d", ret);
2825 }
2826 if (!mAnalysisChannel) {
2827 LOGW("Analysis channel cannot be created");
2828 }
2829 }
2830
Thierry Strudel3d639192016-09-09 11:52:26 -07002831 //RAW DUMP channel
2832 if (mEnableRawDump && isRawStreamRequested == false){
2833 cam_dimension_t rawDumpSize;
2834 rawDumpSize = getMaxRawSize(mCameraId);
2835 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2836 setPAAFSupport(rawDumpFeatureMask,
2837 CAM_STREAM_TYPE_RAW,
2838 gCamCapability[mCameraId]->color_arrangement);
2839 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2840 mChannelHandle,
2841 mCameraHandle->ops,
2842 rawDumpSize,
2843 &padding_info,
2844 this, rawDumpFeatureMask);
2845 if (!mRawDumpChannel) {
2846 LOGE("Raw Dump channel cannot be created");
2847 pthread_mutex_unlock(&mMutex);
2848 return -ENOMEM;
2849 }
2850 }
2851
Thierry Strudel3d639192016-09-09 11:52:26 -07002852 if (mAnalysisChannel) {
2853 cam_analysis_info_t analysisInfo;
2854 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2855 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2856 CAM_STREAM_TYPE_ANALYSIS;
2857 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2858 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002859 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002860 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2861 &analysisInfo);
2862 if (rc != NO_ERROR) {
2863 LOGE("getAnalysisInfo failed, ret = %d", rc);
2864 pthread_mutex_unlock(&mMutex);
2865 return rc;
2866 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002867 cam_color_filter_arrangement_t analysis_color_arrangement =
2868 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2869 CAM_FILTER_ARRANGEMENT_Y :
2870 gCamCapability[mCameraId]->color_arrangement);
2871 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2872 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2873 analysis_color_arrangement);
2874
Thierry Strudel3d639192016-09-09 11:52:26 -07002875 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002876 mCommon.getMatchingDimension(previewSize,
2877 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002878 mStreamConfigInfo.num_streams++;
2879 }
2880
Thierry Strudel2896d122017-02-23 19:18:03 -08002881 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002882 cam_analysis_info_t supportInfo;
2883 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2884 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2885 setPAAFSupport(callbackFeatureMask,
2886 CAM_STREAM_TYPE_CALLBACK,
2887 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002888 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002889 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002890 if (ret != NO_ERROR) {
2891 /* Ignore the error for Mono camera
2892 * because the PAAF bit mask is only set
2893 * for CAM_STREAM_TYPE_ANALYSIS stream type
2894 */
2895 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2896 LOGW("getAnalysisInfo failed, ret = %d", ret);
2897 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002898 }
2899 mSupportChannel = new QCamera3SupportChannel(
2900 mCameraHandle->camera_handle,
2901 mChannelHandle,
2902 mCameraHandle->ops,
2903 &gCamCapability[mCameraId]->padding_info,
2904 callbackFeatureMask,
2905 CAM_STREAM_TYPE_CALLBACK,
2906 &QCamera3SupportChannel::kDim,
2907 CAM_FORMAT_YUV_420_NV21,
2908 supportInfo.hw_analysis_supported,
2909 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002910 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002911 if (!mSupportChannel) {
2912 LOGE("dummy channel cannot be created");
2913 pthread_mutex_unlock(&mMutex);
2914 return -ENOMEM;
2915 }
2916 }
2917
2918 if (mSupportChannel) {
2919 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2920 QCamera3SupportChannel::kDim;
2921 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2922 CAM_STREAM_TYPE_CALLBACK;
2923 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2924 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2925 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2926 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2927 gCamCapability[mCameraId]->color_arrangement);
2928 mStreamConfigInfo.num_streams++;
2929 }
2930
2931 if (mRawDumpChannel) {
2932 cam_dimension_t rawSize;
2933 rawSize = getMaxRawSize(mCameraId);
2934 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2935 rawSize;
2936 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2937 CAM_STREAM_TYPE_RAW;
2938 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2939 CAM_QCOM_FEATURE_NONE;
2940 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2941 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2942 gCamCapability[mCameraId]->color_arrangement);
2943 mStreamConfigInfo.num_streams++;
2944 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002945
2946 if (mHdrPlusRawSrcChannel) {
2947 cam_dimension_t rawSize;
2948 rawSize = getMaxRawSize(mCameraId);
2949 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2950 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2951 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2952 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2953 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2954 gCamCapability[mCameraId]->color_arrangement);
2955 mStreamConfigInfo.num_streams++;
2956 }
2957
Thierry Strudel3d639192016-09-09 11:52:26 -07002958 /* In HFR mode, if video stream is not added, create a dummy channel so that
2959 * the ISP can run in batch mode even for the preview-only case. This channel is
2960 * never 'start'ed (no stream-on), it is only 'initialized' */
2961 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2962 !m_bIsVideo) {
2963 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2964 setPAAFSupport(dummyFeatureMask,
2965 CAM_STREAM_TYPE_VIDEO,
2966 gCamCapability[mCameraId]->color_arrangement);
2967 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2968 mChannelHandle,
2969 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002970 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002971 this,
2972 &mDummyBatchStream,
2973 CAM_STREAM_TYPE_VIDEO,
2974 dummyFeatureMask,
2975 mMetadataChannel);
2976 if (NULL == mDummyBatchChannel) {
2977 LOGE("creation of mDummyBatchChannel failed."
2978 "Preview will use non-hfr sensor mode ");
2979 }
2980 }
2981 if (mDummyBatchChannel) {
2982 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2983 mDummyBatchStream.width;
2984 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2985 mDummyBatchStream.height;
2986 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2987 CAM_STREAM_TYPE_VIDEO;
2988 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2989 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2990 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2991 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2992 gCamCapability[mCameraId]->color_arrangement);
2993 mStreamConfigInfo.num_streams++;
2994 }
2995
2996 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2997 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002998 m_bIs4KVideo ? 0 :
2999 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003000
3001 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3002 for (pendingRequestIterator i = mPendingRequestsList.begin();
3003 i != mPendingRequestsList.end();) {
3004 i = erasePendingRequest(i);
3005 }
3006 mPendingFrameDropList.clear();
3007 // Initialize/Reset the pending buffers list
3008 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3009 req.mPendingBufferList.clear();
3010 }
3011 mPendingBuffersMap.mPendingBuffersInRequest.clear();
3012
Thierry Strudel3d639192016-09-09 11:52:26 -07003013 mCurJpegMeta.clear();
3014 //Get min frame duration for this streams configuration
3015 deriveMinFrameDuration();
3016
Chien-Yu Chenee335912017-02-09 17:53:20 -08003017 mFirstPreviewIntentSeen = false;
3018
3019 // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003020 {
3021 Mutex::Autolock l(gHdrPlusClientLock);
3022 disableHdrPlusModeLocked();
3023 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003024
Thierry Strudel3d639192016-09-09 11:52:26 -07003025 // Update state
3026 mState = CONFIGURED;
3027
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003028 mFirstMetadataCallback = true;
3029
Thierry Strudel3d639192016-09-09 11:52:26 -07003030 pthread_mutex_unlock(&mMutex);
3031
3032 return rc;
3033}
3034
3035/*===========================================================================
3036 * FUNCTION : validateCaptureRequest
3037 *
3038 * DESCRIPTION: validate a capture request from camera service
3039 *
3040 * PARAMETERS :
3041 * @request : request from framework to process
3042 *
3043 * RETURN :
3044 *
3045 *==========================================================================*/
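// The checks below cover, in order: a non-NULL request, settings present on the
// first request after a configure, at least one output buffer (or an internally
// requested stream), a sane output buffer count, and for the input buffer and
// every output buffer an OK status, no release fence, and non-NULL handles.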
3046int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003047 camera3_capture_request_t *request,
3048 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003049{
3050 ssize_t idx = 0;
3051 const camera3_stream_buffer_t *b;
3052 CameraMetadata meta;
3053
3054 /* Sanity check the request */
3055 if (request == NULL) {
3056 LOGE("NULL capture request");
3057 return BAD_VALUE;
3058 }
3059
3060 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3061 /*settings cannot be null for the first request*/
3062 return BAD_VALUE;
3063 }
3064
3065 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003066 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3067 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003068 LOGE("Request %d: No output buffers provided!",
3069 frameNumber);
3070 return BAD_VALUE;
3071 }
3072 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3073 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3074 request->num_output_buffers, MAX_NUM_STREAMS);
3075 return BAD_VALUE;
3076 }
3077 if (request->input_buffer != NULL) {
3078 b = request->input_buffer;
3079 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3080 LOGE("Request %d: Buffer %ld: Status not OK!",
3081 frameNumber, (long)idx);
3082 return BAD_VALUE;
3083 }
3084 if (b->release_fence != -1) {
3085 LOGE("Request %d: Buffer %ld: Has a release fence!",
3086 frameNumber, (long)idx);
3087 return BAD_VALUE;
3088 }
3089 if (b->buffer == NULL) {
3090 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3091 frameNumber, (long)idx);
3092 return BAD_VALUE;
3093 }
3094 }
3095
3096 // Validate all buffers
3097 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003098 if (b == NULL) {
3099 return BAD_VALUE;
3100 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003101 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003102 QCamera3ProcessingChannel *channel =
3103 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3104 if (channel == NULL) {
3105 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3106 frameNumber, (long)idx);
3107 return BAD_VALUE;
3108 }
3109 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3110 LOGE("Request %d: Buffer %ld: Status not OK!",
3111 frameNumber, (long)idx);
3112 return BAD_VALUE;
3113 }
3114 if (b->release_fence != -1) {
3115 LOGE("Request %d: Buffer %ld: Has a release fence!",
3116 frameNumber, (long)idx);
3117 return BAD_VALUE;
3118 }
3119 if (b->buffer == NULL) {
3120 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3121 frameNumber, (long)idx);
3122 return BAD_VALUE;
3123 }
3124 if (*(b->buffer) == NULL) {
3125 LOGE("Request %d: Buffer %ld: NULL private handle!",
3126 frameNumber, (long)idx);
3127 return BAD_VALUE;
3128 }
3129 idx++;
3130 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003131 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003132 return NO_ERROR;
3133}
3134
3135/*===========================================================================
3136 * FUNCTION : deriveMinFrameDuration
3137 *
3138 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3139 * on currently configured streams.
3140 *
3141 * PARAMETERS : NONE
3142 *
3143 * RETURN : NONE
3144 *
3145 *==========================================================================*/
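// Worked example (hypothetical sizes): with a 1920x1080 YUV stream, a 4032x3024
// BLOB stream and a 1280x720 RAW16 stream, maxProcessedDim is lifted to the BLOB
// dimension; because it exceeds maxRawDim, maxRawDim is replaced by the smallest
// entry in gCamCapability raw_dim that is at least as large. The raw_min_duration
// and picture_min_duration tables then provide the minimum raw, processed and
// jpeg frame durations for those matched sizes.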
3146void QCamera3HardwareInterface::deriveMinFrameDuration()
3147{
3148 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003149 bool hasRaw = false;
3150
3151 mMinRawFrameDuration = 0;
3152 mMinJpegFrameDuration = 0;
3153 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003154
3155 maxJpegDim = 0;
3156 maxProcessedDim = 0;
3157 maxRawDim = 0;
3158
3159 // Figure out maximum jpeg, processed, and raw dimensions
3160 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3161 it != mStreamInfo.end(); it++) {
3162
3163 // Skip input streams; they don't contribute to the output frame durations
3164 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3165 continue;
3166
3167 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3168 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3169 if (dimension > maxJpegDim)
3170 maxJpegDim = dimension;
3171 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3172 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3173 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003174 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003175 if (dimension > maxRawDim)
3176 maxRawDim = dimension;
3177 } else {
3178 if (dimension > maxProcessedDim)
3179 maxProcessedDim = dimension;
3180 }
3181 }
3182
3183 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3184 MAX_SIZES_CNT);
3185
3186 //Assume all jpeg dimensions are in processed dimensions.
3187 if (maxJpegDim > maxProcessedDim)
3188 maxProcessedDim = maxJpegDim;
3189 //Find the smallest raw dimension that is greater than or equal to the max processed dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003190 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003191 maxRawDim = INT32_MAX;
3192
3193 for (size_t i = 0; i < count; i++) {
3194 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3195 gCamCapability[mCameraId]->raw_dim[i].height;
3196 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3197 maxRawDim = dimension;
3198 }
3199 }
3200
3201 //Find minimum durations for processed, jpeg, and raw
3202 for (size_t i = 0; i < count; i++) {
3203 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3204 gCamCapability[mCameraId]->raw_dim[i].height) {
3205 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3206 break;
3207 }
3208 }
3209 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3210 for (size_t i = 0; i < count; i++) {
3211 if (maxProcessedDim ==
3212 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3213 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3214 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3215 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3216 break;
3217 }
3218 }
3219}
3220
3221/*===========================================================================
3222 * FUNCTION : getMinFrameDuration
3223 *
3224 * DESCRIPTION: get minimum frame duration based on the currently derived
3225 * minimum frame durations and the current request configuration.
3226 *
3227 * PARAMETERS : @request: request sent by the frameworks
3228 *
3229 * RETURN : min frame duration for a particular request
3230 *
3231 *==========================================================================*/
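// E.g. a request containing only preview/video buffers returns
// MAX(mMinRawFrameDuration, mMinProcessedFrameDuration); once a BLOB (JPEG)
// buffer is part of the request, mMinJpegFrameDuration is folded into that
// maximum as well.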
3232int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3233{
3234 bool hasJpegStream = false;
3235 bool hasRawStream = false;
3236 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3237 const camera3_stream_t *stream = request->output_buffers[i].stream;
3238 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3239 hasJpegStream = true;
3240 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3241 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3242 stream->format == HAL_PIXEL_FORMAT_RAW16)
3243 hasRawStream = true;
3244 }
3245
3246 if (!hasJpegStream)
3247 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3248 else
3249 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3250}
3251
3252/*===========================================================================
3253 * FUNCTION : handleBuffersDuringFlushLock
3254 *
3255 * DESCRIPTION: Account for buffers returned from back-end during flush.
3256 * This function is executed while mMutex is held by the caller.
3257 *
3258 * PARAMETERS :
3259 * @buffer: image buffer for the callback
3260 *
3261 * RETURN :
3262 *==========================================================================*/
3263void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3264{
3265 bool buffer_found = false;
3266 for (List<PendingBuffersInRequest>::iterator req =
3267 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3268 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3269 for (List<PendingBufferInfo>::iterator i =
3270 req->mPendingBufferList.begin();
3271 i != req->mPendingBufferList.end(); i++) {
3272 if (i->buffer == buffer->buffer) {
3273 mPendingBuffersMap.numPendingBufsAtFlush--;
3274 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3275 buffer->buffer, req->frame_number,
3276 mPendingBuffersMap.numPendingBufsAtFlush);
3277 buffer_found = true;
3278 break;
3279 }
3280 }
3281 if (buffer_found) {
3282 break;
3283 }
3284 }
3285 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3286 //signal the flush()
3287 LOGD("All buffers returned to HAL. Continue flush");
3288 pthread_cond_signal(&mBuffersCond);
3289 }
3290}
3291
Thierry Strudel3d639192016-09-09 11:52:26 -07003292/*===========================================================================
3293 * FUNCTION : handleBatchMetadata
3294 *
3295 * DESCRIPTION: Handles metadata buffer callback in batch mode
3296 *
3297 * PARAMETERS : @metadata_buf: metadata buffer
3298 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3299 * the meta buf in this method
3300 *
3301 * RETURN :
3302 *
3303 *==========================================================================*/
3304void QCamera3HardwareInterface::handleBatchMetadata(
3305 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3306{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003307 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003308
3309 if (NULL == metadata_buf) {
3310 LOGE("metadata_buf is NULL");
3311 return;
3312 }
3313 /* In batch mode, the metadata will contain the frame number and timestamp of
3314 * the last frame in the batch. E.g. a batch containing buffers from requests
3315 * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8.
3316 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3317 * multiple process_capture_results */
3318 metadata_buffer_t *metadata =
3319 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3320 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3321 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3322 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3323 uint32_t frame_number = 0, urgent_frame_number = 0;
3324 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3325 bool invalid_metadata = false;
3326 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3327 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003328 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003329
3330 int32_t *p_frame_number_valid =
3331 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3332 uint32_t *p_frame_number =
3333 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3334 int64_t *p_capture_time =
3335 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3336 int32_t *p_urgent_frame_number_valid =
3337 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3338 uint32_t *p_urgent_frame_number =
3339 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3340
3341 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3342 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3343 (NULL == p_urgent_frame_number)) {
3344 LOGE("Invalid metadata");
3345 invalid_metadata = true;
3346 } else {
3347 frame_number_valid = *p_frame_number_valid;
3348 last_frame_number = *p_frame_number;
3349 last_frame_capture_time = *p_capture_time;
3350 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3351 last_urgent_frame_number = *p_urgent_frame_number;
3352 }
3353
3354 /* In batch mode, when no video buffers are requested, set_parms are sent
3355 * for every capture_request. The difference between consecutive urgent
3356 * frame numbers and frame numbers is used to interpolate the
3357 * corresponding frame numbers and timestamps */
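    /* E.g. (hypothetical 4-frame batch): last_frame_number = 8 and
     * first_frame_number = 5 give frameNumDiff = 4; the loop below then reports
     * frames 5, 6, 7 and 8 one by one, spacing their timestamps backwards from
     * the batch timestamp in steps of NSEC_PER_SEC / mHFRVideoFps. */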
3358 pthread_mutex_lock(&mMutex);
3359 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003360 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3361 if(idx < 0) {
3362 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3363 last_urgent_frame_number);
3364 mState = ERROR;
3365 pthread_mutex_unlock(&mMutex);
3366 return;
3367 }
3368 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003369 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3370 first_urgent_frame_number;
3371
3372 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3373 urgent_frame_number_valid,
3374 first_urgent_frame_number, last_urgent_frame_number);
3375 }
3376
3377 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003378 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3379 if(idx < 0) {
3380 LOGE("Invalid frame number received: %d. Irrecoverable error",
3381 last_frame_number);
3382 mState = ERROR;
3383 pthread_mutex_unlock(&mMutex);
3384 return;
3385 }
3386 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003387 frameNumDiff = last_frame_number + 1 -
3388 first_frame_number;
3389 mPendingBatchMap.removeItem(last_frame_number);
3390
3391 LOGD("frm: valid: %d frm_num: %d - %d",
3392 frame_number_valid,
3393 first_frame_number, last_frame_number);
3394
3395 }
3396 pthread_mutex_unlock(&mMutex);
3397
3398 if (urgent_frame_number_valid || frame_number_valid) {
3399 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3400 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3401 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3402 urgentFrameNumDiff, last_urgent_frame_number);
3403 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3404 LOGE("frameNumDiff: %d frameNum: %d",
3405 frameNumDiff, last_frame_number);
3406 }
3407
3408 for (size_t i = 0; i < loopCount; i++) {
3409 /* handleMetadataWithLock is called even for invalid_metadata for
3410 * pipeline depth calculation */
3411 if (!invalid_metadata) {
3412 /* Infer frame number. Batch metadata contains frame number of the
3413 * last frame */
3414 if (urgent_frame_number_valid) {
3415 if (i < urgentFrameNumDiff) {
3416 urgent_frame_number =
3417 first_urgent_frame_number + i;
3418 LOGD("inferred urgent frame_number: %d",
3419 urgent_frame_number);
3420 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3421 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3422 } else {
3423 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3424 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3425 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3426 }
3427 }
3428
3429 /* Infer frame number. Batch metadata contains frame number of the
3430 * last frame */
3431 if (frame_number_valid) {
3432 if (i < frameNumDiff) {
3433 frame_number = first_frame_number + i;
3434 LOGD("inferred frame_number: %d", frame_number);
3435 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3436 CAM_INTF_META_FRAME_NUMBER, frame_number);
3437 } else {
3438 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3439 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3440 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3441 }
3442 }
3443
3444 if (last_frame_capture_time) {
3445 //Infer timestamp
3446 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003447 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003448 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003449 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003450 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3451 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3452 LOGD("batch capture_time: %lld, capture_time: %lld",
3453 last_frame_capture_time, capture_time);
3454 }
3455 }
3456 pthread_mutex_lock(&mMutex);
3457 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003458 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003459 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3460 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003461 &is_metabuf_queued /* if metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003462 pthread_mutex_unlock(&mMutex);
3463 }
3464
3465 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003466 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003467 mMetadataChannel->bufDone(metadata_buf);
3468 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003469 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003470 }
3471}
3472
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003473void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3474 camera3_error_msg_code_t errorCode)
3475{
3476 camera3_notify_msg_t notify_msg;
3477 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3478 notify_msg.type = CAMERA3_MSG_ERROR;
3479 notify_msg.message.error.error_code = errorCode;
3480 notify_msg.message.error.error_stream = NULL;
3481 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003482 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003483
3484 return;
3485}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003486
3487/*===========================================================================
3488 * FUNCTION : sendPartialMetadataWithLock
3489 *
3490 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3491 *
3492 * PARAMETERS : @metadata: metadata buffer
3493 * @requestIter: The iterator for the pending capture request for
3494 * which the partial result is being sent
3495 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3496 * last urgent metadata in a batch. Always true for non-batch mode
3497 *
3498 * RETURN :
3499 *
3500 *==========================================================================*/
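// A partial result carries only the urgent 3A metadata produced by
// translateCbUrgentMetadataToResultMetadata(); the framework matches it to the
// request by frame number and partial_result count, and the complete metadata
// follows later as a separate capture result.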
3501
3502void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3503 metadata_buffer_t *metadata,
3504 const pendingRequestIterator requestIter,
3505 bool lastUrgentMetadataInBatch)
3506{
3507 camera3_capture_result_t result;
3508 memset(&result, 0, sizeof(camera3_capture_result_t));
3509
3510 requestIter->partial_result_cnt++;
3511
3512 // Extract 3A metadata
3513 result.result = translateCbUrgentMetadataToResultMetadata(
3514 metadata, lastUrgentMetadataInBatch);
3515 // Populate metadata result
3516 result.frame_number = requestIter->frame_number;
3517 result.num_output_buffers = 0;
3518 result.output_buffers = NULL;
3519 result.partial_result = requestIter->partial_result_cnt;
3520
3521 {
3522 Mutex::Autolock l(gHdrPlusClientLock);
3523 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3524 // Notify HDR+ client about the partial metadata.
3525 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3526 result.partial_result == PARTIAL_RESULT_COUNT);
3527 }
3528 }
3529
3530 orchestrateResult(&result);
3531 LOGD("urgent frame_number = %u", result.frame_number);
3532 free_camera_metadata((camera_metadata_t *)result.result);
3533}
3534
Thierry Strudel3d639192016-09-09 11:52:26 -07003535/*===========================================================================
3536 * FUNCTION : handleMetadataWithLock
3537 *
3538 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3539 *
3540 * PARAMETERS : @metadata_buf: metadata buffer
3541 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3542 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003543 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3544 * last urgent metadata in a batch. Always true for non-batch mode
3545 * @lastMetadataInBatch: Boolean to indicate whether this is the
3546 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003547 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3548 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003549 *
3550 * RETURN :
3551 *
3552 *==========================================================================*/
3553void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003554 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003555 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3556 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003557{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003558 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003559 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3560 //during flush do not send metadata from this thread
3561 LOGD("not sending metadata during flush or when mState is error");
3562 if (free_and_bufdone_meta_buf) {
3563 mMetadataChannel->bufDone(metadata_buf);
3564 free(metadata_buf);
3565 }
3566 return;
3567 }
3568
3569 //not in flush
3570 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3571 int32_t frame_number_valid, urgent_frame_number_valid;
3572 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003573 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003574 nsecs_t currentSysTime;
3575
3576 int32_t *p_frame_number_valid =
3577 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3578 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3579 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003580 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003581 int32_t *p_urgent_frame_number_valid =
3582 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3583 uint32_t *p_urgent_frame_number =
3584 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3585 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3586 metadata) {
3587 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3588 *p_frame_number_valid, *p_frame_number);
3589 }
3590
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003591 camera_metadata_t *resultMetadata = nullptr;
3592
Thierry Strudel3d639192016-09-09 11:52:26 -07003593 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3594 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3595 LOGE("Invalid metadata");
3596 if (free_and_bufdone_meta_buf) {
3597 mMetadataChannel->bufDone(metadata_buf);
3598 free(metadata_buf);
3599 }
3600 goto done_metadata;
3601 }
3602 frame_number_valid = *p_frame_number_valid;
3603 frame_number = *p_frame_number;
3604 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003605 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003606 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3607 urgent_frame_number = *p_urgent_frame_number;
3608 currentSysTime = systemTime(CLOCK_MONOTONIC);
3609
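    // If the sensor timestamp is not already calibrated to the monotonic clock,
    // estimate the offset between SYSTEM_TIME_BOOTTIME and SYSTEM_TIME_MONOTONIC
    // by sampling both clocks a few times and keeping the measurement with the
    // smallest sampling window, then subtract that offset from capture_time
    // (this appears to rebase the sensor timestamp from the boottime clock to
    // the monotonic clock).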
Jason Lee603176d2017-05-31 11:43:27 -07003610 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3611 const int tries = 3;
3612 nsecs_t bestGap, measured;
3613 for (int i = 0; i < tries; ++i) {
3614 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3615 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3616 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3617 const nsecs_t gap = tmono2 - tmono;
3618 if (i == 0 || gap < bestGap) {
3619 bestGap = gap;
3620 measured = tbase - ((tmono + tmono2) >> 1);
3621 }
3622 }
3623 capture_time -= measured;
3624 }
3625
Thierry Strudel3d639192016-09-09 11:52:26 -07003626 // Detect if buffers from any requests are overdue
3627 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003628 int64_t timeout;
3629 {
3630 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3631 // If there is a pending HDR+ request, the following requests may be blocked until the
3632 // HDR+ request is done. So allow a longer timeout.
3633 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3634 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3635 }
3636
3637 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003638 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003639 assert(missed.stream->priv);
3640 if (missed.stream->priv) {
3641 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3642 assert(ch->mStreams[0]);
3643 if (ch->mStreams[0]) {
3644 LOGE("Cancel missing frame = %d, buffer = %p,"
3645 "stream type = %d, stream format = %d",
3646 req.frame_number, missed.buffer,
3647 ch->mStreams[0]->getMyType(), missed.stream->format);
3648 ch->timeoutFrame(req.frame_number);
3649 }
3650 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003651 }
3652 }
3653 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003654 //For the very first metadata callback, regardless of whether it contains a valid
3655 //frame number, send the partial metadata for the jumpstarting requests.
3656 //Note that this has to be done even if the metadata doesn't contain valid
3657 //urgent frame number, because in the case only 1 request is ever submitted
3658 //to HAL, there won't be subsequent valid urgent frame number.
3659 if (mFirstMetadataCallback) {
3660 for (pendingRequestIterator i =
3661 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3662 if (i->bUseFirstPartial) {
3663 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3664 }
3665 }
3666 mFirstMetadataCallback = false;
3667 }
3668
Thierry Strudel3d639192016-09-09 11:52:26 -07003669 //Partial result on process_capture_result for timestamp
3670 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003671 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003672 //Received an urgent frame number, handle it
3673 //Recieved an urgent Frame Number, handle it
3674 //using partial results
3675 for (pendingRequestIterator i =
3676 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3677 LOGD("Iterator Frame = %d urgent frame = %d",
3678 i->frame_number, urgent_frame_number);
3679
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003680 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003681 (i->partial_result_cnt == 0)) {
3682 LOGE("Error: HAL missed urgent metadata for frame number %d",
3683 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003684 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003685 }
3686
3687 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003688 i->partial_result_cnt == 0) {
3689 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003690 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3691 // Instant AEC settled for this frame.
3692 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3693 mInstantAECSettledFrameNumber = urgent_frame_number;
3694 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003695 break;
3696 }
3697 }
3698 }
3699
3700 if (!frame_number_valid) {
3701 LOGD("Not a valid normal frame number, used as SOF only");
3702 if (free_and_bufdone_meta_buf) {
3703 mMetadataChannel->bufDone(metadata_buf);
3704 free(metadata_buf);
3705 }
3706 goto done_metadata;
3707 }
3708 LOGH("valid frame_number = %u, capture_time = %lld",
3709 frame_number, capture_time);
3710
Emilian Peev7650c122017-01-19 08:24:33 -08003711 if (metadata->is_depth_data_valid) {
3712 handleDepthDataLocked(metadata->depth_data, frame_number);
3713 }
3714
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003715 // Check whether any stream buffer corresponding to this frame was dropped.
3716 // If dropped, send ERROR_BUFFER for the corresponding stream.
3717 // Alternatively, if instant AEC is enabled, frames need to be dropped until AEC is settled.
3718 for (auto & pendingRequest : mPendingRequestsList) {
3719 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3720 mInstantAECSettledFrameNumber)) {
3721 camera3_notify_msg_t notify_msg = {};
3722 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003723 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003724 QCamera3ProcessingChannel *channel =
3725 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003726 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003727 if (p_cam_frame_drop) {
3728 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003729 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003730 // Got the stream ID for drop frame.
3731 dropFrame = true;
3732 break;
3733 }
3734 }
3735 } else {
3736 // This is the instant AEC case.
3737 // For instant AEC, drop the stream until AEC is settled.
3738 dropFrame = true;
3739 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003740
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003741 if (dropFrame) {
3742 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3743 if (p_cam_frame_drop) {
3744 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003745 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003746 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003747 } else {
3748 // For instant AEC, inform frame drop and frame number
3749 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3750 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003751 pendingRequest.frame_number, streamID,
3752 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003753 }
3754 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003755 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003756 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003757 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003758 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003759 if (p_cam_frame_drop) {
3760 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003761 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003762 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003763 } else {
3764 // For instant AEC, inform frame drop and frame number
3765 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3766 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003767 pendingRequest.frame_number, streamID,
3768 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003769 }
3770 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003771 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003772 PendingFrameDrop.stream_ID = streamID;
3773 // Add the Frame drop info to mPendingFrameDropList
3774 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003775 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003776 }
3777 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003778 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003779
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003780 for (auto & pendingRequest : mPendingRequestsList) {
3781 // Find the pending request with the frame number.
3782 if (pendingRequest.frame_number == frame_number) {
3783 // Update the sensor timestamp.
3784 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003785
Thierry Strudel3d639192016-09-09 11:52:26 -07003786
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003787 /* Set the timestamp in display metadata so that clients aware of
3788 private_handle such as VT can use this un-modified timestamp.
3789 Camera framework is unaware of this timestamp and cannot change this */
Jason Lee603176d2017-05-31 11:43:27 -07003790 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003791
Thierry Strudel3d639192016-09-09 11:52:26 -07003792 // Find channel requiring metadata, meaning internal offline postprocess
3793 // is needed.
3794 //TODO: for now, we don't support two streams requiring metadata at the same time.
3795 // (because we are not making copies, and metadata buffer is not reference counted.
3796 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003797 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3798 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003799 if (iter->need_metadata) {
3800 internalPproc = true;
3801 QCamera3ProcessingChannel *channel =
3802 (QCamera3ProcessingChannel *)iter->stream->priv;
3803 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003804 if(p_is_metabuf_queued != NULL) {
3805 *p_is_metabuf_queued = true;
3806 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003807 break;
3808 }
3809 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003810 for (auto itr = pendingRequest.internalRequestList.begin();
3811 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003812 if (itr->need_metadata) {
3813 internalPproc = true;
3814 QCamera3ProcessingChannel *channel =
3815 (QCamera3ProcessingChannel *)itr->stream->priv;
3816 channel->queueReprocMetadata(metadata_buf);
3817 break;
3818 }
3819 }
3820
Thierry Strudel54dc9782017-02-15 12:12:10 -08003821 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003822
3823 bool *enableZsl = nullptr;
3824 if (gExposeEnableZslKey) {
3825 enableZsl = &pendingRequest.enableZsl;
3826 }
3827
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003828 resultMetadata = translateFromHalMetadata(metadata,
3829 pendingRequest.timestamp, pendingRequest.request_id,
3830 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3831 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003832 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003833 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003834 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003835 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003836 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003837 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003838
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003839 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003840
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003841 if (pendingRequest.blob_request) {
3842 //Dump tuning metadata if enabled and available
3843 char prop[PROPERTY_VALUE_MAX];
3844 memset(prop, 0, sizeof(prop));
3845 property_get("persist.camera.dumpmetadata", prop, "0");
3846 int32_t enabled = atoi(prop);
3847 if (enabled && metadata->is_tuning_params_valid) {
3848 dumpMetadataToFile(metadata->tuning_params,
3849 mMetaFrameCount,
3850 enabled,
3851 "Snapshot",
3852 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003853 }
3854 }
3855
3856 if (!internalPproc) {
3857 LOGD("couldn't find need_metadata for this metadata");
3858 // Return metadata buffer
3859 if (free_and_bufdone_meta_buf) {
3860 mMetadataChannel->bufDone(metadata_buf);
3861 free(metadata_buf);
3862 }
3863 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003864
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003865 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003866 }
3867 }
3868
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003869 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3870
3871 // Try to send out capture result metadata.
3872 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003873 return;
3874
Thierry Strudel3d639192016-09-09 11:52:26 -07003875done_metadata:
3876 for (pendingRequestIterator i = mPendingRequestsList.begin();
3877 i != mPendingRequestsList.end() ;i++) {
3878 i->pipeline_depth++;
3879 }
3880 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3881 unblockRequestIfNecessary();
3882}
3883
3884/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003885 * FUNCTION   : handleDepthDataLocked
3886 *
3887 * DESCRIPTION: Handles incoming depth data
3888 *
3889 * PARAMETERS : @depthData : Depth data
3890 * @frameNumber: Frame number of the incoming depth data
3891 *
3892 * RETURN :
3893 *
3894 *==========================================================================*/
3895void QCamera3HardwareInterface::handleDepthDataLocked(
3896 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3897 uint32_t currentFrameNumber;
3898 buffer_handle_t *depthBuffer;
3899
3900 if (nullptr == mDepthChannel) {
3901 LOGE("Depth channel not present!");
3902 return;
3903 }
3904
3905 camera3_stream_buffer_t resultBuffer =
3906 {.acquire_fence = -1,
3907 .release_fence = -1,
3908 .status = CAMERA3_BUFFER_STATUS_OK,
3909 .buffer = nullptr,
3910 .stream = mDepthChannel->getStream()};
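    // Drain the depth channel in frame-number order: buffers older than the
    // incoming frame are returned with error status (their depth data never
    // arrived), the matching frame is populated with the payload, and the loop
    // stops as soon as a newer frame is found.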
Emilian Peev7650c122017-01-19 08:24:33 -08003911 do {
3912 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3913 if (nullptr == depthBuffer) {
3914 break;
3915 }
3916
Emilian Peev7650c122017-01-19 08:24:33 -08003917 resultBuffer.buffer = depthBuffer;
3918 if (currentFrameNumber == frameNumber) {
3919 int32_t rc = mDepthChannel->populateDepthData(depthData,
3920 frameNumber);
3921 if (NO_ERROR != rc) {
3922 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3923 } else {
3924 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3925 }
3926 } else if (currentFrameNumber > frameNumber) {
3927 break;
3928 } else {
3929 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3930 {{currentFrameNumber, mDepthChannel->getStream(),
3931 CAMERA3_MSG_ERROR_BUFFER}}};
3932 orchestrateNotify(&notify_msg);
3933
3934            LOGE("Depth buffer for frame number: %d is missing, "
3935                    "returning it with error status!", currentFrameNumber);
3936 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3937 }
3938 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003939 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003940 } while (currentFrameNumber < frameNumber);
3941}
3942
3943/*===========================================================================
3944 * FUNCTION : notifyErrorFoPendingDepthData
3945 *
3946 * DESCRIPTION: Returns error for any pending depth buffers
3947 *
3948 * PARAMETERS : depthCh - depth channel that needs to get flushed
3949 *
3950 * RETURN :
3951 *
3952 *==========================================================================*/
3953void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3954 QCamera3DepthChannel *depthCh) {
3955 uint32_t currentFrameNumber;
3956 buffer_handle_t *depthBuffer;
3957
3958 if (nullptr == depthCh) {
3959 return;
3960 }
3961
3962 camera3_notify_msg_t notify_msg =
3963 {.type = CAMERA3_MSG_ERROR,
3964 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3965 camera3_stream_buffer_t resultBuffer =
3966 {.acquire_fence = -1,
3967 .release_fence = -1,
3968 .buffer = nullptr,
3969 .stream = depthCh->getStream(),
3970 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08003971
3972 while (nullptr !=
3973 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3974 depthCh->unmapBuffer(currentFrameNumber);
3975
3976 notify_msg.message.error.frame_number = currentFrameNumber;
3977 orchestrateNotify(&notify_msg);
3978
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003979 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003980 };
3981}
3982
3983/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003984 * FUNCTION : hdrPlusPerfLock
3985 *
3986 * DESCRIPTION: perf lock for HDR+ using custom intent
3987 *
3988 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3989 *
3990 * RETURN : None
3991 *
3992 *==========================================================================*/
3993void QCamera3HardwareInterface::hdrPlusPerfLock(
3994 mm_camera_super_buf_t *metadata_buf)
3995{
3996 if (NULL == metadata_buf) {
3997 LOGE("metadata_buf is NULL");
3998 return;
3999 }
4000 metadata_buffer_t *metadata =
4001 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4002 int32_t *p_frame_number_valid =
4003 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4004 uint32_t *p_frame_number =
4005 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4006
4007 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4008 LOGE("%s: Invalid metadata", __func__);
4009 return;
4010 }
4011
4012 //acquire perf lock for 5 sec after the last HDR frame is captured
4013    if (*p_frame_number_valid &&
4014            (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
4015        mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
4016    }
Thierry Strudel3d639192016-09-09 11:52:26 -07004019}
4020
4021/*===========================================================================
4022 * FUNCTION : handleInputBufferWithLock
4023 *
4024 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4025 *
4026 * PARAMETERS : @frame_number: frame number of the input buffer
4027 *
4028 * RETURN :
4029 *
4030 *==========================================================================*/
4031void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4032{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004033 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004034 pendingRequestIterator i = mPendingRequestsList.begin();
4035 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4036 i++;
4037 }
4038 if (i != mPendingRequestsList.end() && i->input_buffer) {
4039 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004040 CameraMetadata settings;
4041 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
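        // Prefer the sensor timestamp carried in the reprocess request's
        // settings; fall back to the current monotonic time if it is absent.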
4042 if(i->settings) {
4043 settings = i->settings;
4044 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4045 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004046 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004047 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004048 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004049 } else {
4050 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004051 }
4052
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004053 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4054 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4055 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004056
4057 camera3_capture_result result;
4058 memset(&result, 0, sizeof(camera3_capture_result));
4059 result.frame_number = frame_number;
4060 result.result = i->settings;
4061 result.input_buffer = i->input_buffer;
4062 result.partial_result = PARTIAL_RESULT_COUNT;
4063
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004064 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004065 LOGD("Input request metadata and input buffer frame_number = %u",
4066 i->frame_number);
4067 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004068
4069 // Dispatch result metadata that may be just unblocked by this reprocess result.
4070 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004071 } else {
4072 LOGE("Could not find input request for frame number %d", frame_number);
4073 }
4074}
4075
4076/*===========================================================================
4077 * FUNCTION : handleBufferWithLock
4078 *
4079 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4080 *
4081 * PARAMETERS : @buffer: image buffer for the callback
4082 * @frame_number: frame number of the image buffer
4083 *
4084 * RETURN :
4085 *
4086 *==========================================================================*/
4087void QCamera3HardwareInterface::handleBufferWithLock(
4088 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4089{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004090 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004091
4092 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4093 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4094 }
4095
Thierry Strudel3d639192016-09-09 11:52:26 -07004096 /* Nothing to be done during error state */
4097 if ((ERROR == mState) || (DEINIT == mState)) {
4098 return;
4099 }
4100 if (mFlushPerf) {
4101 handleBuffersDuringFlushLock(buffer);
4102 return;
4103 }
4104 //not in flush
4105 // If the frame number doesn't exist in the pending request list,
4106 // directly send the buffer to the frameworks, and update pending buffers map
4107 // Otherwise, book-keep the buffer.
4108 pendingRequestIterator i = mPendingRequestsList.begin();
4109 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4110 i++;
4111 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004112
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004113 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004114 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004115 // For a reprocessing request, try to send out result metadata.
4116 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004117 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004118 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004119
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004120 // Check if this frame was dropped.
4121 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4122 m != mPendingFrameDropList.end(); m++) {
4123 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4124 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4125 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4126 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4127 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4128 frame_number, streamID);
4129 m = mPendingFrameDropList.erase(m);
4130 break;
4131 }
4132 }
4133
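    // Fold in any error status already recorded for this buffer in the pending
    // buffers map before handing it to the output buffer dispatcher.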
4134 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4135 LOGH("result frame_number = %d, buffer = %p",
4136 frame_number, buffer->buffer);
4137
4138 mPendingBuffersMap.removeBuf(buffer->buffer);
4139 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4140
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004141 if (mPreviewStarted == false) {
4142 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4143 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004144 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4145
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004146 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4147 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4148 mPreviewStarted = true;
4149
4150 // Set power hint for preview
4151 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4152 }
4153 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004154}
4155
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004156void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004157 const camera_metadata_t *resultMetadata)
4158{
4159 // Find the pending request for this result metadata.
4160 auto requestIter = mPendingRequestsList.begin();
4161 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4162 requestIter++;
4163 }
4164
4165 if (requestIter == mPendingRequestsList.end()) {
4166 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4167 return;
4168 }
4169
4170 // Update the result metadata
4171 requestIter->resultMetadata = resultMetadata;
4172
4173 // Check what type of request this is.
4174 bool liveRequest = false;
4175 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004176 // HDR+ request doesn't have partial results.
4177 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004178 } else if (requestIter->input_buffer != nullptr) {
4179 // Reprocessing request result is the same as settings.
4180 requestIter->resultMetadata = requestIter->settings;
4181 // Reprocessing request doesn't have partial results.
4182 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4183 } else {
4184 liveRequest = true;
4185 requestIter->partial_result_cnt++;
4186 mPendingLiveRequest--;
4187
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004188 {
4189 Mutex::Autolock l(gHdrPlusClientLock);
4190 // For a live request, send the metadata to HDR+ client.
4191 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4192 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4193 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4194 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004195 }
4196 }
4197
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004198 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4199}
4200
4201void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4202 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004203 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4204 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004205 bool readyToSend = true;
4206
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004207 // Iterate through the pending requests to send out result metadata that are ready. Also if
4208 // this result metadata belongs to a live request, notify errors for previous live requests
4209 // that don't have result metadata yet.
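    // For example, if frames N, N+1 and N+2 are pending and only N+1/N+2 have
    // metadata, nothing after N is dispatched until N itself has been completed,
    // either with real metadata or (for a live request) with an ERROR_RESULT.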
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004210 auto iter = mPendingRequestsList.begin();
4211 while (iter != mPendingRequestsList.end()) {
4212 // Check if current pending request is ready. If it's not ready, the following pending
4213 // requests are also not ready.
4214 if (readyToSend && iter->resultMetadata == nullptr) {
4215 readyToSend = false;
4216 }
4217
4218 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4219
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004220 camera3_capture_result_t result = {};
4221 result.frame_number = iter->frame_number;
4222 result.result = iter->resultMetadata;
4223 result.partial_result = iter->partial_result_cnt;
4224
4225 // If this pending buffer has result metadata, we may be able to send out shutter callback
4226 // and result metadata.
4227 if (iter->resultMetadata != nullptr) {
4228 if (!readyToSend) {
4229 // If any of the previous pending request is not ready, this pending request is
4230 // also not ready to send in order to keep shutter callbacks and result metadata
4231 // in order.
4232 iter++;
4233 continue;
4234 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004235 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004236 // If the result metadata belongs to a live request, notify errors for previous pending
4237 // live requests.
4238 mPendingLiveRequest--;
4239
4240 CameraMetadata dummyMetadata;
4241 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4242 result.result = dummyMetadata.release();
4243
4244 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004245
4246 // partial_result should be PARTIAL_RESULT_CNT in case of
4247 // ERROR_RESULT.
4248 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4249 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004250 } else {
4251 iter++;
4252 continue;
4253 }
4254
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004255 result.output_buffers = nullptr;
4256 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004257 orchestrateResult(&result);
4258
4259 // For reprocessing, result metadata is the same as settings so do not free it here to
4260 // avoid double free.
4261 if (result.result != iter->settings) {
4262 free_camera_metadata((camera_metadata_t *)result.result);
4263 }
4264 iter->resultMetadata = nullptr;
4265 iter = erasePendingRequest(iter);
4266 }
4267
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004268 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004269 for (auto &iter : mPendingRequestsList) {
4270 // Increment pipeline depth for the following pending requests.
4271 if (iter.frame_number > frameNumber) {
4272 iter.pipeline_depth++;
4273 }
4274 }
4275 }
4276
4277 unblockRequestIfNecessary();
4278}
4279
Thierry Strudel3d639192016-09-09 11:52:26 -07004280/*===========================================================================
4281 * FUNCTION : unblockRequestIfNecessary
4282 *
4283 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4284 * that mMutex is held when this function is called.
4285 *
4286 * PARAMETERS :
4287 *
4288 * RETURN :
4289 *
4290 *==========================================================================*/
4291void QCamera3HardwareInterface::unblockRequestIfNecessary()
4292{
4293 // Unblock process_capture_request
4294 pthread_cond_signal(&mRequestCond);
4295}
4296
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004297/*===========================================================================
4298 * FUNCTION : isHdrSnapshotRequest
4299 *
4300 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4301 *
4302 * PARAMETERS : camera3 request structure
4303 *
4304 * RETURN : boolean decision variable
4305 *
4306 *==========================================================================*/
4307bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4308{
4309 if (request == NULL) {
4310 LOGE("Invalid request handle");
4311 assert(0);
4312 return false;
4313 }
4314
4315 if (!mForceHdrSnapshot) {
4316 CameraMetadata frame_settings;
4317 frame_settings = request->settings;
4318
4319 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4320 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4321 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4322 return false;
4323 }
4324 } else {
4325 return false;
4326 }
4327
4328 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4329 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4330 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4331 return false;
4332 }
4333 } else {
4334 return false;
4335 }
4336 }
4337
4338 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4339 if (request->output_buffers[i].stream->format
4340 == HAL_PIXEL_FORMAT_BLOB) {
4341 return true;
4342 }
4343 }
4344
4345 return false;
4346}
4347/*===========================================================================
4348 * FUNCTION : orchestrateRequest
4349 *
4350 * DESCRIPTION: Orchestrates a capture request from camera service
4351 *
4352 * PARAMETERS :
4353 * @request : request from framework to process
4354 *
4355 * RETURN : Error status codes
4356 *
4357 *==========================================================================*/
4358int32_t QCamera3HardwareInterface::orchestrateRequest(
4359 camera3_capture_request_t *request)
4360{
4361
4362 uint32_t originalFrameNumber = request->frame_number;
4363 uint32_t originalOutputCount = request->num_output_buffers;
4364 const camera_metadata_t *original_settings = request->settings;
4365 List<InternalRequest> internallyRequestedStreams;
4366 List<InternalRequest> emptyInternalList;
4367
4368 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4369 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4370 uint32_t internalFrameNumber;
4371 CameraMetadata modified_meta;
4372
4373
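        /* A single framework HDR snapshot is expanded into a bracketed sequence
         * of captures: AE is locked and the exposure compensation stepped
         * through GB_HDR_HALF_STEP_EV, 0 and GB_HDR_2X_STEP_EV, with
         * metering-only internal frames issued first so AE can settle.
         * Internal captures that set need_metadata feed the reprocess path;
         * the framework-visible output buffers are attached to the capture
         * that immediately follows the first settling frame. */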
4374 /* Add Blob channel to list of internally requested streams */
4375 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4376 if (request->output_buffers[i].stream->format
4377 == HAL_PIXEL_FORMAT_BLOB) {
4378 InternalRequest streamRequested;
4379 streamRequested.meteringOnly = 1;
4380 streamRequested.need_metadata = 0;
4381 streamRequested.stream = request->output_buffers[i].stream;
4382 internallyRequestedStreams.push_back(streamRequested);
4383 }
4384 }
4385 request->num_output_buffers = 0;
4386 auto itr = internallyRequestedStreams.begin();
4387
4388 /* Modify setting to set compensation */
4389 modified_meta = request->settings;
4390 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4391 uint8_t aeLock = 1;
4392 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4393 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4394 camera_metadata_t *modified_settings = modified_meta.release();
4395 request->settings = modified_settings;
4396
4397 /* Capture Settling & -2x frame */
4398 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4399 request->frame_number = internalFrameNumber;
4400 processCaptureRequest(request, internallyRequestedStreams);
4401
4402 request->num_output_buffers = originalOutputCount;
4403 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4404 request->frame_number = internalFrameNumber;
4405 processCaptureRequest(request, emptyInternalList);
4406 request->num_output_buffers = 0;
4407
4408 modified_meta = modified_settings;
4409 expCompensation = 0;
4410 aeLock = 1;
4411 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4412 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4413 modified_settings = modified_meta.release();
4414 request->settings = modified_settings;
4415
4416 /* Capture Settling & 0X frame */
4417
4418 itr = internallyRequestedStreams.begin();
4419 if (itr == internallyRequestedStreams.end()) {
4420 LOGE("Error Internally Requested Stream list is empty");
4421 assert(0);
4422 } else {
4423 itr->need_metadata = 0;
4424 itr->meteringOnly = 1;
4425 }
4426
4427 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4428 request->frame_number = internalFrameNumber;
4429 processCaptureRequest(request, internallyRequestedStreams);
4430
4431 itr = internallyRequestedStreams.begin();
4432 if (itr == internallyRequestedStreams.end()) {
4433 ALOGE("Error Internally Requested Stream list is empty");
4434 assert(0);
4435 } else {
4436 itr->need_metadata = 1;
4437 itr->meteringOnly = 0;
4438 }
4439
4440 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4441 request->frame_number = internalFrameNumber;
4442 processCaptureRequest(request, internallyRequestedStreams);
4443
4444 /* Capture 2X frame*/
4445 modified_meta = modified_settings;
4446 expCompensation = GB_HDR_2X_STEP_EV;
4447 aeLock = 1;
4448 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4449 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4450 modified_settings = modified_meta.release();
4451 request->settings = modified_settings;
4452
4453 itr = internallyRequestedStreams.begin();
4454 if (itr == internallyRequestedStreams.end()) {
4455 ALOGE("Error Internally Requested Stream list is empty");
4456 assert(0);
4457 } else {
4458 itr->need_metadata = 0;
4459 itr->meteringOnly = 1;
4460 }
4461 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4462 request->frame_number = internalFrameNumber;
4463 processCaptureRequest(request, internallyRequestedStreams);
4464
4465 itr = internallyRequestedStreams.begin();
4466 if (itr == internallyRequestedStreams.end()) {
4467 ALOGE("Error Internally Requested Stream list is empty");
4468 assert(0);
4469 } else {
4470 itr->need_metadata = 1;
4471 itr->meteringOnly = 0;
4472 }
4473
4474 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4475 request->frame_number = internalFrameNumber;
4476 processCaptureRequest(request, internallyRequestedStreams);
4477
4478
4479 /* Capture 2X on original streaming config*/
4480 internallyRequestedStreams.clear();
4481
4482 /* Restore original settings pointer */
4483 request->settings = original_settings;
4484 } else {
4485 uint32_t internalFrameNumber;
4486 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4487 request->frame_number = internalFrameNumber;
4488 return processCaptureRequest(request, internallyRequestedStreams);
4489 }
4490
4491 return NO_ERROR;
4492}
4493
4494/*===========================================================================
4495 * FUNCTION : orchestrateResult
4496 *
4497 * DESCRIPTION: Orchestrates a capture result to camera service
4498 *
4499 * PARAMETERS :
4500 *   @result  : capture result to be sent back to the framework
4501 *
4502 * RETURN :
4503 *
4504 *==========================================================================*/
4505void QCamera3HardwareInterface::orchestrateResult(
4506 camera3_capture_result_t *result)
4507{
4508 uint32_t frameworkFrameNumber;
4509 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4510 frameworkFrameNumber);
4511 if (rc != NO_ERROR) {
4512 LOGE("Cannot find translated frameworkFrameNumber");
4513 assert(0);
4514 } else {
4515 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004516 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004517 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004518 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004519 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4520 camera_metadata_entry_t entry;
4521 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4522 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004523 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004524 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4525 if (ret != OK)
4526 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004527 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004528 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004529 result->frame_number = frameworkFrameNumber;
4530 mCallbackOps->process_capture_result(mCallbackOps, result);
4531 }
4532 }
4533}
4534
4535/*===========================================================================
4536 * FUNCTION : orchestrateNotify
4537 *
4538 * DESCRIPTION: Orchestrates a notify to camera service
4539 *
4540 * PARAMETERS :
4541 *   @notify_msg : notify message to be sent to the framework
4542 *
4543 * RETURN :
4544 *
4545 *==========================================================================*/
4546void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4547{
4548 uint32_t frameworkFrameNumber;
4549 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004550 int32_t rc = NO_ERROR;
4551
4552 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004553 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004554
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004555 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004556 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4557 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4558 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004559 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004560 LOGE("Cannot find translated frameworkFrameNumber");
4561 assert(0);
4562 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004563 }
4564 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004565
4566 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4567 LOGD("Internal Request drop the notifyCb");
4568 } else {
4569 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4570 mCallbackOps->notify(mCallbackOps, notify_msg);
4571 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004572}
4573
4574/*===========================================================================
4575 * FUNCTION : FrameNumberRegistry
4576 *
4577 * DESCRIPTION: Constructor
4578 *
4579 * PARAMETERS :
4580 *
4581 * RETURN :
4582 *
4583 *==========================================================================*/
4584FrameNumberRegistry::FrameNumberRegistry()
4585{
4586 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4587}
4588
4589/*===========================================================================
4590 * FUNCTION : ~FrameNumberRegistry
4591 *
4592 * DESCRIPTION: Destructor
4593 *
4594 * PARAMETERS :
4595 *
4596 * RETURN :
4597 *
4598 *==========================================================================*/
4599FrameNumberRegistry::~FrameNumberRegistry()
4600{
4601}
4602
4603/*===========================================================================
4604 * FUNCTION   : purgeOldEntriesLocked
4605 *
4606 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4607 *
4608 * PARAMETERS :
4609 *
4610 * RETURN : NONE
4611 *
4612 *==========================================================================*/
4613void FrameNumberRegistry::purgeOldEntriesLocked()
4614{
4615 while (_register.begin() != _register.end()) {
4616 auto itr = _register.begin();
4617 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4618 _register.erase(itr);
4619 } else {
4620 return;
4621 }
4622 }
4623}
4624
4625/*===========================================================================
4626 * FUNCTION : allocStoreInternalFrameNumber
4627 *
4628 * DESCRIPTION: Method to note down a framework request and associate a new
4629 *              internal frame number with it
4630 *
4631 * PARAMETERS :
4632 *   @frameworkFrameNumber: Identifier given by the framework
4633 *   @internalFrameNumber : Output parameter which will receive the newly
4634 *                          generated internal frame number
4635 *
4636 * RETURN : Error code
4637 *
4638 *==========================================================================*/
4639int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4640 uint32_t &internalFrameNumber)
4641{
4642 Mutex::Autolock lock(mRegistryLock);
4643 internalFrameNumber = _nextFreeInternalNumber++;
4644 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4645 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4646 purgeOldEntriesLocked();
4647 return NO_ERROR;
4648}
4649
4650/*===========================================================================
4651 * FUNCTION : generateStoreInternalFrameNumber
4652 *
4653 * DESCRIPTION: Method to associate a new internal request number independent
4654 *              of any association with framework requests
4655 *
4656 * PARAMETERS :
4657 *   @internalFrameNumber: Output parameter which will receive the newly
4658 *                         generated internal frame number
4659 *
4660 * RETURN : Error code
4661 *
4662 *==========================================================================*/
4663int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4664{
4665 Mutex::Autolock lock(mRegistryLock);
4666 internalFrameNumber = _nextFreeInternalNumber++;
4667 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4668 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4669 purgeOldEntriesLocked();
4670 return NO_ERROR;
4671}
4672
4673/*===========================================================================
4674 * FUNCTION : getFrameworkFrameNumber
4675 *
4676 * DESCRIPTION: Method to query the framework frame number given an internal one
4677 *
4678 * PARAMETERS :
4679 *   @internalFrameNumber : Internal frame number to look up
4680 *   @frameworkFrameNumber: Output parameter holding the framework frame number
4681 *
4682 * RETURN : Error code
4683 *
4684 *==========================================================================*/
4685int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4686 uint32_t &frameworkFrameNumber)
4687{
4688 Mutex::Autolock lock(mRegistryLock);
4689 auto itr = _register.find(internalFrameNumber);
4690 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004691 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004692 return -ENOENT;
4693 }
4694
4695 frameworkFrameNumber = itr->second;
4696 purgeOldEntriesLocked();
4697 return NO_ERROR;
4698}
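
/* Illustrative sketch (comments only, not part of the build) of how the
 * registry above ties the orchestration code together; the calls mirror
 * orchestrateRequest()/orchestrateResult() in this file:
 *
 *   uint32_t internalFrame;
 *   // 1:1 framework request:
 *   _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrame);
 *   request->frame_number = internalFrame;           // HAL runs on internal numbers
 *
 *   // HAL-generated bracketing request with no framework counterpart:
 *   _orchestrationDb.generateStoreInternalFrameNumber(internalFrame);
 *
 *   // On the way back, translate before calling up to the framework:
 *   uint32_t fwkFrame;
 *   if (_orchestrationDb.getFrameworkFrameNumber(result->frame_number, fwkFrame) == NO_ERROR &&
 *           fwkFrame != EMPTY_FRAMEWORK_FRAME_NUMBER) {
 *       result->frame_number = fwkFrame;
 *   }
 */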
Thierry Strudel3d639192016-09-09 11:52:26 -07004699
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004700status_t QCamera3HardwareInterface::fillPbStreamConfig(
4701 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4702 QCamera3Channel *channel, uint32_t streamIndex) {
4703 if (config == nullptr) {
4704 LOGE("%s: config is null", __FUNCTION__);
4705 return BAD_VALUE;
4706 }
4707
4708 if (channel == nullptr) {
4709 LOGE("%s: channel is null", __FUNCTION__);
4710 return BAD_VALUE;
4711 }
4712
4713 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4714 if (stream == nullptr) {
4715 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4716 return NAME_NOT_FOUND;
4717 }
4718
4719 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4720 if (streamInfo == nullptr) {
4721 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4722 return NAME_NOT_FOUND;
4723 }
4724
4725 config->id = pbStreamId;
4726 config->image.width = streamInfo->dim.width;
4727 config->image.height = streamInfo->dim.height;
4728 config->image.padding = 0;
4729 config->image.format = pbStreamFormat;
4730
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004731 uint32_t totalPlaneSize = 0;
4732
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004733 // Fill plane information.
4734 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4735 pbcamera::PlaneConfiguration plane;
4736 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4737 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4738 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004739
4740 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004741 }
4742
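    // Whatever the frame length reserves beyond the summed per-plane
    // (stride * scanline) sizes is reported as padding in the pbcamera
    // stream configuration.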
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004743 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004744 return OK;
4745}
4746
Thierry Strudel3d639192016-09-09 11:52:26 -07004747/*===========================================================================
4748 * FUNCTION : processCaptureRequest
4749 *
4750 * DESCRIPTION: process a capture request from camera service
4751 *
4752 * PARAMETERS :
4753 * @request : request from framework to process
4754 *
4755 * RETURN :
4756 *
4757 *==========================================================================*/
4758int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004759 camera3_capture_request_t *request,
4760 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004761{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004762 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004763 int rc = NO_ERROR;
4764 int32_t request_id;
4765 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004766 bool isVidBufRequested = false;
4767 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004768 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004769
4770 pthread_mutex_lock(&mMutex);
4771
4772 // Validate current state
4773 switch (mState) {
4774 case CONFIGURED:
4775 case STARTED:
4776 /* valid state */
4777 break;
4778
4779 case ERROR:
4780 pthread_mutex_unlock(&mMutex);
4781 handleCameraDeviceError();
4782 return -ENODEV;
4783
4784 default:
4785 LOGE("Invalid state %d", mState);
4786 pthread_mutex_unlock(&mMutex);
4787 return -ENODEV;
4788 }
4789
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004790 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004791 if (rc != NO_ERROR) {
4792 LOGE("incoming request is not valid");
4793 pthread_mutex_unlock(&mMutex);
4794 return rc;
4795 }
4796
4797 meta = request->settings;
4798
4799 // For first capture request, send capture intent, and
4800 // stream on all streams
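    // The CONFIGURED path below also unconfigures any previous backend session,
    // selects the IS/EIS type per stream, pushes the session parameters
    // (capture intent, tintless, CDS, fps range, META_STREAM_INFO) to the
    // backend, queries the sensor mode, initializes every channel, and sets up
    // bundle/dual-camera info before the first request is submitted.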
4801 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004802 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004803 // send an unconfigure to the backend so that the isp
4804 // resources are deallocated
4805 if (!mFirstConfiguration) {
4806 cam_stream_size_info_t stream_config_info;
4807 int32_t hal_version = CAM_HAL_V3;
4808 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4809 stream_config_info.buffer_info.min_buffers =
4810 MIN_INFLIGHT_REQUESTS;
4811 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004812 m_bIs4KVideo ? 0 :
4813 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004814 clear_metadata_buffer(mParameters);
4815 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4816 CAM_INTF_PARM_HAL_VERSION, hal_version);
4817 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4818 CAM_INTF_META_STREAM_INFO, stream_config_info);
4819 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4820 mParameters);
4821 if (rc < 0) {
4822 LOGE("set_parms for unconfigure failed");
4823 pthread_mutex_unlock(&mMutex);
4824 return rc;
4825 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004826
Thierry Strudel3d639192016-09-09 11:52:26 -07004827 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004828 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004829 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004830 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004831 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004832 property_get("persist.camera.is_type", is_type_value, "4");
4833 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4834 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4835 property_get("persist.camera.is_type_preview", is_type_value, "4");
4836 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4837 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004838
4839 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4840 int32_t hal_version = CAM_HAL_V3;
4841 uint8_t captureIntent =
4842 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4843 mCaptureIntent = captureIntent;
4844 clear_metadata_buffer(mParameters);
4845 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4846 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4847 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004848 if (mFirstConfiguration) {
4849 // configure instant AEC
4850 // Instant AEC is a session based parameter and it is needed only
4851 // once per complete session after open camera.
4852 // i.e. This is set only once for the first capture request, after open camera.
4853 setInstantAEC(meta);
4854 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004855 uint8_t fwkVideoStabMode=0;
4856 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4857 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4858 }
4859
Xue Tuecac74e2017-04-17 13:58:15 -07004860 // If EIS setprop is enabled then only turn it on for video/preview
4861 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004862 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004863 int32_t vsMode;
4864 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4865 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4866 rc = BAD_VALUE;
4867 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004868 LOGD("setEis %d", setEis);
4869 bool eis3Supported = false;
4870 size_t count = IS_TYPE_MAX;
4871 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4872 for (size_t i = 0; i < count; i++) {
4873 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4874 eis3Supported = true;
4875 break;
4876 }
4877 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004878
4879 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004880 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004881 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4882 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004883 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4884 is_type = isTypePreview;
4885 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4886 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4887 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004888 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004889 } else {
4890 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004891 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004892 } else {
4893 is_type = IS_TYPE_NONE;
4894 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004895 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004896 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004897 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4898 }
4899 }
4900
4901 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4902 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4903
Thierry Strudel54dc9782017-02-15 12:12:10 -08004904 //Disable tintless only if the property is set to 0
4905 memset(prop, 0, sizeof(prop));
4906 property_get("persist.camera.tintless.enable", prop, "1");
4907 int32_t tintless_value = atoi(prop);
4908
Thierry Strudel3d639192016-09-09 11:52:26 -07004909 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4910 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004911
Thierry Strudel3d639192016-09-09 11:52:26 -07004912 //Disable CDS for HFR mode or if DIS/EIS is on.
4913 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4914 //after every configure_stream
4915 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4916 (m_bIsVideo)) {
4917 int32_t cds = CAM_CDS_MODE_OFF;
4918 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4919 CAM_INTF_PARM_CDS_MODE, cds))
4920 LOGE("Failed to disable CDS for HFR mode");
4921
4922 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004923
4924 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4925 uint8_t* use_av_timer = NULL;
4926
4927 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004928 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004929 use_av_timer = &m_debug_avtimer;
4930 }
4931 else{
4932 use_av_timer =
4933 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004934 if (use_av_timer) {
4935 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4936 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004937 }
4938
4939 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4940 rc = BAD_VALUE;
4941 }
4942 }
4943
Thierry Strudel3d639192016-09-09 11:52:26 -07004944 setMobicat();
4945
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004946 uint8_t nrMode = 0;
4947 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4948 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4949 }
4950
Thierry Strudel3d639192016-09-09 11:52:26 -07004951 /* Set fps and hfr mode while sending meta stream info so that sensor
4952 * can configure appropriate streaming mode */
4953 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004954 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4955 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004956 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4957 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004958 if (rc == NO_ERROR) {
4959 int32_t max_fps =
4960 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004961 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004962 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4963 }
4964 /* For HFR, more buffers are dequeued upfront to improve the performance */
4965 if (mBatchSize) {
4966 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4967 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4968 }
4969 }
4970 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004971 LOGE("setHalFpsRange failed");
4972 }
4973 }
4974 if (meta.exists(ANDROID_CONTROL_MODE)) {
4975 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4976 rc = extractSceneMode(meta, metaMode, mParameters);
4977 if (rc != NO_ERROR) {
4978 LOGE("extractSceneMode failed");
4979 }
4980 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004981 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004982
Thierry Strudel04e026f2016-10-10 11:27:36 -07004983 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4984 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4985 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4986 rc = setVideoHdrMode(mParameters, vhdr);
4987 if (rc != NO_ERROR) {
4988 LOGE("setVideoHDR is failed");
4989 }
4990 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004991
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004992 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004993 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004994 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004995 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4996 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
4997 sensorModeFullFov)) {
4998 rc = BAD_VALUE;
4999 }
5000 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005001 //TODO: validate the arguments, HSV scenemode should have only the
5002 //advertised fps ranges
5003
5004        /* Set the capture intent, HAL version, tintless, stream info,
5005         * and DIS enable parameters to the backend */
5006 LOGD("set_parms META_STREAM_INFO " );
5007 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005008 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5009 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005010 mStreamConfigInfo.type[i],
5011 mStreamConfigInfo.stream_sizes[i].width,
5012 mStreamConfigInfo.stream_sizes[i].height,
5013 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005014 mStreamConfigInfo.format[i],
5015 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005016 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005017
Thierry Strudel3d639192016-09-09 11:52:26 -07005018 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5019 mParameters);
5020 if (rc < 0) {
5021 LOGE("set_parms failed for hal version, stream info");
5022 }
5023
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005024 cam_sensor_mode_info_t sensorModeInfo = {};
5025 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005026 if (rc != NO_ERROR) {
5027 LOGE("Failed to get sensor output size");
5028 pthread_mutex_unlock(&mMutex);
5029 goto error_exit;
5030 }
5031
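        // Feed the mapper both the full active pixel array and the active array
        // of the selected sensor mode so crop regions/ROIs can be translated
        // between the two coordinate spaces.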
5032 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5033 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005034 sensorModeInfo.active_array_size.width,
5035 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005036
5037 /* Set batchmode before initializing channel. Since registerBuffer
5038 * internally initializes some of the channels, better set batchmode
5039 * even before first register buffer */
5040 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5041 it != mStreamInfo.end(); it++) {
5042 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5043 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5044 && mBatchSize) {
5045 rc = channel->setBatchSize(mBatchSize);
5046 //Disable per frame map unmap for HFR/batchmode case
5047 rc |= channel->setPerFrameMapUnmap(false);
5048 if (NO_ERROR != rc) {
5049 LOGE("Channel init failed %d", rc);
5050 pthread_mutex_unlock(&mMutex);
5051 goto error_exit;
5052 }
5053 }
5054 }
5055
5056 //First initialize all streams
5057 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5058 it != mStreamInfo.end(); it++) {
5059 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005060
5061 /* Initial value of NR mode is needed before stream on */
5062 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005063 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5064 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005065 setEis) {
5066 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5067 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5068 is_type = mStreamConfigInfo.is_type[i];
5069 break;
5070 }
5071 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005072 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005073 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005074 rc = channel->initialize(IS_TYPE_NONE);
5075 }
5076 if (NO_ERROR != rc) {
5077 LOGE("Channel initialization failed %d", rc);
5078 pthread_mutex_unlock(&mMutex);
5079 goto error_exit;
5080 }
5081 }
5082
5083 if (mRawDumpChannel) {
5084 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5085 if (rc != NO_ERROR) {
5086 LOGE("Error: Raw Dump Channel init failed");
5087 pthread_mutex_unlock(&mMutex);
5088 goto error_exit;
5089 }
5090 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005091 if (mHdrPlusRawSrcChannel) {
5092 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5093 if (rc != NO_ERROR) {
5094 LOGE("Error: HDR+ RAW Source Channel init failed");
5095 pthread_mutex_unlock(&mMutex);
5096 goto error_exit;
5097 }
5098 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005099 if (mSupportChannel) {
5100 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5101 if (rc < 0) {
5102 LOGE("Support channel initialization failed");
5103 pthread_mutex_unlock(&mMutex);
5104 goto error_exit;
5105 }
5106 }
5107 if (mAnalysisChannel) {
5108 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5109 if (rc < 0) {
5110 LOGE("Analysis channel initialization failed");
5111 pthread_mutex_unlock(&mMutex);
5112 goto error_exit;
5113 }
5114 }
5115 if (mDummyBatchChannel) {
5116 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5117 if (rc < 0) {
5118 LOGE("mDummyBatchChannel setBatchSize failed");
5119 pthread_mutex_unlock(&mMutex);
5120 goto error_exit;
5121 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005122 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005123 if (rc < 0) {
5124 LOGE("mDummyBatchChannel initialization failed");
5125 pthread_mutex_unlock(&mMutex);
5126 goto error_exit;
5127 }
5128 }
5129
5130 // Set bundle info
5131 rc = setBundleInfo();
5132 if (rc < 0) {
5133 LOGE("setBundleInfo failed %d", rc);
5134 pthread_mutex_unlock(&mMutex);
5135 goto error_exit;
5136 }
5137
5138 //update settings from app here
5139 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5140 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5141 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5142 }
5143 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5144 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5145 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5146 }
5147 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5148 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5149 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5150
5151 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5152 (mLinkedCameraId != mCameraId) ) {
5153 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5154 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005155 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005156 goto error_exit;
5157 }
5158 }
5159
5160 // add bundle related cameras
5161 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5162 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005163 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5164 &m_pDualCamCmdPtr->bundle_info;
5165 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005166 if (mIsDeviceLinked)
5167 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5168 else
5169 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5170
5171 pthread_mutex_lock(&gCamLock);
5172
5173 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5174 LOGE("Dualcam: Invalid Session Id ");
5175 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005176 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005177 goto error_exit;
5178 }
5179
5180 if (mIsMainCamera == 1) {
5181 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5182 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005183 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005184 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005185 // related session id should be session id of linked session
5186 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5187 } else {
5188 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5189 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005190 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005191 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005192 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5193 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005194 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005195 pthread_mutex_unlock(&gCamLock);
5196
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005197 rc = mCameraHandle->ops->set_dual_cam_cmd(
5198 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005199 if (rc < 0) {
5200 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005201 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005202 goto error_exit;
5203 }
5204 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005205 goto no_error;
5206error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005207 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005208 return rc;
5209no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005210 mWokenUpByDaemon = false;
5211 mPendingLiveRequest = 0;
5212 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005213 }
5214
5215 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005216 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005217
5218 if (mFlushPerf) {
5219 //we cannot accept any requests during flush
5220 LOGE("process_capture_request cannot proceed during flush");
5221 pthread_mutex_unlock(&mMutex);
5222 return NO_ERROR; //should return an error
5223 }
5224
5225 if (meta.exists(ANDROID_REQUEST_ID)) {
5226 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5227 mCurrentRequestId = request_id;
5228 LOGD("Received request with id: %d", request_id);
5229 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5230 LOGE("Unable to find request id field, \
5231 & no previous id available");
5232 pthread_mutex_unlock(&mMutex);
5233 return NAME_NOT_FOUND;
5234 } else {
5235 LOGD("Re-using old request id");
5236 request_id = mCurrentRequestId;
5237 }
5238
5239 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5240 request->num_output_buffers,
5241 request->input_buffer,
5242 frameNumber);
5243 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005244 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005245 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005246 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005247 uint32_t snapshotStreamId = 0;
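    // Walk the output buffers for this request: wait on and close each acquire
    // fence, flag JPEG (non-depth BLOB) outputs so the snapshot stream id is
    // recorded, note depth BLOB outputs separately, and collect the stream ids
    // of all remaining outputs into streamsArray for the backend.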
5248 for (size_t i = 0; i < request->num_output_buffers; i++) {
5249 const camera3_stream_buffer_t& output = request->output_buffers[i];
5250 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5251
Emilian Peev7650c122017-01-19 08:24:33 -08005252 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5253 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005254 //FIXME??: Call function to store a local copy of JPEG data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005255 blob_request = 1;
5256 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5257 }
5258
5259 if (output.acquire_fence != -1) {
5260 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5261 close(output.acquire_fence);
5262 if (rc != OK) {
5263 LOGE("sync wait failed %d", rc);
5264 pthread_mutex_unlock(&mMutex);
5265 return rc;
5266 }
5267 }
5268
Emilian Peev0f3c3162017-03-15 12:57:46 +00005269 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5270 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005271 depthRequestPresent = true;
5272 continue;
5273 }
5274
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005275 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005276 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005277
5278 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5279 isVidBufRequested = true;
5280 }
5281 }
5282
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005283 //FIXME: Add checks in validateCaptureRequest to ensure there are no dups
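    // Also append any HAL-internal (not framework-requested) streams, e.g.
    // metering-only captures, so the backend sees them in the same stream list.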
5284 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5285 itr++) {
5286 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5287 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5288 channel->getStreamID(channel->getStreamTypeMask());
5289
5290 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5291 isVidBufRequested = true;
5292 }
5293 }
5294
Thierry Strudel3d639192016-09-09 11:52:26 -07005295 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005296 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005297 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005298 }
5299 if (blob_request && mRawDumpChannel) {
5300 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005301 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005302 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005303 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005304 }
5305
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005306 {
5307 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5308 // Request a RAW buffer if
5309 // 1. mHdrPlusRawSrcChannel is valid.
 5310 // 2. frameNumber is a multiple of kHdrPlusRawPeriod (to limit the RAW capture rate).
5311 // 3. There is no pending HDR+ request.
5312 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5313 mHdrPlusPendingRequests.size() == 0) {
5314 streamsArray.stream_request[streamsArray.num_streams].streamID =
5315 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5316 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5317 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005318 }
5319
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005320 //extract capture intent
5321 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5322 mCaptureIntent =
5323 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5324 }
5325
5326 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5327 mCacMode =
5328 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5329 }
5330
5331 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005332 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005333
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005334 {
5335 Mutex::Autolock l(gHdrPlusClientLock);
5336 // If this request has a still capture intent, try to submit an HDR+ request.
5337 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5338 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5339 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5340 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005341 }
5342
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005343 if (hdrPlusRequest) {
5344 // For a HDR+ request, just set the frame parameters.
5345 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5346 if (rc < 0) {
5347 LOGE("fail to set frame parameters");
5348 pthread_mutex_unlock(&mMutex);
5349 return rc;
5350 }
5351 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005352 /* Parse the settings:
5353 * - For every request in NORMAL MODE
 5354 * - For every request in HFR mode during the preview-only case
 5355 * - For the first request of every batch in HFR mode during video
 5356 * recording. In batch mode the same settings, except the frame number,
 5357 * are repeated in each request of the batch.
5358 */
5359 if (!mBatchSize ||
5360 (mBatchSize && !isVidBufRequested) ||
5361 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005362 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005363 if (rc < 0) {
5364 LOGE("fail to set frame parameters");
5365 pthread_mutex_unlock(&mMutex);
5366 return rc;
5367 }
5368 }
 5369 /* For batch-mode HFR, setFrameParameters is not called for every
 5370 * request; only the frame number of the latest request is parsed.
 5371 * Keep track of the first and last frame numbers in a batch so that
 5372 * metadata for the frame numbers of the batch can be duplicated in
 5373 * handleBatchMetadata */
5374 if (mBatchSize) {
5375 if (!mToBeQueuedVidBufs) {
5376 //start of the batch
5377 mFirstFrameNumberInBatch = request->frame_number;
5378 }
5379 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5380 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5381 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005382 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005383 return BAD_VALUE;
5384 }
5385 }
5386 if (mNeedSensorRestart) {
5387 /* Unlock the mutex as restartSensor waits on the channels to be
5388 * stopped, which in turn calls stream callback functions -
5389 * handleBufferWithLock and handleMetadataWithLock */
5390 pthread_mutex_unlock(&mMutex);
5391 rc = dynamicUpdateMetaStreamInfo();
5392 if (rc != NO_ERROR) {
5393 LOGE("Restarting the sensor failed");
5394 return BAD_VALUE;
5395 }
5396 mNeedSensorRestart = false;
5397 pthread_mutex_lock(&mMutex);
5398 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005399 if(mResetInstantAEC) {
5400 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5401 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5402 mResetInstantAEC = false;
5403 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005404 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005405 if (request->input_buffer->acquire_fence != -1) {
5406 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5407 close(request->input_buffer->acquire_fence);
5408 if (rc != OK) {
5409 LOGE("input buffer sync wait failed %d", rc);
5410 pthread_mutex_unlock(&mMutex);
5411 return rc;
5412 }
5413 }
5414 }
5415
5416 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5417 mLastCustIntentFrmNum = frameNumber;
5418 }
5419 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005420 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005421 pendingRequestIterator latestRequest;
5422 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005423 pendingRequest.num_buffers = depthRequestPresent ?
5424 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005425 pendingRequest.request_id = request_id;
5426 pendingRequest.blob_request = blob_request;
5427 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005428 if (request->input_buffer) {
5429 pendingRequest.input_buffer =
5430 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5431 *(pendingRequest.input_buffer) = *(request->input_buffer);
5432 pInputBuffer = pendingRequest.input_buffer;
5433 } else {
5434 pendingRequest.input_buffer = NULL;
5435 pInputBuffer = NULL;
5436 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005437 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005438
5439 pendingRequest.pipeline_depth = 0;
5440 pendingRequest.partial_result_cnt = 0;
5441 extractJpegMetadata(mCurJpegMeta, request);
5442 pendingRequest.jpegMetadata = mCurJpegMeta;
5443 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005444 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005445 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5446 mHybridAeEnable =
5447 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5448 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005449
5450 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5451 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005452 /* DevCamDebug metadata processCaptureRequest */
5453 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5454 mDevCamDebugMetaEnable =
5455 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5456 }
5457 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5458 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005459
5460 //extract CAC info
5461 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5462 mCacMode =
5463 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5464 }
5465 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005466 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005467
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005468 // extract enableZsl info
5469 if (gExposeEnableZslKey) {
5470 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5471 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5472 mZslEnabled = pendingRequest.enableZsl;
5473 } else {
5474 pendingRequest.enableZsl = mZslEnabled;
5475 }
5476 }
5477
Thierry Strudel3d639192016-09-09 11:52:26 -07005478 PendingBuffersInRequest bufsForCurRequest;
5479 bufsForCurRequest.frame_number = frameNumber;
5480 // Mark current timestamp for the new request
5481 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005482 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005483
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005484 if (hdrPlusRequest) {
5485 // Save settings for this request.
5486 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5487 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5488
5489 // Add to pending HDR+ request queue.
5490 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5491 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5492
5493 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5494 }
5495
Thierry Strudel3d639192016-09-09 11:52:26 -07005496 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005497 if ((request->output_buffers[i].stream->data_space ==
5498 HAL_DATASPACE_DEPTH) &&
5499 (HAL_PIXEL_FORMAT_BLOB ==
5500 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005501 continue;
5502 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005503 RequestedBufferInfo requestedBuf;
5504 memset(&requestedBuf, 0, sizeof(requestedBuf));
5505 requestedBuf.stream = request->output_buffers[i].stream;
5506 requestedBuf.buffer = NULL;
5507 pendingRequest.buffers.push_back(requestedBuf);
5508
 5509 // Add the buffer handle to the pending buffers list
5510 PendingBufferInfo bufferInfo;
5511 bufferInfo.buffer = request->output_buffers[i].buffer;
5512 bufferInfo.stream = request->output_buffers[i].stream;
5513 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5514 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5515 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5516 frameNumber, bufferInfo.buffer,
5517 channel->getStreamTypeMask(), bufferInfo.stream->format);
5518 }
5519 // Add this request packet into mPendingBuffersMap
5520 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5521 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5522 mPendingBuffersMap.get_num_overall_buffers());
5523
5524 latestRequest = mPendingRequestsList.insert(
5525 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005526
5527 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5528 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005529 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005530 for (size_t i = 0; i < request->num_output_buffers; i++) {
5531 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5532 }
5533
Thierry Strudel3d639192016-09-09 11:52:26 -07005534 if(mFlush) {
5535 LOGI("mFlush is true");
5536 pthread_mutex_unlock(&mMutex);
5537 return NO_ERROR;
5538 }
5539
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005540 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5541 // channel.
5542 if (!hdrPlusRequest) {
5543 int indexUsed;
5544 // Notify metadata channel we receive a request
5545 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005546
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005547 if(request->input_buffer != NULL){
5548 LOGD("Input request, frame_number %d", frameNumber);
5549 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5550 if (NO_ERROR != rc) {
5551 LOGE("fail to set reproc parameters");
5552 pthread_mutex_unlock(&mMutex);
5553 return rc;
5554 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005555 }
5556
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005557 // Call request on other streams
5558 uint32_t streams_need_metadata = 0;
5559 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5560 for (size_t i = 0; i < request->num_output_buffers; i++) {
5561 const camera3_stream_buffer_t& output = request->output_buffers[i];
5562 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5563
5564 if (channel == NULL) {
5565 LOGW("invalid channel pointer for stream");
5566 continue;
5567 }
5568
5569 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5570 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5571 output.buffer, request->input_buffer, frameNumber);
5572 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005573 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005574 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5575 if (rc < 0) {
5576 LOGE("Fail to request on picture channel");
5577 pthread_mutex_unlock(&mMutex);
5578 return rc;
5579 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005580 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005581 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5582 assert(NULL != mDepthChannel);
5583 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005584
Emilian Peev7650c122017-01-19 08:24:33 -08005585 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5586 if (rc < 0) {
5587 LOGE("Fail to map on depth buffer");
5588 pthread_mutex_unlock(&mMutex);
5589 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005590 }
Emilian Peev7650c122017-01-19 08:24:33 -08005591 } else {
5592 LOGD("snapshot request with buffer %p, frame_number %d",
5593 output.buffer, frameNumber);
5594 if (!request->settings) {
5595 rc = channel->request(output.buffer, frameNumber,
5596 NULL, mPrevParameters, indexUsed);
5597 } else {
5598 rc = channel->request(output.buffer, frameNumber,
5599 NULL, mParameters, indexUsed);
5600 }
5601 if (rc < 0) {
5602 LOGE("Fail to request on picture channel");
5603 pthread_mutex_unlock(&mMutex);
5604 return rc;
5605 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005606
Emilian Peev7650c122017-01-19 08:24:33 -08005607 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5608 uint32_t j = 0;
5609 for (j = 0; j < streamsArray.num_streams; j++) {
5610 if (streamsArray.stream_request[j].streamID == streamId) {
5611 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5612 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5613 else
5614 streamsArray.stream_request[j].buf_index = indexUsed;
5615 break;
5616 }
5617 }
5618 if (j == streamsArray.num_streams) {
5619 LOGE("Did not find matching stream to update index");
5620 assert(0);
5621 }
5622
5623 pendingBufferIter->need_metadata = true;
5624 streams_need_metadata++;
5625 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005626 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005627 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5628 bool needMetadata = false;
5629 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5630 rc = yuvChannel->request(output.buffer, frameNumber,
5631 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5632 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005633 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005634 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005635 pthread_mutex_unlock(&mMutex);
5636 return rc;
5637 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005638
5639 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5640 uint32_t j = 0;
5641 for (j = 0; j < streamsArray.num_streams; j++) {
5642 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005643 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5644 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5645 else
5646 streamsArray.stream_request[j].buf_index = indexUsed;
5647 break;
5648 }
5649 }
5650 if (j == streamsArray.num_streams) {
5651 LOGE("Did not find matching stream to update index");
5652 assert(0);
5653 }
5654
5655 pendingBufferIter->need_metadata = needMetadata;
5656 if (needMetadata)
5657 streams_need_metadata += 1;
5658 LOGD("calling YUV channel request, need_metadata is %d",
5659 needMetadata);
5660 } else {
5661 LOGD("request with buffer %p, frame_number %d",
5662 output.buffer, frameNumber);
5663
5664 rc = channel->request(output.buffer, frameNumber, indexUsed);
5665
5666 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5667 uint32_t j = 0;
5668 for (j = 0; j < streamsArray.num_streams; j++) {
5669 if (streamsArray.stream_request[j].streamID == streamId) {
5670 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5671 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5672 else
5673 streamsArray.stream_request[j].buf_index = indexUsed;
5674 break;
5675 }
5676 }
5677 if (j == streamsArray.num_streams) {
5678 LOGE("Did not find matching stream to update index");
5679 assert(0);
5680 }
5681
5682 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5683 && mBatchSize) {
5684 mToBeQueuedVidBufs++;
5685 if (mToBeQueuedVidBufs == mBatchSize) {
5686 channel->queueBatchBuf();
5687 }
5688 }
5689 if (rc < 0) {
5690 LOGE("request failed");
5691 pthread_mutex_unlock(&mMutex);
5692 return rc;
5693 }
5694 }
5695 pendingBufferIter++;
5696 }
5697
5698 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5699 itr++) {
5700 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5701
5702 if (channel == NULL) {
5703 LOGE("invalid channel pointer for stream");
5704 assert(0);
5705 return BAD_VALUE;
5706 }
5707
5708 InternalRequest requestedStream;
5709 requestedStream = (*itr);
5710
5711
5712 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5713 LOGD("snapshot request internally input buffer %p, frame_number %d",
5714 request->input_buffer, frameNumber);
5715 if(request->input_buffer != NULL){
5716 rc = channel->request(NULL, frameNumber,
5717 pInputBuffer, &mReprocMeta, indexUsed, true,
5718 requestedStream.meteringOnly);
5719 if (rc < 0) {
5720 LOGE("Fail to request on picture channel");
5721 pthread_mutex_unlock(&mMutex);
5722 return rc;
5723 }
5724 } else {
5725 LOGD("snapshot request with frame_number %d", frameNumber);
5726 if (!request->settings) {
5727 rc = channel->request(NULL, frameNumber,
5728 NULL, mPrevParameters, indexUsed, true,
5729 requestedStream.meteringOnly);
5730 } else {
5731 rc = channel->request(NULL, frameNumber,
5732 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5733 }
5734 if (rc < 0) {
5735 LOGE("Fail to request on picture channel");
5736 pthread_mutex_unlock(&mMutex);
5737 return rc;
5738 }
5739
5740 if ((*itr).meteringOnly != 1) {
5741 requestedStream.need_metadata = 1;
5742 streams_need_metadata++;
5743 }
5744 }
5745
5746 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5747 uint32_t j = 0;
5748 for (j = 0; j < streamsArray.num_streams; j++) {
5749 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005750 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5751 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5752 else
5753 streamsArray.stream_request[j].buf_index = indexUsed;
5754 break;
5755 }
5756 }
5757 if (j == streamsArray.num_streams) {
5758 LOGE("Did not find matching stream to update index");
5759 assert(0);
5760 }
5761
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005762 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005763 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005764 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005765 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005766 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005767 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005768 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005769
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005770 //If 2 streams have need_metadata set to true, fail the request, unless
5771 //we copy/reference count the metadata buffer
5772 if (streams_need_metadata > 1) {
 5773 LOGE("not supporting a request in which two streams require"
 5774 " the HAL metadata for reprocessing");
5775 pthread_mutex_unlock(&mMutex);
5776 return -EINVAL;
5777 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005778
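    // PDAF (phase-detection) data handling: when a depth channel is configured,
    // PD data defaults to SKIP and is switched per request through the
    // NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE setting; without a depth channel it
    // stays disabled. The chosen mode is cached in mDepthCloudMode and reused for
    // requests that carry no settings.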
Emilian Peev656e4fa2017-06-02 16:47:04 +01005779 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5780 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5781 if (depthRequestPresent && mDepthChannel) {
5782 if (request->settings) {
5783 camera_metadata_ro_entry entry;
5784 if (find_camera_metadata_ro_entry(request->settings,
5785 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5786 if (entry.data.u8[0]) {
5787 pdafEnable = CAM_PD_DATA_ENABLED;
5788 } else {
5789 pdafEnable = CAM_PD_DATA_SKIP;
5790 }
5791 mDepthCloudMode = pdafEnable;
5792 } else {
5793 pdafEnable = mDepthCloudMode;
5794 }
5795 } else {
5796 pdafEnable = mDepthCloudMode;
5797 }
5798 }
5799
Emilian Peev7650c122017-01-19 08:24:33 -08005800 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5801 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5802 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5803 pthread_mutex_unlock(&mMutex);
5804 return BAD_VALUE;
5805 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005806
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005807 if (request->input_buffer == NULL) {
5808 /* Set the parameters to backend:
5809 * - For every request in NORMAL MODE
5810 * - For every request in HFR mode during preview only case
5811 * - Once every batch in HFR mode during video recording
5812 */
5813 if (!mBatchSize ||
5814 (mBatchSize && !isVidBufRequested) ||
5815 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5816 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5817 mBatchSize, isVidBufRequested,
5818 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005819
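                // Batch-mode bookkeeping: accumulate the unique stream ids (and
                // buffer indices) of every request in the current batch into
                // mBatchedStreamsArray, and hand the merged list to the backend
                // only once the batch is full.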
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005820 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5821 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5822 uint32_t m = 0;
5823 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5824 if (streamsArray.stream_request[k].streamID ==
5825 mBatchedStreamsArray.stream_request[m].streamID)
5826 break;
5827 }
5828 if (m == mBatchedStreamsArray.num_streams) {
5829 mBatchedStreamsArray.stream_request\
5830 [mBatchedStreamsArray.num_streams].streamID =
5831 streamsArray.stream_request[k].streamID;
5832 mBatchedStreamsArray.stream_request\
5833 [mBatchedStreamsArray.num_streams].buf_index =
5834 streamsArray.stream_request[k].buf_index;
5835 mBatchedStreamsArray.num_streams =
5836 mBatchedStreamsArray.num_streams + 1;
5837 }
5838 }
5839 streamsArray = mBatchedStreamsArray;
5840 }
5841 /* Update stream id of all the requested buffers */
5842 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5843 streamsArray)) {
 5844 LOGE("Failed to set the stream ID list in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005845 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005846 return BAD_VALUE;
5847 }
5848
5849 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5850 mParameters);
5851 if (rc < 0) {
5852 LOGE("set_parms failed");
5853 }
 5854 /* Reset to zero because the batch has been queued */
5855 mToBeQueuedVidBufs = 0;
5856 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5857 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5858 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005859 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5860 uint32_t m = 0;
5861 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5862 if (streamsArray.stream_request[k].streamID ==
5863 mBatchedStreamsArray.stream_request[m].streamID)
5864 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005865 }
5866 if (m == mBatchedStreamsArray.num_streams) {
5867 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5868 streamID = streamsArray.stream_request[k].streamID;
5869 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5870 buf_index = streamsArray.stream_request[k].buf_index;
5871 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5872 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005873 }
5874 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005875 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005876
5877 // Start all streams after the first setting is sent, so that the
5878 // setting can be applied sooner: (0 + apply_delay)th frame.
5879 if (mState == CONFIGURED && mChannelHandle) {
5880 //Then start them.
5881 LOGH("Start META Channel");
5882 rc = mMetadataChannel->start();
5883 if (rc < 0) {
5884 LOGE("META channel start failed");
5885 pthread_mutex_unlock(&mMutex);
5886 return rc;
5887 }
5888
5889 if (mAnalysisChannel) {
5890 rc = mAnalysisChannel->start();
5891 if (rc < 0) {
5892 LOGE("Analysis channel start failed");
5893 mMetadataChannel->stop();
5894 pthread_mutex_unlock(&mMutex);
5895 return rc;
5896 }
5897 }
5898
5899 if (mSupportChannel) {
5900 rc = mSupportChannel->start();
5901 if (rc < 0) {
5902 LOGE("Support channel start failed");
5903 mMetadataChannel->stop();
 5904 /* Although support and analysis are mutually exclusive today,
 5905 add it in any case for future proofing */
5906 if (mAnalysisChannel) {
5907 mAnalysisChannel->stop();
5908 }
5909 pthread_mutex_unlock(&mMutex);
5910 return rc;
5911 }
5912 }
5913 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5914 it != mStreamInfo.end(); it++) {
5915 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5916 LOGH("Start Processing Channel mask=%d",
5917 channel->getStreamTypeMask());
5918 rc = channel->start();
5919 if (rc < 0) {
5920 LOGE("channel start failed");
5921 pthread_mutex_unlock(&mMutex);
5922 return rc;
5923 }
5924 }
5925
5926 if (mRawDumpChannel) {
5927 LOGD("Starting raw dump stream");
5928 rc = mRawDumpChannel->start();
5929 if (rc != NO_ERROR) {
5930 LOGE("Error Starting Raw Dump Channel");
5931 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5932 it != mStreamInfo.end(); it++) {
5933 QCamera3Channel *channel =
5934 (QCamera3Channel *)(*it)->stream->priv;
5935 LOGH("Stopping Processing Channel mask=%d",
5936 channel->getStreamTypeMask());
5937 channel->stop();
5938 }
5939 if (mSupportChannel)
5940 mSupportChannel->stop();
5941 if (mAnalysisChannel) {
5942 mAnalysisChannel->stop();
5943 }
5944 mMetadataChannel->stop();
5945 pthread_mutex_unlock(&mMutex);
5946 return rc;
5947 }
5948 }
5949
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005950 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005951 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005952 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005953 if (rc != NO_ERROR) {
5954 LOGE("start_channel failed %d", rc);
5955 pthread_mutex_unlock(&mMutex);
5956 return rc;
5957 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005958
5959 {
5960 // Configure Easel for stream on.
5961 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005962
5963 // Now that sensor mode should have been selected, get the selected sensor mode
5964 // info.
5965 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5966 getCurrentSensorModeInfo(mSensorModeInfo);
5967
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005968 if (EaselManagerClientOpened) {
5969 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chena6c99062017-05-23 13:45:06 -07005970 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
5971 /*enableIpu*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005972 if (rc != OK) {
5973 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5974 mCameraId, mSensorModeInfo.op_pixel_clk);
5975 pthread_mutex_unlock(&mMutex);
5976 return rc;
5977 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005978 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005979 }
5980 }
5981
5982 // Start sensor streaming.
5983 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5984 mChannelHandle);
5985 if (rc != NO_ERROR) {
 5986 LOGE("start_sensor_streaming failed %d", rc);
5987 pthread_mutex_unlock(&mMutex);
5988 return rc;
5989 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005990 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005991 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005992 }
5993
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005994 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen3b630e52017-06-02 15:39:47 -07005995 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || mCameraId == 0) {
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005996 Mutex::Autolock l(gHdrPlusClientLock);
5997 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5998 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5999 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6000 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6001 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
6002 rc = enableHdrPlusModeLocked();
6003 if (rc != OK) {
6004 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6005 pthread_mutex_unlock(&mMutex);
6006 return rc;
6007 }
6008
6009 mFirstPreviewIntentSeen = true;
6010 }
6011 }
6012
Thierry Strudel3d639192016-09-09 11:52:26 -07006013 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6014
6015 mState = STARTED;
6016 // Added a timed condition wait
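    // Throttle the caller: block here until the number of in-flight requests
    // drops below mMinInFlightRequests (unless this request carries an input
    // buffer), using a timed wait so a stalled pipeline surfaces as -ENODEV
    // instead of hanging.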
6017 struct timespec ts;
6018 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006019 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006020 if (rc < 0) {
6021 isValidTimeout = 0;
 6022 LOGE("Error reading the monotonic clock!!");
6023 }
6024 else {
 6025 // Set a 5 sec timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006026 int64_t timeout = 5;
6027 {
6028 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6029 // If there is a pending HDR+ request, the following requests may be blocked until the
6030 // HDR+ request is done. So allow a longer timeout.
6031 if (mHdrPlusPendingRequests.size() > 0) {
6032 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6033 }
6034 }
6035 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006036 }
6037 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006038 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006039 (mState != ERROR) && (mState != DEINIT)) {
6040 if (!isValidTimeout) {
6041 LOGD("Blocking on conditional wait");
6042 pthread_cond_wait(&mRequestCond, &mMutex);
6043 }
6044 else {
6045 LOGD("Blocking on timed conditional wait");
6046 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6047 if (rc == ETIMEDOUT) {
6048 rc = -ENODEV;
6049 LOGE("Unblocked on timeout!!!!");
6050 break;
6051 }
6052 }
6053 LOGD("Unblocked");
6054 if (mWokenUpByDaemon) {
6055 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006056 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006057 break;
6058 }
6059 }
6060 pthread_mutex_unlock(&mMutex);
6061
6062 return rc;
6063}
6064
6065/*===========================================================================
6066 * FUNCTION : dump
6067 *
6068 * DESCRIPTION:
6069 *
6070 * PARAMETERS :
6071 *
6072 *
6073 * RETURN :
6074 *==========================================================================*/
6075void QCamera3HardwareInterface::dump(int fd)
6076{
6077 pthread_mutex_lock(&mMutex);
6078 dprintf(fd, "\n Camera HAL3 information Begin \n");
6079
6080 dprintf(fd, "\nNumber of pending requests: %zu \n",
6081 mPendingRequestsList.size());
6082 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6083 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6084 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6085 for(pendingRequestIterator i = mPendingRequestsList.begin();
6086 i != mPendingRequestsList.end(); i++) {
6087 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6088 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6089 i->input_buffer);
6090 }
6091 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6092 mPendingBuffersMap.get_num_overall_buffers());
6093 dprintf(fd, "-------+------------------\n");
6094 dprintf(fd, " Frame | Stream type mask \n");
6095 dprintf(fd, "-------+------------------\n");
6096 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6097 for(auto &j : req.mPendingBufferList) {
6098 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6099 dprintf(fd, " %5d | %11d \n",
6100 req.frame_number, channel->getStreamTypeMask());
6101 }
6102 }
6103 dprintf(fd, "-------+------------------\n");
6104
6105 dprintf(fd, "\nPending frame drop list: %zu\n",
6106 mPendingFrameDropList.size());
6107 dprintf(fd, "-------+-----------\n");
6108 dprintf(fd, " Frame | Stream ID \n");
6109 dprintf(fd, "-------+-----------\n");
6110 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6111 i != mPendingFrameDropList.end(); i++) {
6112 dprintf(fd, " %5d | %9d \n",
6113 i->frame_number, i->stream_ID);
6114 }
6115 dprintf(fd, "-------+-----------\n");
6116
6117 dprintf(fd, "\n Camera HAL3 information End \n");
6118
6119 /* use dumpsys media.camera as trigger to send update debug level event */
6120 mUpdateDebugLevel = true;
6121 pthread_mutex_unlock(&mMutex);
6122 return;
6123}
6124
6125/*===========================================================================
6126 * FUNCTION : flush
6127 *
6128 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6129 * conditionally restarts channels
6130 *
6131 * PARAMETERS :
6132 * @ restartChannels: re-start all channels
6133 *
6134 *
6135 * RETURN :
6136 * 0 on success
6137 * Error code on failure
6138 *==========================================================================*/
6139int QCamera3HardwareInterface::flush(bool restartChannels)
6140{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006141 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006142 int32_t rc = NO_ERROR;
6143
6144 LOGD("Unblocking Process Capture Request");
6145 pthread_mutex_lock(&mMutex);
6146 mFlush = true;
6147 pthread_mutex_unlock(&mMutex);
6148
6149 rc = stopAllChannels();
6150 // unlink of dualcam
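    // If this session is part of a dual-cam bundle, send an unlink command
    // (CAM_SYNC_RELATED_SENSORS_OFF) so the backend stops syncing with the
    // linked session; a failure here is logged but the flush still proceeds.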
6151 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006152 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6153 &m_pDualCamCmdPtr->bundle_info;
6154 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006155 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6156 pthread_mutex_lock(&gCamLock);
6157
6158 if (mIsMainCamera == 1) {
6159 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6160 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006161 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006162 // related session id should be session id of linked session
6163 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6164 } else {
6165 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6166 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006167 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006168 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6169 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006170 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006171 pthread_mutex_unlock(&gCamLock);
6172
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006173 rc = mCameraHandle->ops->set_dual_cam_cmd(
6174 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006175 if (rc < 0) {
6176 LOGE("Dualcam: Unlink failed, but still proceed to close");
6177 }
6178 }
6179
6180 if (rc < 0) {
6181 LOGE("stopAllChannels failed");
6182 return rc;
6183 }
6184 if (mChannelHandle) {
6185 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6186 mChannelHandle);
6187 }
6188
6189 // Reset bundle info
6190 rc = setBundleInfo();
6191 if (rc < 0) {
6192 LOGE("setBundleInfo failed %d", rc);
6193 return rc;
6194 }
6195
6196 // Mutex Lock
6197 pthread_mutex_lock(&mMutex);
6198
6199 // Unblock process_capture_request
6200 mPendingLiveRequest = 0;
6201 pthread_cond_signal(&mRequestCond);
6202
6203 rc = notifyErrorForPendingRequests();
6204 if (rc < 0) {
6205 LOGE("notifyErrorForPendingRequests failed");
6206 pthread_mutex_unlock(&mMutex);
6207 return rc;
6208 }
6209
6210 mFlush = false;
6211
6212 // Start the Streams/Channels
6213 if (restartChannels) {
6214 rc = startAllChannels();
6215 if (rc < 0) {
6216 LOGE("startAllChannels failed");
6217 pthread_mutex_unlock(&mMutex);
6218 return rc;
6219 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006220 if (mChannelHandle) {
6221 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006222 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006223 if (rc < 0) {
6224 LOGE("start_channel failed");
6225 pthread_mutex_unlock(&mMutex);
6226 return rc;
6227 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006228 }
6229 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006230 pthread_mutex_unlock(&mMutex);
6231
6232 return 0;
6233}
6234
6235/*===========================================================================
6236 * FUNCTION : flushPerf
6237 *
 6238 * DESCRIPTION: This is the performance-optimized version of flush that does
 6239 * not use stream-off; instead it flushes the system
6240 *
6241 * PARAMETERS :
6242 *
6243 *
6244 * RETURN : 0 : success
6245 * -EINVAL: input is malformed (device is not valid)
6246 * -ENODEV: if the device has encountered a serious error
6247 *==========================================================================*/
6248int QCamera3HardwareInterface::flushPerf()
6249{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006250 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006251 int32_t rc = 0;
6252 struct timespec timeout;
6253 bool timed_wait = false;
6254
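    // Performance flush sequence: mark mFlushPerf, issue the flush IOCTL to the
    // backend, then wait (with a FLUSH_TIMEOUT-bounded timed wait when possible)
    // for all pending buffers to come back before flushing the channels and
    // returning errored results to the framework.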
6255 pthread_mutex_lock(&mMutex);
6256 mFlushPerf = true;
6257 mPendingBuffersMap.numPendingBufsAtFlush =
6258 mPendingBuffersMap.get_num_overall_buffers();
6259 LOGD("Calling flush. Wait for %d buffers to return",
6260 mPendingBuffersMap.numPendingBufsAtFlush);
6261
6262 /* send the flush event to the backend */
6263 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6264 if (rc < 0) {
6265 LOGE("Error in flush: IOCTL failure");
6266 mFlushPerf = false;
6267 pthread_mutex_unlock(&mMutex);
6268 return -ENODEV;
6269 }
6270
6271 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6272 LOGD("No pending buffers in HAL, return flush");
6273 mFlushPerf = false;
6274 pthread_mutex_unlock(&mMutex);
6275 return rc;
6276 }
6277
6278 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006279 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006280 if (rc < 0) {
 6281 LOGE("Error reading the monotonic clock, cannot use timed wait");
6282 } else {
6283 timeout.tv_sec += FLUSH_TIMEOUT;
6284 timed_wait = true;
6285 }
6286
6287 //Block on conditional variable
6288 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6289 LOGD("Waiting on mBuffersCond");
6290 if (!timed_wait) {
6291 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6292 if (rc != 0) {
6293 LOGE("pthread_cond_wait failed due to rc = %s",
6294 strerror(rc));
6295 break;
6296 }
6297 } else {
6298 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6299 if (rc != 0) {
6300 LOGE("pthread_cond_timedwait failed due to rc = %s",
6301 strerror(rc));
6302 break;
6303 }
6304 }
6305 }
6306 if (rc != 0) {
6307 mFlushPerf = false;
6308 pthread_mutex_unlock(&mMutex);
6309 return -ENODEV;
6310 }
6311
6312 LOGD("Received buffers, now safe to return them");
6313
6314 //make sure the channels handle flush
6315 //currently only required for the picture channel to release snapshot resources
6316 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6317 it != mStreamInfo.end(); it++) {
6318 QCamera3Channel *channel = (*it)->channel;
6319 if (channel) {
6320 rc = channel->flush();
6321 if (rc) {
6322 LOGE("Flushing the channels failed with error %d", rc);
6323 // even though the channel flush failed we need to continue and
6324 // return the buffers we have to the framework, however the return
6325 // value will be an error
6326 rc = -ENODEV;
6327 }
6328 }
6329 }
6330
6331 /* notify the frameworks and send errored results */
6332 rc = notifyErrorForPendingRequests();
6333 if (rc < 0) {
6334 LOGE("notifyErrorForPendingRequests failed");
6335 pthread_mutex_unlock(&mMutex);
6336 return rc;
6337 }
6338
6339 //unblock process_capture_request
6340 mPendingLiveRequest = 0;
6341 unblockRequestIfNecessary();
6342
6343 mFlushPerf = false;
6344 pthread_mutex_unlock(&mMutex);
6345 LOGD ("Flush Operation complete. rc = %d", rc);
6346 return rc;
6347}
6348
6349/*===========================================================================
6350 * FUNCTION : handleCameraDeviceError
6351 *
 6352 * DESCRIPTION: This function performs an internal flush, notifies the
 6353 * framework of the error, and updates the state variable.
6354 *
6355 * PARAMETERS : None
6356 *
6357 * RETURN : NO_ERROR on Success
6358 * Error code on failure
6359 *==========================================================================*/
6360int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6361{
6362 int32_t rc = NO_ERROR;
6363
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006364 {
6365 Mutex::Autolock lock(mFlushLock);
6366 pthread_mutex_lock(&mMutex);
6367 if (mState != ERROR) {
6368 //if mState != ERROR, nothing to be done
6369 pthread_mutex_unlock(&mMutex);
6370 return NO_ERROR;
6371 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006372 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006373
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006374 rc = flush(false /* restart channels */);
6375 if (NO_ERROR != rc) {
6376 LOGE("internal flush to handle mState = ERROR failed");
6377 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006378
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006379 pthread_mutex_lock(&mMutex);
6380 mState = DEINIT;
6381 pthread_mutex_unlock(&mMutex);
6382 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006383
6384 camera3_notify_msg_t notify_msg;
6385 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6386 notify_msg.type = CAMERA3_MSG_ERROR;
6387 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6388 notify_msg.message.error.error_stream = NULL;
6389 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006390 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006391
6392 return rc;
6393}
6394
6395/*===========================================================================
6396 * FUNCTION : captureResultCb
6397 *
6398 * DESCRIPTION: Callback handler for all capture result
6399 * (streams, as well as metadata)
6400 *
6401 * PARAMETERS :
6402 * @metadata : metadata information
6403 * @buffer : actual gralloc buffer to be returned to frameworks.
6404 * NULL if metadata.
6405 *
6406 * RETURN : NONE
6407 *==========================================================================*/
6408void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6409 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6410{
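    // Dispatch by payload type: batched HFR metadata goes through
    // handleBatchMetadata(), regular metadata is handled under mMutex, and
    // buffer / input-buffer callbacks are routed to their *WithLock handlers.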
6411 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006412 pthread_mutex_lock(&mMutex);
6413 uint8_t batchSize = mBatchSize;
6414 pthread_mutex_unlock(&mMutex);
6415 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006416 handleBatchMetadata(metadata_buf,
6417 true /* free_and_bufdone_meta_buf */);
6418 } else { /* mBatchSize = 0 */
6419 hdrPlusPerfLock(metadata_buf);
6420 pthread_mutex_lock(&mMutex);
6421 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006422 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006423 true /* last urgent frame of batch metadata */,
6424 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006425 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006426 pthread_mutex_unlock(&mMutex);
6427 }
6428 } else if (isInputBuffer) {
6429 pthread_mutex_lock(&mMutex);
6430 handleInputBufferWithLock(frame_number);
6431 pthread_mutex_unlock(&mMutex);
6432 } else {
6433 pthread_mutex_lock(&mMutex);
6434 handleBufferWithLock(buffer, frame_number);
6435 pthread_mutex_unlock(&mMutex);
6436 }
6437 return;
6438}
6439
6440/*===========================================================================
6441 * FUNCTION : getReprocessibleOutputStreamId
6442 *
6443 * DESCRIPTION: Get source output stream id for the input reprocess stream
6444 * based on size and format, which would be the largest
6445 * output stream if an input stream exists.
6446 *
6447 * PARAMETERS :
6448 * @id : return the stream id if found
6449 *
6450 * RETURN : int32_t type of status
6451 * NO_ERROR -- success
 6452 * non-zero failure code
6453 *==========================================================================*/
6454int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6455{
 6456 /* Check whether any output or bidirectional stream has the same size and
 6457 format as the input stream, and return that stream */
6458 if ((mInputStreamInfo.dim.width > 0) &&
6459 (mInputStreamInfo.dim.height > 0)) {
6460 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6461 it != mStreamInfo.end(); it++) {
6462
6463 camera3_stream_t *stream = (*it)->stream;
6464 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6465 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6466 (stream->format == mInputStreamInfo.format)) {
6467 // Usage flag for an input stream and the source output stream
6468 // may be different.
6469 LOGD("Found reprocessible output stream! %p", *it);
6470 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6471 stream->usage, mInputStreamInfo.usage);
6472
6473 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6474 if (channel != NULL && channel->mStreams[0]) {
6475 id = channel->mStreams[0]->getMyServerID();
6476 return NO_ERROR;
6477 }
6478 }
6479 }
6480 } else {
6481 LOGD("No input stream, so no reprocessible output stream");
6482 }
6483 return NAME_NOT_FOUND;
6484}
6485
6486/*===========================================================================
6487 * FUNCTION : lookupFwkName
6488 *
 6489 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
 6490 * make sure the parameter is correctly propagated
6491 *
6492 * PARAMETERS :
6493 * @arr : map between the two enums
6494 * @len : len of the map
6495 * @hal_name : name of the hal_parm to map
6496 *
6497 * RETURN : int type of status
6498 * fwk_name -- success
 6499 * non-zero failure code
6500 *==========================================================================*/
6501template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6502 size_t len, halType hal_name)
6503{
6504
6505 for (size_t i = 0; i < len; i++) {
6506 if (arr[i].hal_name == hal_name) {
6507 return arr[i].fwk_name;
6508 }
6509 }
6510
 6511 /* Not being able to find a matching framework type is not necessarily
 6512 * an error. This happens when mm-camera supports more attributes
 6513 * than the framework does */
6514 LOGH("Cannot find matching framework type");
6515 return NAME_NOT_FOUND;
6516}
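/*
 * Illustrative usage of lookupFwkName (the table names below are examples only;
 * the actual QCameraMap arrays are defined elsewhere in this file):
 *
 *   int fwkEffect = lookupFwkName(EFFECT_MODES_MAP,
 *           METADATA_MAP_SIZE(EFFECT_MODES_MAP), hal_effect);
 *   if (fwkEffect != NAME_NOT_FOUND) {
 *       uint8_t val = (uint8_t)fwkEffect;
 *       camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &val, 1);
 *   }
 */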
6517
6518/*===========================================================================
6519 * FUNCTION : lookupHalName
6520 *
 6521 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
 6522 * make sure the parameter is correctly propagated
6523 *
6524 * PARAMETERS :
6525 * @arr : map between the two enums
6526 * @len : len of the map
 6527 * @fwk_name : name of the fwk parameter to map
6528 *
6529 * RETURN : int32_t type of status
6530 * hal_name -- success
 6531 * non-zero failure code
6532 *==========================================================================*/
6533template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6534 size_t len, fwkType fwk_name)
6535{
6536 for (size_t i = 0; i < len; i++) {
6537 if (arr[i].fwk_name == fwk_name) {
6538 return arr[i].hal_name;
6539 }
6540 }
6541
6542 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6543 return NAME_NOT_FOUND;
6544}
6545
6546/*===========================================================================
6547 * FUNCTION : lookupProp
6548 *
6549 * DESCRIPTION: lookup a value by its name
6550 *
6551 * PARAMETERS :
6552 * @arr : map between the two enums
6553 * @len : size of the map
6554 * @name : name to be looked up
6555 *
6556 * RETURN : Value if found
6557 * CAM_CDS_MODE_MAX if not found
6558 *==========================================================================*/
6559template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6560 size_t len, const char *name)
6561{
6562 if (name) {
6563 for (size_t i = 0; i < len; i++) {
6564 if (!strcmp(arr[i].desc, name)) {
6565 return arr[i].val;
6566 }
6567 }
6568 }
6569 return CAM_CDS_MODE_MAX;
6570}
6571
6572/*===========================================================================
 6573 * FUNCTION   : translateFromHalMetadata
 *
6574 * DESCRIPTION:
6575 *
6576 * PARAMETERS :
6577 * @metadata : metadata information from callback
6578 * @timestamp: metadata buffer timestamp
6579 * @request_id: request id
6580 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006581 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006582 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6583 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006584 * @pprocDone: whether internal offline postprocsesing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006585 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6586 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006587 *
6588 * RETURN : camera_metadata_t*
6589 * metadata in a format specified by fwk
6590 *==========================================================================*/
6591camera_metadata_t*
6592QCamera3HardwareInterface::translateFromHalMetadata(
6593 metadata_buffer_t *metadata,
6594 nsecs_t timestamp,
6595 int32_t request_id,
6596 const CameraMetadata& jpegMetadata,
6597 uint8_t pipeline_depth,
6598 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006599 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006600 /* DevCamDebug metadata translateFromHalMetadata argument */
6601 uint8_t DevCamDebug_meta_enable,
6602 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006603 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006604 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006605 bool lastMetadataInBatch,
6606 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006607{
6608 CameraMetadata camMetadata;
6609 camera_metadata_t *resultMetadata;
6610
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006611 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006612 /* In batch mode, when this is not the last metadata in the batch, populate only
6613 * SENSOR_TIMESTAMP, which is needed for the shutter notify calculation.
6614 */
6615 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6616 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006617 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006618 }
6619
Thierry Strudel3d639192016-09-09 11:52:26 -07006620 if (jpegMetadata.entryCount())
6621 camMetadata.append(jpegMetadata);
6622
6623 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6624 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6625 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6626 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006627 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006628 if (mBatchSize == 0) {
6629 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6630 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6631 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006632
Samuel Ha68ba5172016-12-15 18:41:12 -08006633 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6634 // Only update DevCamDebug metadata conditionally: in non-HFR mode and when it is enabled.
6635 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6636 // DevCamDebug metadata translateFromHalMetadata AF
6637 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6638 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6639 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6640 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6641 }
6642 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6643 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6644 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6645 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6646 }
6647 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6648 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6649 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6650 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6651 }
6652 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6653 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6654 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6655 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6656 }
6657 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6658 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6659 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6660 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6661 }
6662 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6663 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6664 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6665 *DevCamDebug_af_monitor_pdaf_target_pos;
6666 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6667 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6668 }
6669 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6670 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6671 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6672 *DevCamDebug_af_monitor_pdaf_confidence;
6673 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6674 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6675 }
6676 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6677 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6678 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6679 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6680 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6681 }
6682 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6683 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6684 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6685 *DevCamDebug_af_monitor_tof_target_pos;
6686 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6687 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6688 }
6689 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6690 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6691 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6692 *DevCamDebug_af_monitor_tof_confidence;
6693 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6694 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6695 }
6696 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6697 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6698 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6699 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6700 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6701 }
6702 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6703 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6704 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6705 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6706 &fwk_DevCamDebug_af_monitor_type_select, 1);
6707 }
6708 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6709 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6710 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6711 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6712 &fwk_DevCamDebug_af_monitor_refocus, 1);
6713 }
6714 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6715 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6716 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6717 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6718 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6719 }
6720 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6721 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6722 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6723 *DevCamDebug_af_search_pdaf_target_pos;
6724 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6725 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6726 }
6727 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6728 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6729 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6730 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6731 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6732 }
6733 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6734 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6735 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6736 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6737 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6738 }
6739 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6740 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6741 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6742 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6743 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6744 }
6745 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6746 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6747 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6748 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6749 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6750 }
6751 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6752 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6753 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6754 *DevCamDebug_af_search_tof_target_pos;
6755 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6756 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6757 }
6758 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6759 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6760 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6761 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6762 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6763 }
6764 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6765 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6766 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6767 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6768 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6769 }
6770 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6771 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6772 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6773 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6774 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6775 }
6776 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6777 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6778 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6779 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6780 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6781 }
6782 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6783 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6784 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6785 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6786 &fwk_DevCamDebug_af_search_type_select, 1);
6787 }
6788 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6789 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6790 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6791 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6792 &fwk_DevCamDebug_af_search_next_pos, 1);
6793 }
6794 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6795 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6796 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6797 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6798 &fwk_DevCamDebug_af_search_target_pos, 1);
6799 }
6800 // DevCamDebug metadata translateFromHalMetadata AEC
6801 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6802 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6803 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6804 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6805 }
6806 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6807 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6808 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6809 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6810 }
6811 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6812 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6813 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6814 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6815 }
6816 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6817 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6818 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6819 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6820 }
6821 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6822 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6823 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6824 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6825 }
6826 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6827 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6828 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6829 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6830 }
6831 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6832 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6833 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6834 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6835 }
6836 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6837 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6838 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6839 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6840 }
Samuel Ha34229982017-02-17 13:51:11 -08006841 // DevCamDebug metadata translateFromHalMetadata zzHDR
6842 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6843 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6844 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6845 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6846 }
6847 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6848 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006849 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006850 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6851 }
6852 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6853 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6854 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6855 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6856 }
6857 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6858 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006859 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006860 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6861 }
6862 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6863 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6864 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6865 *DevCamDebug_aec_hdr_sensitivity_ratio;
6866 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6867 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6868 }
6869 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6870 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6871 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6872 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6873 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6874 }
6875 // DevCamDebug metadata translateFromHalMetadata ADRC
6876 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6877 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6878 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6879 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6880 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6881 }
6882 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6883 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6884 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6885 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6886 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6887 }
6888 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6889 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6890 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6891 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6892 }
6893 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6894 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6895 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6896 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6897 }
6898 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6899 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6900 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6901 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6902 }
6903 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6904 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6905 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6906 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6907 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006908 // DevCamDebug metadata translateFromHalMetadata AWB
6909 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6910 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6911 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6912 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6913 }
6914 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6915 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6916 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6917 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6918 }
6919 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6920 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6921 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6922 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6923 }
6924 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6925 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6926 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6927 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6928 }
6929 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6930 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6931 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6932 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6933 }
6934 }
6935 // atrace_end(ATRACE_TAG_ALWAYS);
6936
Thierry Strudel3d639192016-09-09 11:52:26 -07006937 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6938 int64_t fwk_frame_number = *frame_number;
6939 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6940 }
6941
6942 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6943 int32_t fps_range[2];
6944 fps_range[0] = (int32_t)float_range->min_fps;
6945 fps_range[1] = (int32_t)float_range->max_fps;
6946 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6947 fps_range, 2);
6948 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6949 fps_range[0], fps_range[1]);
6950 }
6951
6952 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6953 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6954 }
6955
6956 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6957 int val = lookupFwkName(SCENE_MODES_MAP,
6958 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6959 *sceneMode);
6960 if (NAME_NOT_FOUND != val) {
6961 uint8_t fwkSceneMode = (uint8_t)val;
6962 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6963 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6964 fwkSceneMode);
6965 }
6966 }
6967
6968 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6969 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6970 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6971 }
6972
6973 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6974 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6975 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6976 }
6977
6978 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6979 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6980 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6981 }
6982
6983 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6984 CAM_INTF_META_EDGE_MODE, metadata) {
6985 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6986 }
6987
6988 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6989 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6990 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6991 }
6992
6993 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6994 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6995 }
6996
6997 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6998 if (0 <= *flashState) {
6999 uint8_t fwk_flashState = (uint8_t) *flashState;
7000 if (!gCamCapability[mCameraId]->flash_available) {
7001 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7002 }
7003 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7004 }
7005 }
7006
7007 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7008 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7009 if (NAME_NOT_FOUND != val) {
7010 uint8_t fwk_flashMode = (uint8_t)val;
7011 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7012 }
7013 }
7014
7015 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7016 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7017 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7018 }
7019
7020 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7021 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7022 }
7023
7024 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7025 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7026 }
7027
7028 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7029 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7030 }
7031
7032 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7033 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7034 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7035 }
7036
7037 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7038 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7039 LOGD("fwk_videoStab = %d", fwk_videoStab);
7040 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7041 } else {
7042 // Regardless of whether video stabilization is supported or not, CTS expects the EIS
7043 // result to be non-NULL, so hardcode the video stabilization result to OFF mode.
7044 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7045 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007046 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007047 }
7048
7049 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7050 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7051 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7052 }
7053
7054 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7055 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7056 }
7057
Thierry Strudel3d639192016-09-09 11:52:26 -07007058 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7059 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007060 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007061
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007062 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7063 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007064
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007065 LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007066 blackLevelAppliedPattern->cam_black_level[0],
7067 blackLevelAppliedPattern->cam_black_level[1],
7068 blackLevelAppliedPattern->cam_black_level[2],
7069 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007070 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7071 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007072
7073#ifndef USE_HAL_3_3
7074 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307075 // Need to convert from the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007076 // depth space.
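        // Dividing by 2^(14 - 10) = 16 rescales each level; e.g. a 14-bit black
        // level of 1024 maps to 64 in the 10-bit space.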
Jason Lee4f3d96e2017-02-28 19:24:14 +05307077 fwk_blackLevelInd[0] /= 16.0;
7078 fwk_blackLevelInd[1] /= 16.0;
7079 fwk_blackLevelInd[2] /= 16.0;
7080 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007081 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7082 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007083#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007084 }
7085
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007086#ifndef USE_HAL_3_3
7087 // Fixed whitelevel is used by ISP/Sensor
7088 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7089 &gCamCapability[mCameraId]->white_level, 1);
7090#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007091
7092 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7093 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7094 int32_t scalerCropRegion[4];
7095 scalerCropRegion[0] = hScalerCropRegion->left;
7096 scalerCropRegion[1] = hScalerCropRegion->top;
7097 scalerCropRegion[2] = hScalerCropRegion->width;
7098 scalerCropRegion[3] = hScalerCropRegion->height;
7099
7100 // Adjust crop region from sensor output coordinate system to active
7101 // array coordinate system.
7102 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7103 scalerCropRegion[2], scalerCropRegion[3]);
7104
7105 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7106 }
7107
7108 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7109 LOGD("sensorExpTime = %lld", *sensorExpTime);
7110 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7111 }
7112
7113 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7114 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7115 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7116 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7117 }
7118
7119 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7120 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7121 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7122 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7123 sensorRollingShutterSkew, 1);
7124 }
7125
7126 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7127 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7128 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7129
7130 //calculate the noise profile based on sensitivity
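        // Each (S, O) pair parameterizes a simple sensor noise model in which the
        // noise grows with the signal level (roughly, variance ~ S * x + O for a
        // pixel value x); the same pair is replicated for every color channel.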
7131 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7132 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7133 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7134 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7135 noise_profile[i] = noise_profile_S;
7136 noise_profile[i+1] = noise_profile_O;
7137 }
7138 LOGD("noise model entry (S, O) is (%f, %f)",
7139 noise_profile_S, noise_profile_O);
7140 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7141 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7142 }
7143
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007144#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007145 int32_t fwk_ispSensitivity = 100;
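    // ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST uses ISO-like units where 100
    // means no additional boost; start from 100, override with the ISP
    // sensitivity when reported, then scale by the post-stats sensitivity.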
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007146 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007147 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007148 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007149 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7150 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7151 }
7152 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007153#endif
7154
Thierry Strudel3d639192016-09-09 11:52:26 -07007155 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7156 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7157 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7158 }
7159
7160 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7161 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7162 *faceDetectMode);
7163 if (NAME_NOT_FOUND != val) {
7164 uint8_t fwk_faceDetectMode = (uint8_t)val;
7165 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7166
7167 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7168 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7169 CAM_INTF_META_FACE_DETECTION, metadata) {
7170 uint8_t numFaces = MIN(
7171 faceDetectionInfo->num_faces_detected, MAX_ROI);
7172 int32_t faceIds[MAX_ROI];
7173 uint8_t faceScores[MAX_ROI];
7174 int32_t faceRectangles[MAX_ROI * 4];
7175 int32_t faceLandmarks[MAX_ROI * 6];
7176 size_t j = 0, k = 0;
7177
7178 for (size_t i = 0; i < numFaces; i++) {
7179 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7180 // Adjust crop region from sensor output coordinate system to active
7181 // array coordinate system.
7182 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7183 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7184 rect.width, rect.height);
7185
7186 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7187 faceRectangles+j, -1);
7188
Jason Lee8ce36fa2017-04-19 19:40:37 -07007189 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7190 "bottom-right (%d, %d)",
7191 faceDetectionInfo->frame_id, i,
7192 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7193 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7194
Thierry Strudel3d639192016-09-09 11:52:26 -07007195 j+= 4;
7196 }
7197 if (numFaces <= 0) {
7198 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7199 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7200 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7201 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7202 }
7203
7204 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7205 numFaces);
7206 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7207 faceRectangles, numFaces * 4U);
7208 if (fwk_faceDetectMode ==
7209 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7210 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7211 CAM_INTF_META_FACE_LANDMARK, metadata) {
7212
7213 for (size_t i = 0; i < numFaces; i++) {
7214 // Map the co-ordinate sensor output coordinate system to active
7215 // array coordinate system.
7216 mCropRegionMapper.toActiveArray(
7217 landmarks->face_landmarks[i].left_eye_center.x,
7218 landmarks->face_landmarks[i].left_eye_center.y);
7219 mCropRegionMapper.toActiveArray(
7220 landmarks->face_landmarks[i].right_eye_center.x,
7221 landmarks->face_landmarks[i].right_eye_center.y);
7222 mCropRegionMapper.toActiveArray(
7223 landmarks->face_landmarks[i].mouth_center.x,
7224 landmarks->face_landmarks[i].mouth_center.y);
7225
7226 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007227
7228 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7229 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7230 faceDetectionInfo->frame_id, i,
7231 faceLandmarks[k + LEFT_EYE_X],
7232 faceLandmarks[k + LEFT_EYE_Y],
7233 faceLandmarks[k + RIGHT_EYE_X],
7234 faceLandmarks[k + RIGHT_EYE_Y],
7235 faceLandmarks[k + MOUTH_X],
7236 faceLandmarks[k + MOUTH_Y]);
7237
Thierry Strudel04e026f2016-10-10 11:27:36 -07007238 k+= TOTAL_LANDMARK_INDICES;
7239 }
7240 } else {
7241 for (size_t i = 0; i < numFaces; i++) {
7242 setInvalidLandmarks(faceLandmarks+k);
7243 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007244 }
7245 }
7246
Jason Lee49619db2017-04-13 12:07:22 -07007247 for (size_t i = 0; i < numFaces; i++) {
7248 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7249
7250 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7251 faceDetectionInfo->frame_id, i, faceIds[i]);
7252 }
7253
Thierry Strudel3d639192016-09-09 11:52:26 -07007254 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7255 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7256 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007257 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007258 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7259 CAM_INTF_META_FACE_BLINK, metadata) {
7260 uint8_t detected[MAX_ROI];
7261 uint8_t degree[MAX_ROI * 2];
7262 for (size_t i = 0; i < numFaces; i++) {
7263 detected[i] = blinks->blink[i].blink_detected;
7264 degree[2 * i] = blinks->blink[i].left_blink;
7265 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007266
Jason Lee49619db2017-04-13 12:07:22 -07007267 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7268 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7269 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7270 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007271 }
7272 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7273 detected, numFaces);
7274 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7275 degree, numFaces * 2);
7276 }
7277 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7278 CAM_INTF_META_FACE_SMILE, metadata) {
7279 uint8_t degree[MAX_ROI];
7280 uint8_t confidence[MAX_ROI];
7281 for (size_t i = 0; i < numFaces; i++) {
7282 degree[i] = smiles->smile[i].smile_degree;
7283 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007284
Jason Lee49619db2017-04-13 12:07:22 -07007285 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7286 "smile_degree=%d, smile_score=%d",
7287 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007288 }
7289 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7290 degree, numFaces);
7291 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7292 confidence, numFaces);
7293 }
7294 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7295 CAM_INTF_META_FACE_GAZE, metadata) {
7296 int8_t angle[MAX_ROI];
7297 int32_t direction[MAX_ROI * 3];
7298 int8_t degree[MAX_ROI * 2];
7299 for (size_t i = 0; i < numFaces; i++) {
7300 angle[i] = gazes->gaze[i].gaze_angle;
7301 direction[3 * i] = gazes->gaze[i].updown_dir;
7302 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7303 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7304 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7305 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007306
7307 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7308 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7309 "left_right_gaze=%d, top_bottom_gaze=%d",
7310 faceDetectionInfo->frame_id, i, angle[i],
7311 direction[3 * i], direction[3 * i + 1],
7312 direction[3 * i + 2],
7313 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007314 }
7315 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7316 (uint8_t *)angle, numFaces);
7317 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7318 direction, numFaces * 3);
7319 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7320 (uint8_t *)degree, numFaces * 2);
7321 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007322 }
7323 }
7324 }
7325 }
7326
7327 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7328 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007329 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007330 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007331 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007332
Shuzhen Wang14415f52016-11-16 18:26:18 -08007333 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7334 histogramBins = *histBins;
7335 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7336 }
7337
7338 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007339 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7340 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007341 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007342
7343 switch (stats_data->type) {
7344 case CAM_HISTOGRAM_TYPE_BAYER:
7345 switch (stats_data->bayer_stats.data_type) {
7346 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007347 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7348 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007349 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007350 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7351 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007352 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007353 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7354 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007355 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007356 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007357 case CAM_STATS_CHANNEL_R:
7358 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007359 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7360 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007361 }
7362 break;
7363 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007364 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007365 break;
7366 }
7367
Shuzhen Wang14415f52016-11-16 18:26:18 -08007368 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007369 }
7370 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007371 }
7372
7373 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7374 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7375 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7376 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7377 }
7378
7379 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7380 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7381 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7382 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7383 }
7384
7385 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7386 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7387 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7388 CAM_MAX_SHADING_MAP_HEIGHT);
7389 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7390 CAM_MAX_SHADING_MAP_WIDTH);
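        // Each grid cell of the shading map carries four per-channel gain samples,
        // hence the 4U multiplier in the element count below.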
7391 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7392 lensShadingMap->lens_shading, 4U * map_width * map_height);
7393 }
7394
7395 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7396 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7397 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7398 }
7399
7400 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7401 //Populate CAM_INTF_META_TONEMAP_CURVES
7402 /* ch0 = G, ch 1 = B, ch 2 = R*/
7403 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7404 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7405 tonemap->tonemap_points_cnt,
7406 CAM_MAX_TONEMAP_CURVE_SIZE);
7407 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7408 }
7409
7410 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7411 &tonemap->curves[0].tonemap_points[0][0],
7412 tonemap->tonemap_points_cnt * 2);
7413
7414 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7415 &tonemap->curves[1].tonemap_points[0][0],
7416 tonemap->tonemap_points_cnt * 2);
7417
7418 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7419 &tonemap->curves[2].tonemap_points[0][0],
7420 tonemap->tonemap_points_cnt * 2);
7421 }
7422
7423 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7424 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7425 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7426 CC_GAIN_MAX);
7427 }
7428
7429 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7430 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7431 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7432 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7433 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7434 }
7435
7436 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7437 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7438 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7439 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7440 toneCurve->tonemap_points_cnt,
7441 CAM_MAX_TONEMAP_CURVE_SIZE);
7442 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7443 }
7444 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7445 (float*)toneCurve->curve.tonemap_points,
7446 toneCurve->tonemap_points_cnt * 2);
7447 }
7448
7449 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7450 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7451 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7452 predColorCorrectionGains->gains, 4);
7453 }
7454
7455 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7456 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7457 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7458 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7459 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7460 }
7461
7462 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7463 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7464 }
7465
7466 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7467 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7468 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7469 }
7470
7471 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7472 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7473 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7474 }
7475
7476 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7477 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7478 *effectMode);
7479 if (NAME_NOT_FOUND != val) {
7480 uint8_t fwk_effectMode = (uint8_t)val;
7481 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7482 }
7483 }
7484
7485 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7486 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7487 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7488 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7489 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7490 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7491 }
7492 int32_t fwk_testPatternData[4];
7493 fwk_testPatternData[0] = testPatternData->r;
7494 fwk_testPatternData[3] = testPatternData->b;
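        // Indices 1 and 2 hold the two green channels; which of Gr/Gb comes first
        // depends on the sensor's color filter arrangement, so the assignments are
        // swapped for GBRG/BGGR relative to RGGB/GRBG.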
7495 switch (gCamCapability[mCameraId]->color_arrangement) {
7496 case CAM_FILTER_ARRANGEMENT_RGGB:
7497 case CAM_FILTER_ARRANGEMENT_GRBG:
7498 fwk_testPatternData[1] = testPatternData->gr;
7499 fwk_testPatternData[2] = testPatternData->gb;
7500 break;
7501 case CAM_FILTER_ARRANGEMENT_GBRG:
7502 case CAM_FILTER_ARRANGEMENT_BGGR:
7503 fwk_testPatternData[2] = testPatternData->gr;
7504 fwk_testPatternData[1] = testPatternData->gb;
7505 break;
7506 default:
7507 LOGE("color arrangement %d is not supported",
7508 gCamCapability[mCameraId]->color_arrangement);
7509 break;
7510 }
7511 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7512 }
7513
7514 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7515 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7516 }
7517
7518 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7519 String8 str((const char *)gps_methods);
7520 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7521 }
7522
7523 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7524 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7525 }
7526
7527 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7528 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7529 }
7530
7531 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7532 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7533 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7534 }
7535
7536 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7537 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7538 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7539 }
7540
7541 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7542 int32_t fwk_thumb_size[2];
7543 fwk_thumb_size[0] = thumb_size->width;
7544 fwk_thumb_size[1] = thumb_size->height;
7545 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7546 }
7547
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007548 // Skip reprocess metadata if there is no input stream.
7549 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7550 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7551 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7552 privateData,
7553 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7554 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007555 }
7556
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007557 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007558 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007559 meteringMode, 1);
7560 }
7561
Thierry Strudel54dc9782017-02-15 12:12:10 -08007562 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7563 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7564 LOGD("hdr_scene_data: %d %f\n",
7565 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7566 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7567 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7568 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7569 &isHdr, 1);
7570 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7571 &isHdrConfidence, 1);
7572 }
7573
7574
7575
Thierry Strudel3d639192016-09-09 11:52:26 -07007576 if (metadata->is_tuning_params_valid) {
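        // The blob is packed as: tuning data version, then the sensor/VFE/CPP/CAC/
        // mod3 section sizes (one uint32_t each), then the sensor, VFE, CPP and CAC
        // data sections, each truncated to its TUNING_*_DATA_MAX limit.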
7577 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7578 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7579 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7580
7581
7582 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7583 sizeof(uint32_t));
7584 data += sizeof(uint32_t);
7585
7586 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7587 sizeof(uint32_t));
7588 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7589 data += sizeof(uint32_t);
7590
7591 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7592 sizeof(uint32_t));
7593 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7594 data += sizeof(uint32_t);
7595
7596 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7597 sizeof(uint32_t));
7598 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7599 data += sizeof(uint32_t);
7600
7601 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7602 sizeof(uint32_t));
7603 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7604 data += sizeof(uint32_t);
7605
7606 metadata->tuning_params.tuning_mod3_data_size = 0;
7607 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7608 sizeof(uint32_t));
7609 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7610 data += sizeof(uint32_t);
7611
7612 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7613 TUNING_SENSOR_DATA_MAX);
7614 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7615 count);
7616 data += count;
7617
7618 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7619 TUNING_VFE_DATA_MAX);
7620 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7621 count);
7622 data += count;
7623
7624 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7625 TUNING_CPP_DATA_MAX);
7626 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7627 count);
7628 data += count;
7629
7630 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7631 TUNING_CAC_DATA_MAX);
7632 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7633 count);
7634 data += count;
7635
7636 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7637 (int32_t *)(void *)tuning_meta_data_blob,
7638 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7639 }
7640
7641 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7642 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7643 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7644 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7645 NEUTRAL_COL_POINTS);
7646 }
7647
7648 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7649 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7650 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7651 }
7652
7653 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7654 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7655 // Adjust crop region from sensor output coordinate system to active
7656 // array coordinate system.
7657 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7658 hAeRegions->rect.width, hAeRegions->rect.height);
7659
7660 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7661 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7662 REGIONS_TUPLE_COUNT);
7663 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7664 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7665 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7666 hAeRegions->rect.height);
7667 }
7668
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007669 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7670 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7671 if (NAME_NOT_FOUND != val) {
7672 uint8_t fwkAfMode = (uint8_t)val;
7673 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7674 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7675 } else {
7676 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7677 val);
7678 }
7679 }
7680
Thierry Strudel3d639192016-09-09 11:52:26 -07007681 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7682 uint8_t fwk_afState = (uint8_t) *afState;
7683 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007684 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007685 }
7686
7687 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7688 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7689 }
7690
7691 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7692 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7693 }
7694
7695 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7696 uint8_t fwk_lensState = *lensState;
7697 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7698 }
7699
Thierry Strudel3d639192016-09-09 11:52:26 -07007700
7701 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007702 uint32_t ab_mode = *hal_ab_mode;
7703 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7704 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7705 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7706 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007707 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007708 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007709 if (NAME_NOT_FOUND != val) {
7710 uint8_t fwk_ab_mode = (uint8_t)val;
7711 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7712 }
7713 }
7714
7715 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7716 int val = lookupFwkName(SCENE_MODES_MAP,
7717 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7718 if (NAME_NOT_FOUND != val) {
7719 uint8_t fwkBestshotMode = (uint8_t)val;
7720 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7721 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7722 } else {
7723 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7724 }
7725 }
7726
7727 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7728 uint8_t fwk_mode = (uint8_t) *mode;
7729 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7730 }
7731
7732 /* Constant metadata values to be updated */
7733 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7734 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7735
7736 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7737 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7738
7739 int32_t hotPixelMap[2];
7740 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7741
7742 // CDS
7743 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7744 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7745 }
7746
Thierry Strudel04e026f2016-10-10 11:27:36 -07007747 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7748 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007749 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007750 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7751 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7752 } else {
7753 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7754 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007755
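        // Track OFF<->ON transitions of staggered video HDR in mCurrFeatureState so
        // the toggle is logged only when the state actually changes.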
7756 if(fwk_hdr != curr_hdr_state) {
7757 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7758 if(fwk_hdr)
7759 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7760 else
7761 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7762 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007763 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7764 }
7765
Thierry Strudel54dc9782017-02-15 12:12:10 -08007766 //binning correction
7767 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7768 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7769 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7770 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7771 }
7772
Thierry Strudel04e026f2016-10-10 11:27:36 -07007773 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007774 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007775 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7776 int8_t is_ir_on = 0;
7777
7778        is_ir_on = (fwk_ir > 0) ? 1 : 0;
7779 if(is_ir_on != curr_ir_state) {
7780 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7781 if(is_ir_on)
7782 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7783 else
7784 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7785 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007786 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007787 }
7788
Thierry Strudel269c81a2016-10-12 12:13:59 -07007789 // AEC SPEED
7790 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7791 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7792 }
7793
7794 // AWB SPEED
7795 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7796 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7797 }
7798
Thierry Strudel3d639192016-09-09 11:52:26 -07007799 // TNR
7800 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7801 uint8_t tnr_enable = tnr->denoise_enable;
7802 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007803 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7804 int8_t is_tnr_on = 0;
7805
7806        is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7807 if(is_tnr_on != curr_tnr_state) {
7808 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7809 if(is_tnr_on)
7810 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7811 else
7812 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7813 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007814
7815 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7816 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7817 }
7818
7819 // Reprocess crop data
7820 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7821 uint8_t cnt = crop_data->num_of_streams;
7822 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7823                 // mm-qcamera-daemon only posts crop_data for streams
7824                 // not linked to pproc, so the absence of valid crop
7825                 // metadata is not necessarily an error.
7826 LOGD("No valid crop metadata entries");
7827 } else {
7828 uint32_t reproc_stream_id;
7829 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7830 LOGD("No reprocessible stream found, ignore crop data");
7831 } else {
7832 int rc = NO_ERROR;
7833 Vector<int32_t> roi_map;
7834 int32_t *crop = new int32_t[cnt*4];
7835 if (NULL == crop) {
7836 rc = NO_MEMORY;
7837 }
7838 if (NO_ERROR == rc) {
7839 int32_t streams_found = 0;
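                    // Only the crop entry of the reprocessible output stream is
                    // exported; the loop stops at the first matching stream.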
7840 for (size_t i = 0; i < cnt; i++) {
7841 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7842 if (pprocDone) {
7843 // HAL already does internal reprocessing,
7844 // either via reprocessing before JPEG encoding,
7845 // or offline postprocessing for pproc bypass case.
7846 crop[0] = 0;
7847 crop[1] = 0;
7848 crop[2] = mInputStreamInfo.dim.width;
7849 crop[3] = mInputStreamInfo.dim.height;
7850 } else {
7851 crop[0] = crop_data->crop_info[i].crop.left;
7852 crop[1] = crop_data->crop_info[i].crop.top;
7853 crop[2] = crop_data->crop_info[i].crop.width;
7854 crop[3] = crop_data->crop_info[i].crop.height;
7855 }
7856 roi_map.add(crop_data->crop_info[i].roi_map.left);
7857 roi_map.add(crop_data->crop_info[i].roi_map.top);
7858 roi_map.add(crop_data->crop_info[i].roi_map.width);
7859 roi_map.add(crop_data->crop_info[i].roi_map.height);
7860 streams_found++;
7861 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7862 crop[0], crop[1], crop[2], crop[3]);
7863 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7864 crop_data->crop_info[i].roi_map.left,
7865 crop_data->crop_info[i].roi_map.top,
7866 crop_data->crop_info[i].roi_map.width,
7867 crop_data->crop_info[i].roi_map.height);
7868 break;
7869
7870 }
7871 }
7872 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7873 &streams_found, 1);
7874 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7875 crop, (size_t)(streams_found * 4));
7876 if (roi_map.array()) {
7877 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7878 roi_map.array(), roi_map.size());
7879 }
7880 }
7881 if (crop) {
7882 delete [] crop;
7883 }
7884 }
7885 }
7886 }
7887
7888 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7889         // Regardless of whether CAC is supported, CTS expects the CAC result to be
7890         // non-NULL, so hardcode the CAC result to OFF mode.
7891 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7892 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7893 } else {
7894 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7895 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7896 *cacMode);
7897 if (NAME_NOT_FOUND != val) {
7898 uint8_t resultCacMode = (uint8_t)val;
7899                 // Check whether the CAC result from the callback equals the framework-set CAC mode.
7900                 // If not, report the CAC mode that came in the corresponding request.
7901 if (fwk_cacMode != resultCacMode) {
7902 resultCacMode = fwk_cacMode;
7903 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007904 //Check if CAC is disabled by property
7905 if (m_cacModeDisabled) {
7906 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7907 }
7908
Thierry Strudel3d639192016-09-09 11:52:26 -07007909 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7910 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7911 } else {
7912 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7913 }
7914 }
7915 }
7916
7917 // Post blob of cam_cds_data through vendor tag.
7918 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
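        // Collapse the per-stream CDS info into a single-entry override blob that
        // carries only the reprocessible output stream's CDS enable flag.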
7919 uint8_t cnt = cdsInfo->num_of_streams;
7920 cam_cds_data_t cdsDataOverride;
7921 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7922 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7923 cdsDataOverride.num_of_streams = 1;
7924 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7925 uint32_t reproc_stream_id;
7926 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7927 LOGD("No reprocessible stream found, ignore cds data");
7928 } else {
7929 for (size_t i = 0; i < cnt; i++) {
7930 if (cdsInfo->cds_info[i].stream_id ==
7931 reproc_stream_id) {
7932 cdsDataOverride.cds_info[0].cds_enable =
7933 cdsInfo->cds_info[i].cds_enable;
7934 break;
7935 }
7936 }
7937 }
7938 } else {
7939 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7940 }
7941 camMetadata.update(QCAMERA3_CDS_INFO,
7942 (uint8_t *)&cdsDataOverride,
7943 sizeof(cam_cds_data_t));
7944 }
7945
7946 // Ldaf calibration data
7947 if (!mLdafCalibExist) {
7948 IF_META_AVAILABLE(uint32_t, ldafCalib,
7949 CAM_INTF_META_LDAF_EXIF, metadata) {
7950 mLdafCalibExist = true;
7951 mLdafCalib[0] = ldafCalib[0];
7952 mLdafCalib[1] = ldafCalib[1];
7953 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7954 ldafCalib[0], ldafCalib[1]);
7955 }
7956 }
7957
Thierry Strudel54dc9782017-02-15 12:12:10 -08007958 // EXIF debug data through vendor tag
7959 /*
7960 * Mobicat Mask can assume 3 values:
7961 * 1 refers to Mobicat data,
7962 * 2 refers to Stats Debug and Exif Debug Data
7963 * 3 refers to Mobicat and Stats Debug Data
7964 * We want to make sure that we are sending Exif debug data
7965 * only when Mobicat Mask is 2.
7966 */
7967 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7968 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7969 (uint8_t *)(void *)mExifParams.debug_params,
7970 sizeof(mm_jpeg_debug_exif_params_t));
7971 }
7972
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007973 // Reprocess and DDM debug data through vendor tag
7974 cam_reprocess_info_t repro_info;
7975 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007976 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7977 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007978 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007979 }
7980 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7981 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007982 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007983 }
7984 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7985 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007986 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007987 }
7988 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7989 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007990 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007991 }
7992 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7993 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007994 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007995 }
7996 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007997 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007998 }
7999 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8000 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008001 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008002 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008003 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8004 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8005 }
8006 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8007 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8008 }
8009 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8010 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008011
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008012 // INSTANT AEC MODE
8013 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8014 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8015 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8016 }
8017
Shuzhen Wange763e802016-03-31 10:24:29 -07008018 // AF scene change
8019 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8020 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8021 }
8022
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008023 // Enable ZSL
8024 if (enableZsl != nullptr) {
8025 uint8_t value = *enableZsl ?
8026 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8027 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8028 }
8029
Xu Han821ea9c2017-05-23 09:00:40 -07008030 // OIS Data
8031 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8032 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8033 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8034 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8035 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8036 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8037 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8038 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8039 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8040 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8041 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
8042 }
8043
Thierry Strudel3d639192016-09-09 11:52:26 -07008044 resultMetadata = camMetadata.release();
8045 return resultMetadata;
8046}
8047
8048/*===========================================================================
8049 * FUNCTION : saveExifParams
8050 *
8051 * DESCRIPTION: Cache the 3A/stats EXIF debug parameters delivered in the metadata callback.
8052 *
8053 * PARAMETERS :
8054 * @metadata : metadata information from callback
8055 *
8056 * RETURN : none
8057 *
8058 *==========================================================================*/
8059void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8060{
8061 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8062 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8063 if (mExifParams.debug_params) {
8064 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8065 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8066 }
8067 }
8068 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8069 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8070 if (mExifParams.debug_params) {
8071 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8072 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8073 }
8074 }
8075 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8076 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8077 if (mExifParams.debug_params) {
8078 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8079 mExifParams.debug_params->af_debug_params_valid = TRUE;
8080 }
8081 }
8082 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8083 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8084 if (mExifParams.debug_params) {
8085 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8086 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8087 }
8088 }
8089 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8090 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8091 if (mExifParams.debug_params) {
8092 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8093 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8094 }
8095 }
8096 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8097 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8098 if (mExifParams.debug_params) {
8099 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8100 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8101 }
8102 }
8103 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8104 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8105 if (mExifParams.debug_params) {
8106 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8107 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8108 }
8109 }
8110 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8111 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8112 if (mExifParams.debug_params) {
8113 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8114 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8115 }
8116 }
8117}
8118
8119/*===========================================================================
8120 * FUNCTION : get3AExifParams
8121 *
8122 * DESCRIPTION:
8123 *
8124 * PARAMETERS : none
8125 *
8126 *
8127 * RETURN : mm_jpeg_exif_params_t
8128 *
8129 *==========================================================================*/
8130mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8131{
8132 return mExifParams;
8133}
8134
8135/*===========================================================================
8136 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8137 *
8138 * DESCRIPTION:
8139 *
8140 * PARAMETERS :
8141 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008142 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8143 * urgent metadata in a batch. Always true for
8144 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008145 *
8146 * RETURN : camera_metadata_t*
8147 * metadata in a format specified by fwk
8148 *==========================================================================*/
8149camera_metadata_t*
8150QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008151 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008152{
8153 CameraMetadata camMetadata;
8154 camera_metadata_t *resultMetadata;
8155
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008156 if (!lastUrgentMetadataInBatch) {
8157 /* In batch mode, use empty metadata if this is not the last in batch
8158 */
8159 resultMetadata = allocate_camera_metadata(0, 0);
8160 return resultMetadata;
8161 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008162
8163 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8164 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8165 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8166 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8167 }
8168
8169 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8170 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8171 &aecTrigger->trigger, 1);
8172 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8173 &aecTrigger->trigger_id, 1);
8174 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8175 aecTrigger->trigger);
8176 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8177 aecTrigger->trigger_id);
8178 }
8179
8180 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8181 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8182 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8183 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8184 }
8185
Thierry Strudel3d639192016-09-09 11:52:26 -07008186 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8187 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8188 &af_trigger->trigger, 1);
8189 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8190 af_trigger->trigger);
8191 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8192 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8193 af_trigger->trigger_id);
8194 }
8195
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008196 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8197 /*af regions*/
8198 int32_t afRegions[REGIONS_TUPLE_COUNT];
8199 // Adjust crop region from sensor output coordinate system to active
8200 // array coordinate system.
8201 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8202 hAfRegions->rect.width, hAfRegions->rect.height);
8203
8204 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8205 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8206 REGIONS_TUPLE_COUNT);
8207 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8208 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8209 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8210 hAfRegions->rect.height);
8211 }
8212
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008213 // AF region confidence
8214 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8215 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8216 }
8217
Thierry Strudel3d639192016-09-09 11:52:26 -07008218 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8219 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8220 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8221 if (NAME_NOT_FOUND != val) {
8222 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8223 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8224 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8225 } else {
8226 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8227 }
8228 }
8229
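    // Deduce ANDROID_CONTROL_AE_MODE from redeye reduction, LED/flash mode and AEC
    // mode, checked in that order of priority.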
8230 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8231 uint32_t aeMode = CAM_AE_MODE_MAX;
8232 int32_t flashMode = CAM_FLASH_MODE_MAX;
8233 int32_t redeye = -1;
8234 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8235 aeMode = *pAeMode;
8236 }
8237 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8238 flashMode = *pFlashMode;
8239 }
8240 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8241 redeye = *pRedeye;
8242 }
8243
8244 if (1 == redeye) {
8245 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8246 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8247 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8248 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8249 flashMode);
8250 if (NAME_NOT_FOUND != val) {
8251 fwk_aeMode = (uint8_t)val;
8252 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8253 } else {
8254 LOGE("Unsupported flash mode %d", flashMode);
8255 }
8256 } else if (aeMode == CAM_AE_MODE_ON) {
8257 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8258 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8259 } else if (aeMode == CAM_AE_MODE_OFF) {
8260 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8261 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008262 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8263 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8264 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008265 } else {
8266 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8267 "flashMode:%d, aeMode:%u!!!",
8268 redeye, flashMode, aeMode);
8269 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008270 if (mInstantAEC) {
8271        // Increment frame index count until a bound is reached for instant AEC.
8272 mInstantAecFrameIdxCount++;
8273 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8274 CAM_INTF_META_AEC_INFO, metadata) {
8275 LOGH("ae_params->settled = %d",ae_params->settled);
8276             // If AEC has settled, or the number of frames has reached the
8277             // bound value, reset instant AEC.
8278 if (ae_params->settled ||
8279 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8280 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8281 mInstantAEC = false;
8282 mResetInstantAEC = true;
8283 mInstantAecFrameIdxCount = 0;
8284 }
8285 }
8286 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008287 resultMetadata = camMetadata.release();
8288 return resultMetadata;
8289}
8290
8291/*===========================================================================
8292 * FUNCTION : dumpMetadataToFile
8293 *
8294 * DESCRIPTION: Dumps tuning metadata to file system
8295 *
8296 * PARAMETERS :
8297 * @meta : tuning metadata
8298 * @dumpFrameCount : current dump frame count
8299 * @enabled : whether tuning data dumping is enabled
 * @type : dump type string used in the output file name
 * @frameNumber : frame number appended to the output file name
8300 *
8301 *==========================================================================*/
8302void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8303 uint32_t &dumpFrameCount,
8304 bool enabled,
8305 const char *type,
8306 uint32_t frameNumber)
8307{
8308 //Some sanity checks
8309 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8310 LOGE("Tuning sensor data size bigger than expected %d: %d",
8311 meta.tuning_sensor_data_size,
8312 TUNING_SENSOR_DATA_MAX);
8313 return;
8314 }
8315
8316 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8317 LOGE("Tuning VFE data size bigger than expected %d: %d",
8318 meta.tuning_vfe_data_size,
8319 TUNING_VFE_DATA_MAX);
8320 return;
8321 }
8322
8323 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8324 LOGE("Tuning CPP data size bigger than expected %d: %d",
8325 meta.tuning_cpp_data_size,
8326 TUNING_CPP_DATA_MAX);
8327 return;
8328 }
8329
8330 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8331 LOGE("Tuning CAC data size bigger than expected %d: %d",
8332 meta.tuning_cac_data_size,
8333 TUNING_CAC_DATA_MAX);
8334 return;
8335 }
8336 //
8337
8338 if(enabled){
8339 char timeBuf[FILENAME_MAX];
8340 char buf[FILENAME_MAX];
8341 memset(buf, 0, sizeof(buf));
8342 memset(timeBuf, 0, sizeof(timeBuf));
8343 time_t current_time;
8344 struct tm * timeinfo;
8345 time (&current_time);
8346 timeinfo = localtime (&current_time);
8347 if (timeinfo != NULL) {
8348 strftime (timeBuf, sizeof(timeBuf),
8349 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8350 }
8351 String8 filePath(timeBuf);
8352 snprintf(buf,
8353 sizeof(buf),
8354 "%dm_%s_%d.bin",
8355 dumpFrameCount,
8356 type,
8357 frameNumber);
8358 filePath.append(buf);
8359 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8360 if (file_fd >= 0) {
8361 ssize_t written_len = 0;
8362 meta.tuning_data_version = TUNING_DATA_VERSION;
8363 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8364 written_len += write(file_fd, data, sizeof(uint32_t));
8365 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8366 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8367 written_len += write(file_fd, data, sizeof(uint32_t));
8368 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8369 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8370 written_len += write(file_fd, data, sizeof(uint32_t));
8371 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8372 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8373 written_len += write(file_fd, data, sizeof(uint32_t));
8374 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8375 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8376 written_len += write(file_fd, data, sizeof(uint32_t));
8377 meta.tuning_mod3_data_size = 0;
8378 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8379 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8380 written_len += write(file_fd, data, sizeof(uint32_t));
8381 size_t total_size = meta.tuning_sensor_data_size;
8382 data = (void *)((uint8_t *)&meta.data);
8383 written_len += write(file_fd, data, total_size);
8384 total_size = meta.tuning_vfe_data_size;
8385 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8386 written_len += write(file_fd, data, total_size);
8387 total_size = meta.tuning_cpp_data_size;
8388 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8389 written_len += write(file_fd, data, total_size);
8390 total_size = meta.tuning_cac_data_size;
8391 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8392 written_len += write(file_fd, data, total_size);
8393 close(file_fd);
8394        } else {
8395 LOGE("fail to open file for metadata dumping");
8396 }
8397 }
8398}
8399
8400/*===========================================================================
8401 * FUNCTION : cleanAndSortStreamInfo
8402 *
8403 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8404 * and sort them such that the raw stream is at the end of the list.
8405 * This is a workaround for a camera daemon constraint.
8406 *
8407 * PARAMETERS : None
8408 *
8409 *==========================================================================*/
8410void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8411{
8412 List<stream_info_t *> newStreamInfo;
8413
8414 /*clean up invalid streams*/
8415 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8416 it != mStreamInfo.end();) {
8417 if(((*it)->status) == INVALID){
8418 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8419 delete channel;
8420 free(*it);
8421 it = mStreamInfo.erase(it);
8422 } else {
8423 it++;
8424 }
8425 }
8426
8427 // Move preview/video/callback/snapshot streams into newList
8428 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8429 it != mStreamInfo.end();) {
8430 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8431 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8432 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8433 newStreamInfo.push_back(*it);
8434 it = mStreamInfo.erase(it);
8435 } else
8436 it++;
8437 }
8438 // Move raw streams into newList
8439 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8440 it != mStreamInfo.end();) {
8441 newStreamInfo.push_back(*it);
8442 it = mStreamInfo.erase(it);
8443 }
8444
8445 mStreamInfo = newStreamInfo;
8446}
8447
8448/*===========================================================================
8449 * FUNCTION : extractJpegMetadata
8450 *
8451 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8452 * JPEG metadata is cached in HAL, and return as part of capture
8453 * result when metadata is returned from camera daemon.
8454 *
8455 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8456 * @request: capture request
8457 *
8458 *==========================================================================*/
8459void QCamera3HardwareInterface::extractJpegMetadata(
8460 CameraMetadata& jpegMetadata,
8461 const camera3_capture_request_t *request)
8462{
8463 CameraMetadata frame_settings;
8464 frame_settings = request->settings;
8465
8466 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8467 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8468 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8469 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8470
8471 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8472 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8473 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8474 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8475
8476 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8477 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8478 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8479 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8480
8481 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8482 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8483 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8484 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8485
8486 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8487 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8488 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8489 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8490
8491 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8492 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8493 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8494 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8495
8496 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8497 int32_t thumbnail_size[2];
8498 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8499 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8500 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8501 int32_t orientation =
8502 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008503 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008504 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8505 int32_t temp;
8506 temp = thumbnail_size[0];
8507 thumbnail_size[0] = thumbnail_size[1];
8508 thumbnail_size[1] = temp;
8509 }
8510 }
8511 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8512 thumbnail_size,
8513 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8514 }
8515
8516}
8517
8518/*===========================================================================
8519 * FUNCTION : convertToRegions
8520 *
8521 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8522 *
8523 * PARAMETERS :
8524 * @rect : cam_rect_t struct to convert
8525 * @region : int32_t destination array
8526 * @weight : if we are converting from cam_area_t, weight is valid
8527 * else weight = -1
8528 *
8529 *==========================================================================*/
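// Illustrative example (assuming FACE_LEFT..FACE_WEIGHT index the array as
// {left, top, right, bottom, weight}): rect {left=100, top=200, width=50, height=60}
// with weight 1 yields region[] = {100, 200, 150, 260, 1}.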
8530void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8531 int32_t *region, int weight)
8532{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008533 region[FACE_LEFT] = rect.left;
8534 region[FACE_TOP] = rect.top;
8535 region[FACE_RIGHT] = rect.left + rect.width;
8536 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008537 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008538 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008539 }
8540}
8541
8542/*===========================================================================
8543 * FUNCTION : convertFromRegions
8544 *
8545 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8546 *
8547 * PARAMETERS :
8548 * @roi : cam_area_t destination to fill
8549 * @frame_settings : capture request settings containing the region tag
8550 * @tag : metadata tag whose data holds {x_min, y_min, x_max, y_max, weight}
8552 *
8553 *==========================================================================*/
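// The framework 5-tuple {x_min, y_min, x_max, y_max, weight} is converted to a
// top-left/width/height rectangle: width = x_max - x_min, height = y_max - y_min.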
8554void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008555 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008556{
Thierry Strudel3d639192016-09-09 11:52:26 -07008557 int32_t x_min = frame_settings.find(tag).data.i32[0];
8558 int32_t y_min = frame_settings.find(tag).data.i32[1];
8559 int32_t x_max = frame_settings.find(tag).data.i32[2];
8560 int32_t y_max = frame_settings.find(tag).data.i32[3];
8561 roi.weight = frame_settings.find(tag).data.i32[4];
8562 roi.rect.left = x_min;
8563 roi.rect.top = y_min;
8564 roi.rect.width = x_max - x_min;
8565 roi.rect.height = y_max - y_min;
8566}
8567
8568/*===========================================================================
8569 * FUNCTION : resetIfNeededROI
8570 *
8571 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8572 * crop region
8573 *
8574 * PARAMETERS :
8575 * @roi : cam_area_t struct to resize
8576 * @scalerCropRegion : cam_crop_region_t region to compare against
8577 *
8578 *
8579 *==========================================================================*/
8580bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8581 const cam_crop_region_t* scalerCropRegion)
8582{
8583 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8584 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8585 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8586 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8587
8588     /* According to the spec, weight = 0 indicates that the roi should be disabled.
8589      * Without this check, the validation below (whether the roi lies inside the
8590      * scaler crop region) would fail, leaving the roi un-reset and causing the
8591      * algorithm to continue using a stale roi window.
8592      */
8593 if (roi->weight == 0) {
8594 return true;
8595 }
8596
8597 if ((roi_x_max < scalerCropRegion->left) ||
8598 // right edge of roi window is left of scalar crop's left edge
8599 (roi_y_max < scalerCropRegion->top) ||
8600 // bottom edge of roi window is above scalar crop's top edge
8601 (roi->rect.left > crop_x_max) ||
8602 // left edge of roi window is beyond(right) of scalar crop's right edge
8603 (roi->rect.top > crop_y_max)){
8604                // top edge of roi window is below scalar crop's bottom edge
8605 return false;
8606 }
8607 if (roi->rect.left < scalerCropRegion->left) {
8608 roi->rect.left = scalerCropRegion->left;
8609 }
8610 if (roi->rect.top < scalerCropRegion->top) {
8611 roi->rect.top = scalerCropRegion->top;
8612 }
8613 if (roi_x_max > crop_x_max) {
8614 roi_x_max = crop_x_max;
8615 }
8616 if (roi_y_max > crop_y_max) {
8617 roi_y_max = crop_y_max;
8618 }
8619 roi->rect.width = roi_x_max - roi->rect.left;
8620 roi->rect.height = roi_y_max - roi->rect.top;
8621 return true;
8622}
8623
8624/*===========================================================================
8625 * FUNCTION : convertLandmarks
8626 *
8627 * DESCRIPTION: helper method to extract the landmarks from face detection info
8628 *
8629 * PARAMETERS :
8630 * @landmark_data : input landmark data to be converted
8631 * @landmarks : int32_t destination array
8632 *
8633 *
8634 *==========================================================================*/
8635void QCamera3HardwareInterface::convertLandmarks(
8636 cam_face_landmarks_info_t landmark_data,
8637 int32_t *landmarks)
8638{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008639 if (landmark_data.is_left_eye_valid) {
8640 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8641 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8642 } else {
8643 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8644 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8645 }
8646
8647 if (landmark_data.is_right_eye_valid) {
8648 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8649 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8650 } else {
8651 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8652 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8653 }
8654
8655 if (landmark_data.is_mouth_valid) {
8656 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8657 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8658 } else {
8659 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8660 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8661 }
8662}
8663
8664/*===========================================================================
8665 * FUNCTION : setInvalidLandmarks
8666 *
8667 * DESCRIPTION: helper method to set invalid landmarks
8668 *
8669 * PARAMETERS :
8670 * @landmarks : int32_t destination array
8671 *
8672 *
8673 *==========================================================================*/
8674void QCamera3HardwareInterface::setInvalidLandmarks(
8675 int32_t *landmarks)
8676{
8677 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8678 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8679 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8680 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8681 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8682 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008683}
8684
8685#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008686
8687/*===========================================================================
8688 * FUNCTION : getCapabilities
8689 *
8690 * DESCRIPTION: query camera capability from back-end
8691 *
8692 * PARAMETERS :
8693 * @ops : mm-interface ops structure
8694 * @cam_handle : camera handle for which we need capability
8695 *
8696 * RETURN : ptr type of capability structure
8697 * capability for success
8698 * NULL for failure
8699 *==========================================================================*/
8700cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8701 uint32_t cam_handle)
8702{
8703 int rc = NO_ERROR;
8704 QCamera3HeapMemory *capabilityHeap = NULL;
8705 cam_capability_t *cap_ptr = NULL;
8706
8707 if (ops == NULL) {
8708 LOGE("Invalid arguments");
8709 return NULL;
8710 }
8711
8712 capabilityHeap = new QCamera3HeapMemory(1);
8713 if (capabilityHeap == NULL) {
8714 LOGE("creation of capabilityHeap failed");
8715 return NULL;
8716 }
8717
8718 /* Allocate memory for capability buffer */
8719 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8720 if(rc != OK) {
8721 LOGE("No memory for cappability");
8722 goto allocate_failed;
8723 }
8724
8725 /* Map memory for capability buffer */
8726 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8727
8728 rc = ops->map_buf(cam_handle,
8729 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8730 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8731 if(rc < 0) {
8732 LOGE("failed to map capability buffer");
8733 rc = FAILED_TRANSACTION;
8734 goto map_failed;
8735 }
8736
8737 /* Query Capability */
8738 rc = ops->query_capability(cam_handle);
8739 if(rc < 0) {
8740 LOGE("failed to query capability");
8741 rc = FAILED_TRANSACTION;
8742 goto query_failed;
8743 }
8744
8745 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8746 if (cap_ptr == NULL) {
8747 LOGE("out of memory");
8748 rc = NO_MEMORY;
8749 goto query_failed;
8750 }
8751
8752 memset(cap_ptr, 0, sizeof(cam_capability_t));
8753 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8754
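    // Clear the analysis-stream padding offsets in the returned copy of the capability.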
8755 int index;
8756 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8757 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8758 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8759 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8760 }
8761
8762query_failed:
8763 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8764map_failed:
8765 capabilityHeap->deallocate();
8766allocate_failed:
8767 delete capabilityHeap;
8768
8769 if (rc != NO_ERROR) {
8770 return NULL;
8771 } else {
8772 return cap_ptr;
8773 }
8774}
8775
Thierry Strudel3d639192016-09-09 11:52:26 -07008776/*===========================================================================
8777 * FUNCTION : initCapabilities
8778 *
8779 * DESCRIPTION: initialize camera capabilities in static data struct
8780 *
8781 * PARAMETERS :
8782 * @cameraId : camera Id
8783 *
8784 * RETURN : int32_t type of status
8785 * NO_ERROR -- success
8786 * none-zero failure code
8787 *==========================================================================*/
8788int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8789{
8790 int rc = 0;
8791 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008792 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008793
8794 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8795 if (rc) {
8796 LOGE("camera_open failed. rc = %d", rc);
8797 goto open_failed;
8798 }
8799 if (!cameraHandle) {
8800 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8801 goto open_failed;
8802 }
8803
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008804 handle = get_main_camera_handle(cameraHandle->camera_handle);
8805 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8806 if (gCamCapability[cameraId] == NULL) {
8807 rc = FAILED_TRANSACTION;
8808 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008809 }
8810
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008811 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008812 if (is_dual_camera_by_idx(cameraId)) {
8813 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8814 gCamCapability[cameraId]->aux_cam_cap =
8815 getCapabilities(cameraHandle->ops, handle);
8816 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8817 rc = FAILED_TRANSACTION;
8818 free(gCamCapability[cameraId]);
8819 goto failed_op;
8820 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008821
8822 // Copy the main camera capability to main_cam_cap struct
8823 gCamCapability[cameraId]->main_cam_cap =
8824 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8825 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8826 LOGE("out of memory");
8827 rc = NO_MEMORY;
8828 goto failed_op;
8829 }
8830 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8831 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008832 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008833failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008834 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8835 cameraHandle = NULL;
8836open_failed:
8837 return rc;
8838}
8839
8840/*==========================================================================
8841 * FUNCTION : get3AVersion
8842 *
8843 * DESCRIPTION: get the Q3A S/W version
8844 *
8845 * PARAMETERS :
8846 * @sw_version: Reference of Q3A structure which will hold version info upon
8847 * return
8848 *
8849 * RETURN : None
8850 *
8851 *==========================================================================*/
8852void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8853{
8854 if(gCamCapability[mCameraId])
8855 sw_version = gCamCapability[mCameraId]->q3a_version;
8856 else
8857 LOGE("Capability structure NULL!");
8858}
8859
8860
8861/*===========================================================================
8862 * FUNCTION : initParameters
8863 *
8864 * DESCRIPTION: initialize camera parameters
8865 *
8866 * PARAMETERS :
8867 *
8868 * RETURN : int32_t type of status
8869 * NO_ERROR -- success
8870 * none-zero failure code
8871 *==========================================================================*/
8872int QCamera3HardwareInterface::initParameters()
8873{
8874 int rc = 0;
8875
8876 //Allocate Set Param Buffer
8877 mParamHeap = new QCamera3HeapMemory(1);
8878 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8879 if(rc != OK) {
8880 rc = NO_MEMORY;
8881 LOGE("Failed to allocate SETPARM Heap memory");
8882 delete mParamHeap;
8883 mParamHeap = NULL;
8884 return rc;
8885 }
8886
8887 //Map memory for parameters buffer
8888 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8889 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8890 mParamHeap->getFd(0),
8891 sizeof(metadata_buffer_t),
8892 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8893 if(rc < 0) {
8894 LOGE("failed to map SETPARM buffer");
8895 rc = FAILED_TRANSACTION;
8896 mParamHeap->deallocate();
8897 delete mParamHeap;
8898 mParamHeap = NULL;
8899 return rc;
8900 }
8901
8902 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8903
8904 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8905 return rc;
8906}
8907
8908/*===========================================================================
8909 * FUNCTION : deinitParameters
8910 *
8911 * DESCRIPTION: de-initialize camera parameters
8912 *
8913 * PARAMETERS :
8914 *
8915 * RETURN : NONE
8916 *==========================================================================*/
8917void QCamera3HardwareInterface::deinitParameters()
8918{
8919 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8920 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8921
8922 mParamHeap->deallocate();
8923 delete mParamHeap;
8924 mParamHeap = NULL;
8925
8926 mParameters = NULL;
8927
8928 free(mPrevParameters);
8929 mPrevParameters = NULL;
8930}
8931
8932/*===========================================================================
8933 * FUNCTION : calcMaxJpegSize
8934 *
8935 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8936 *
8937 * PARAMETERS :
8938 * @camera_id : camera Id
8939 * RETURN : max_jpeg_size
8940 *==========================================================================*/
8941size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8942{
8943 size_t max_jpeg_size = 0;
8944 size_t temp_width, temp_height;
8945 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8946 MAX_SIZES_CNT);
8947 for (size_t i = 0; i < count; i++) {
8948 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8949 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8950 if (temp_width * temp_height > max_jpeg_size ) {
8951 max_jpeg_size = temp_width * temp_height;
8952 }
8953 }
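    // Budget the JPEG buffer at 1.5 bytes per pixel of the largest picture size
    // (roughly a worst-case YUV420 footprint) plus the camera3 JPEG blob header.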
8954 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8955 return max_jpeg_size;
8956}
8957
8958/*===========================================================================
8959 * FUNCTION : getMaxRawSize
8960 *
8961 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8962 *
8963 * PARAMETERS :
8964 * @camera_id : camera Id
8965 * RETURN : Largest supported Raw Dimension
8966 *==========================================================================*/
8967cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8968{
8969 int max_width = 0;
8970 cam_dimension_t maxRawSize;
8971
8972 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8973 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8974 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8975 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8976 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8977 }
8978 }
8979 return maxRawSize;
8980}
8981
8982
8983/*===========================================================================
8984 * FUNCTION : calcMaxJpegDim
8985 *
8986 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8987 *
8988 * PARAMETERS :
8989 *
8990 * RETURN : max_jpeg_dim
8991 *==========================================================================*/
8992cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8993{
8994 cam_dimension_t max_jpeg_dim;
8995 cam_dimension_t curr_jpeg_dim;
8996 max_jpeg_dim.width = 0;
8997 max_jpeg_dim.height = 0;
8998 curr_jpeg_dim.width = 0;
8999 curr_jpeg_dim.height = 0;
9000 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9001 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9002 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9003 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9004 max_jpeg_dim.width * max_jpeg_dim.height ) {
9005 max_jpeg_dim.width = curr_jpeg_dim.width;
9006 max_jpeg_dim.height = curr_jpeg_dim.height;
9007 }
9008 }
9009 return max_jpeg_dim;
9010}
9011
9012/*===========================================================================
9013 * FUNCTION : addStreamConfig
9014 *
9015 * DESCRIPTION: adds the stream configuration to the array
9016 *
9017 * PARAMETERS :
9018 * @available_stream_configs : pointer to stream configuration array
9019 * @scalar_format : scalar format
9020 * @dim : configuration dimension
9021 * @config_type : input or output configuration type
9022 *
9023 * RETURN : NONE
9024 *==========================================================================*/
9025void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9026 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9027{
9028 available_stream_configs.add(scalar_format);
9029 available_stream_configs.add(dim.width);
9030 available_stream_configs.add(dim.height);
9031 available_stream_configs.add(config_type);
9032}
9033
9034/*===========================================================================
9035 * FUNCTION : suppportBurstCapture
9036 *
9037 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9038 *
9039 * PARAMETERS :
9040 * @cameraId : camera Id
9041 *
9042 * RETURN : true if camera supports BURST_CAPTURE
9043 * false otherwise
9044 *==========================================================================*/
9045bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9046{
9047 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9048 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9049 const int32_t highResWidth = 3264;
9050 const int32_t highResHeight = 2448;
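    // 3264x2448 (~8 MP) is treated as the high-resolution threshold for the
    // burst-capture checks below.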
9051
9052 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9053 // Maximum resolution images cannot be captured at >= 10fps
9054 // -> not supporting BURST_CAPTURE
9055 return false;
9056 }
9057
9058 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9059 // Maximum resolution images can be captured at >= 20fps
9060 // --> supporting BURST_CAPTURE
9061 return true;
9062 }
9063
9064 // Find the smallest highRes resolution, or largest resolution if there is none
9065 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9066 MAX_SIZES_CNT);
9067 size_t highRes = 0;
9068 while ((highRes + 1 < totalCnt) &&
9069 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9070 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9071 highResWidth * highResHeight)) {
9072 highRes++;
9073 }
9074 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9075 return true;
9076 } else {
9077 return false;
9078 }
9079}
9080
9081/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009082 * FUNCTION : getPDStatIndex
9083 *
9084 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9085 *
9086 * PARAMETERS :
9087 * @caps : camera capabilities
9088 *
9089 * RETURN : int32_t type
9090 * non-negative - on success
9091 * -1 - on failure
9092 *==========================================================================*/
9093int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9094 if (nullptr == caps) {
9095 return -1;
9096 }
9097
9098 uint32_t metaRawCount = caps->meta_raw_channel_count;
9099 int32_t ret = -1;
9100 for (size_t i = 0; i < metaRawCount; i++) {
9101 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9102 ret = i;
9103 break;
9104 }
9105 }
9106
9107 return ret;
9108}
9109
9110/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009111 * FUNCTION : initStaticMetadata
9112 *
9113 * DESCRIPTION: initialize the static metadata
9114 *
9115 * PARAMETERS :
9116 * @cameraId : camera Id
9117 *
9118 * RETURN : int32_t type of status
9119 * 0 -- success
9120 * non-zero failure code
9121 *==========================================================================*/
9122int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9123{
9124 int rc = 0;
9125 CameraMetadata staticInfo;
9126 size_t count = 0;
9127 bool limitedDevice = false;
9128 char prop[PROPERTY_VALUE_MAX];
9129 bool supportBurst = false;
9130
9131 supportBurst = supportBurstCapture(cameraId);
9132
9133 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9134 * guaranteed or if min fps of max resolution is less than 20 fps, its
9135 * advertised as limited device*/
9136 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9137 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9138 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9139 !supportBurst;
9140
9141 uint8_t supportedHwLvl = limitedDevice ?
9142 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009143#ifndef USE_HAL_3_3
9144 // LEVEL_3 - This device will support level 3.
9145 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9146#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009147 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009148#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009149
9150 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9151 &supportedHwLvl, 1);
9152
9153 bool facingBack = false;
9154 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9155 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9156 facingBack = true;
9157 }
9158 /*HAL 3 only*/
9159 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9160 &gCamCapability[cameraId]->min_focus_distance, 1);
9161
9162 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9163 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9164
9165    /* Should be using the list of focal lengths, but the sensor doesn't provide that info yet */
9166 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9167 &gCamCapability[cameraId]->focal_length,
9168 1);
9169
9170 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9171 gCamCapability[cameraId]->apertures,
9172 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9173
9174 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9175 gCamCapability[cameraId]->filter_densities,
9176 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9177
9178
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009179 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9180 size_t mode_count =
9181 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9182 for (size_t i = 0; i < mode_count; i++) {
9183 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9184 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009185 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009186 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009187
9188 int32_t lens_shading_map_size[] = {
9189 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9190 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9191 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9192 lens_shading_map_size,
9193 sizeof(lens_shading_map_size)/sizeof(int32_t));
9194
9195 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9196 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9197
9198 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9199 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9200
9201 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9202 &gCamCapability[cameraId]->max_frame_duration, 1);
9203
9204 camera_metadata_rational baseGainFactor = {
9205 gCamCapability[cameraId]->base_gain_factor.numerator,
9206 gCamCapability[cameraId]->base_gain_factor.denominator};
9207 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9208 &baseGainFactor, 1);
9209
9210 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9211 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9212
9213 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9214 gCamCapability[cameraId]->pixel_array_size.height};
9215 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9216 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9217
9218 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9219 gCamCapability[cameraId]->active_array_size.top,
9220 gCamCapability[cameraId]->active_array_size.width,
9221 gCamCapability[cameraId]->active_array_size.height};
9222 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9223 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9224
9225 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9226 &gCamCapability[cameraId]->white_level, 1);
9227
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009228 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9229 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9230 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009231 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009232 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009233
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009234#ifndef USE_HAL_3_3
9235 bool hasBlackRegions = false;
9236 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9237 LOGW("black_region_count: %d is bounded to %d",
9238 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9239 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9240 }
9241 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9242 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9243 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9244 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9245 }
9246 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9247 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9248 hasBlackRegions = true;
9249 }
9250#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009251 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9252 &gCamCapability[cameraId]->flash_charge_duration, 1);
9253
9254 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9255 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9256
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009257 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9258 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9259 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009260 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9261 &timestampSource, 1);
9262
Thierry Strudel54dc9782017-02-15 12:12:10 -08009263 //update histogram vendor data
9264 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009265 &gCamCapability[cameraId]->histogram_size, 1);
9266
Thierry Strudel54dc9782017-02-15 12:12:10 -08009267 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009268 &gCamCapability[cameraId]->max_histogram_count, 1);
9269
Shuzhen Wang14415f52016-11-16 18:26:18 -08009270 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9271    //so that the app can request fewer bins than the maximum supported.
9272 std::vector<int32_t> histBins;
9273 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9274 histBins.push_back(maxHistBins);
9275 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9276 (maxHistBins & 0x1) == 0) {
9277 histBins.push_back(maxHistBins >> 1);
9278 maxHistBins >>= 1;
9279 }
9280 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9281 histBins.data(), histBins.size());
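    // Example (assuming a sensor that reports a 256-bin maximum): the set advertised
    // above would be {256, 128, 64, ...} down to MIN_CAM_HISTOGRAM_STATS_SIZE,
    // stopping early if a halved count were to become odd.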
9282
Thierry Strudel3d639192016-09-09 11:52:26 -07009283 int32_t sharpness_map_size[] = {
9284 gCamCapability[cameraId]->sharpness_map_size.width,
9285 gCamCapability[cameraId]->sharpness_map_size.height};
9286
9287 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9288 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9289
9290 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9291 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9292
Emilian Peev0f3c3162017-03-15 12:57:46 +00009293 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9294 if (0 <= indexPD) {
9295 // Advertise PD stats data as part of the Depth capabilities
9296 int32_t depthWidth =
9297 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9298 int32_t depthHeight =
9299 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009300 int32_t depthStride =
9301 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009302 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
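        // Note: the PD stats buffer spans depthWidth * depthHeight * 2 bytes; the
        // divisor of 16 presumably reflects the per-sample packing of the PDAF stats
        // format (assumption, not confirmed by the capability structure).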
9303 assert(0 < depthSamplesCount);
9304 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9305 &depthSamplesCount, 1);
9306
9307 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9308 depthHeight,
9309 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9310 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9311 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9312 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9313 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
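        // Each entry above is a (format, width, height, direction) tuple; the BLOB
        // configuration exposes the PD samples as a depthSamplesCount x 1 buffer.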
9314
9315 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9316 depthHeight, 33333333,
9317 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9318 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9319 depthMinDuration,
9320 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9321
9322 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9323 depthHeight, 0,
9324 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9325 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9326 depthStallDuration,
9327 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9328
9329 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9330 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009331
9332 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9333 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9334 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009335 }
9336
Thierry Strudel3d639192016-09-09 11:52:26 -07009337 int32_t scalar_formats[] = {
9338 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9339 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9340 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9341 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9342 HAL_PIXEL_FORMAT_RAW10,
9343 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009344 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9345 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9346 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009347
9348 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9349 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9350 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9351 count, MAX_SIZES_CNT, available_processed_sizes);
9352 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9353 available_processed_sizes, count * 2);
9354
9355 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9356 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9357 makeTable(gCamCapability[cameraId]->raw_dim,
9358 count, MAX_SIZES_CNT, available_raw_sizes);
9359 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9360 available_raw_sizes, count * 2);
9361
9362 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9363 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9364 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9365 count, MAX_SIZES_CNT, available_fps_ranges);
9366 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9367 available_fps_ranges, count * 2);
9368
9369 camera_metadata_rational exposureCompensationStep = {
9370 gCamCapability[cameraId]->exp_compensation_step.numerator,
9371 gCamCapability[cameraId]->exp_compensation_step.denominator};
9372 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9373 &exposureCompensationStep, 1);
9374
9375 Vector<uint8_t> availableVstabModes;
9376 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9377 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009378 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009379 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009380 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009381 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009382 count = IS_TYPE_MAX;
9383 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9384 for (size_t i = 0; i < count; i++) {
9385 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9386 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9387 eisSupported = true;
9388 break;
9389 }
9390 }
9391 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009392 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9393 }
9394 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9395 availableVstabModes.array(), availableVstabModes.size());
9396
9397 /*HAL 1 and HAL 3 common*/
9398 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9399 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9400 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009401 // Cap the max zoom to the max preferred value
9402 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009403 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9404 &maxZoom, 1);
9405
9406 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9407 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9408
9409 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9410 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9411 max3aRegions[2] = 0; /* AF not supported */
9412 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9413 max3aRegions, 3);
9414
9415 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9416 memset(prop, 0, sizeof(prop));
9417 property_get("persist.camera.facedetect", prop, "1");
9418 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9419 LOGD("Support face detection mode: %d",
9420 supportedFaceDetectMode);
9421
9422 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009423    /* supported mode should be OFF if the max number of faces is 0 */
9424 if (maxFaces <= 0) {
9425 supportedFaceDetectMode = 0;
9426 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009427 Vector<uint8_t> availableFaceDetectModes;
9428 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9429 if (supportedFaceDetectMode == 1) {
9430 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9431 } else if (supportedFaceDetectMode == 2) {
9432 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9433 } else if (supportedFaceDetectMode == 3) {
9434 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9435 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9436 } else {
9437 maxFaces = 0;
9438 }
9439 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9440 availableFaceDetectModes.array(),
9441 availableFaceDetectModes.size());
9442 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9443 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009444 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9445 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9446 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009447
9448 int32_t exposureCompensationRange[] = {
9449 gCamCapability[cameraId]->exposure_compensation_min,
9450 gCamCapability[cameraId]->exposure_compensation_max};
9451 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9452 exposureCompensationRange,
9453 sizeof(exposureCompensationRange)/sizeof(int32_t));
9454
9455 uint8_t lensFacing = (facingBack) ?
9456 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9457 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9458
9459 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9460 available_thumbnail_sizes,
9461 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9462
9463    /* all sizes will be combined into this tag */
9464 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9465 /*android.scaler.availableStreamConfigurations*/
9466 Vector<int32_t> available_stream_configs;
9467 cam_dimension_t active_array_dim;
9468 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9469 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009470
9471    /*advertise the list of supported input dimensions based on the property below.
9472    By default all sizes up to 5MP will be advertised.
9473 Note that the setprop resolution format should be WxH.
9474 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9475 To list all supported sizes, setprop needs to be set with "0x0" */
9476 cam_dimension_t minInputSize = {2592,1944}; //5MP
9477 memset(prop, 0, sizeof(prop));
9478 property_get("persist.camera.input.minsize", prop, "2592x1944");
9479 if (strlen(prop) > 0) {
9480 char *saveptr = NULL;
9481 char *token = strtok_r(prop, "x", &saveptr);
9482 if (token != NULL) {
9483 minInputSize.width = atoi(token);
9484 }
9485 token = strtok_r(NULL, "x", &saveptr);
9486 if (token != NULL) {
9487 minInputSize.height = atoi(token);
9488 }
9489 }
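    // Note: with the "i == 0" check in the loop below, only the first (typically
    // largest) picture size is ever advertised as an input size, and only when it
    // clears the minInputSize threshold parsed above.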
9490
Thierry Strudel3d639192016-09-09 11:52:26 -07009491    /* Add input/output stream configurations for each of the scalar formats */
9492 for (size_t j = 0; j < scalar_formats_count; j++) {
9493 switch (scalar_formats[j]) {
9494 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9495 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9496 case HAL_PIXEL_FORMAT_RAW10:
9497 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9498 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9499 addStreamConfig(available_stream_configs, scalar_formats[j],
9500 gCamCapability[cameraId]->raw_dim[i],
9501 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9502 }
9503 break;
9504 case HAL_PIXEL_FORMAT_BLOB:
9505 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9506 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9507 addStreamConfig(available_stream_configs, scalar_formats[j],
9508 gCamCapability[cameraId]->picture_sizes_tbl[i],
9509 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9510 }
9511 break;
9512 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9513 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9514 default:
9515 cam_dimension_t largest_picture_size;
9516 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9517 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9518 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9519 addStreamConfig(available_stream_configs, scalar_formats[j],
9520 gCamCapability[cameraId]->picture_sizes_tbl[i],
9521 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009522                /* For the 2 formats below we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009523 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9524 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009525 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9526 >= minInputSize.width) || (gCamCapability[cameraId]->
9527 picture_sizes_tbl[i].height >= minInputSize.height)) {
9528 addStreamConfig(available_stream_configs, scalar_formats[j],
9529 gCamCapability[cameraId]->picture_sizes_tbl[i],
9530 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9531 }
9532 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009533 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009534
Thierry Strudel3d639192016-09-09 11:52:26 -07009535 break;
9536 }
9537 }
9538
9539 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9540 available_stream_configs.array(), available_stream_configs.size());
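    // Each availableStreamConfigurations entry above is a
    // (format, width, height, input/output direction) tuple.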
9541 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9542 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9543
9544 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9545 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9546
9547 /* android.scaler.availableMinFrameDurations */
9548 Vector<int64_t> available_min_durations;
9549 for (size_t j = 0; j < scalar_formats_count; j++) {
9550 switch (scalar_formats[j]) {
9551 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9552 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9553 case HAL_PIXEL_FORMAT_RAW10:
9554 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9555 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9556 available_min_durations.add(scalar_formats[j]);
9557 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9558 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9559 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9560 }
9561 break;
9562 default:
9563 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9564 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9565 available_min_durations.add(scalar_formats[j]);
9566 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9567 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9568 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9569 }
9570 break;
9571 }
9572 }
9573 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9574 available_min_durations.array(), available_min_durations.size());
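    // Each availableMinFrameDurations entry above is a
    // (format, width, height, minimum frame duration) tuple.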
9575
9576 Vector<int32_t> available_hfr_configs;
9577 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9578 int32_t fps = 0;
9579 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9580 case CAM_HFR_MODE_60FPS:
9581 fps = 60;
9582 break;
9583 case CAM_HFR_MODE_90FPS:
9584 fps = 90;
9585 break;
9586 case CAM_HFR_MODE_120FPS:
9587 fps = 120;
9588 break;
9589 case CAM_HFR_MODE_150FPS:
9590 fps = 150;
9591 break;
9592 case CAM_HFR_MODE_180FPS:
9593 fps = 180;
9594 break;
9595 case CAM_HFR_MODE_210FPS:
9596 fps = 210;
9597 break;
9598 case CAM_HFR_MODE_240FPS:
9599 fps = 240;
9600 break;
9601 case CAM_HFR_MODE_480FPS:
9602 fps = 480;
9603 break;
9604 case CAM_HFR_MODE_OFF:
9605 case CAM_HFR_MODE_MAX:
9606 default:
9607 break;
9608 }
9609
9610 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9611 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9612            /* For each HFR frame rate, we need to advertise one variable fps range
9613             * and one fixed fps range per dimension. E.g., for 120 FPS, advertise [30, 120]
9614             * and [120, 120]. While camcorder preview alone is running, [30, 120] is
9615             * set by the app. When video recording is started, [120, 120] is
9616             * set. This way the sensor configuration does not change when recording
9617             * is started */
9618
9619 /* (width, height, fps_min, fps_max, batch_size_max) */
9620 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9621 j < MAX_SIZES_CNT; j++) {
9622 available_hfr_configs.add(
9623 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9624 available_hfr_configs.add(
9625 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9626 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9627 available_hfr_configs.add(fps);
9628 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9629
9630 /* (width, height, fps_min, fps_max, batch_size_max) */
9631 available_hfr_configs.add(
9632 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9633 available_hfr_configs.add(
9634 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9635 available_hfr_configs.add(fps);
9636 available_hfr_configs.add(fps);
9637 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9638 }
9639 }
9640 }
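    // Example (assuming PREVIEW_FPS_FOR_HFR is 30): a 1920x1080 entry in the 120 fps
    // HFR table would add (1920, 1080, 30, 120, 4) and (1920, 1080, 120, 120, 4).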
9641 //Advertise HFR capability only if the property is set
9642 memset(prop, 0, sizeof(prop));
9643 property_get("persist.camera.hal3hfr.enable", prop, "1");
9644 uint8_t hfrEnable = (uint8_t)atoi(prop);
9645
9646 if(hfrEnable && available_hfr_configs.array()) {
9647 staticInfo.update(
9648 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9649 available_hfr_configs.array(), available_hfr_configs.size());
9650 }
9651
9652 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9653 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9654 &max_jpeg_size, 1);
9655
9656 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9657 size_t size = 0;
9658 count = CAM_EFFECT_MODE_MAX;
9659 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9660 for (size_t i = 0; i < count; i++) {
9661 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9662 gCamCapability[cameraId]->supported_effects[i]);
9663 if (NAME_NOT_FOUND != val) {
9664 avail_effects[size] = (uint8_t)val;
9665 size++;
9666 }
9667 }
9668 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9669 avail_effects,
9670 size);
9671
9672 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9673 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9674 size_t supported_scene_modes_cnt = 0;
9675 count = CAM_SCENE_MODE_MAX;
9676 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9677 for (size_t i = 0; i < count; i++) {
9678 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9679 CAM_SCENE_MODE_OFF) {
9680 int val = lookupFwkName(SCENE_MODES_MAP,
9681 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9682 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009683
Thierry Strudel3d639192016-09-09 11:52:26 -07009684 if (NAME_NOT_FOUND != val) {
9685 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9686 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9687 supported_scene_modes_cnt++;
9688 }
9689 }
9690 }
9691 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9692 avail_scene_modes,
9693 supported_scene_modes_cnt);
9694
9695 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9696 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9697 supported_scene_modes_cnt,
9698 CAM_SCENE_MODE_MAX,
9699 scene_mode_overrides,
9700 supported_indexes,
9701 cameraId);
9702
9703 if (supported_scene_modes_cnt == 0) {
9704 supported_scene_modes_cnt = 1;
9705 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9706 }
9707
9708 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9709 scene_mode_overrides, supported_scene_modes_cnt * 3);
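    // The overrides list holds 3 values (AE, AWB, AF mode) per advertised scene mode,
    // hence the supported_scene_modes_cnt * 3 entry count in the update above.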
9710
9711 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9712 ANDROID_CONTROL_MODE_AUTO,
9713 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9714 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9715 available_control_modes,
9716 3);
9717
9718 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9719 size = 0;
9720 count = CAM_ANTIBANDING_MODE_MAX;
9721 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9722 for (size_t i = 0; i < count; i++) {
9723 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9724 gCamCapability[cameraId]->supported_antibandings[i]);
9725 if (NAME_NOT_FOUND != val) {
9726 avail_antibanding_modes[size] = (uint8_t)val;
9727 size++;
9728 }
9729
9730 }
9731 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9732 avail_antibanding_modes,
9733 size);
9734
9735 uint8_t avail_abberation_modes[] = {
9736 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9737 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9738 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9739 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9740 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9741 if (0 == count) {
9742        // If no aberration correction modes are available for a device, advertise only the OFF mode
9743 size = 1;
9744 } else {
9745        // If count is not zero, then at least one of FAST or HIGH_QUALITY is supported.
9746        // So, advertise all 3 modes if at least one mode is supported, as per the
9747        // new M requirement
9748 size = 3;
9749 }
9750 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9751 avail_abberation_modes,
9752 size);
9753
9754 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9755 size = 0;
9756 count = CAM_FOCUS_MODE_MAX;
9757 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9758 for (size_t i = 0; i < count; i++) {
9759 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9760 gCamCapability[cameraId]->supported_focus_modes[i]);
9761 if (NAME_NOT_FOUND != val) {
9762 avail_af_modes[size] = (uint8_t)val;
9763 size++;
9764 }
9765 }
9766 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9767 avail_af_modes,
9768 size);
9769
9770 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9771 size = 0;
9772 count = CAM_WB_MODE_MAX;
9773 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9774 for (size_t i = 0; i < count; i++) {
9775 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9776 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9777 gCamCapability[cameraId]->supported_white_balances[i]);
9778 if (NAME_NOT_FOUND != val) {
9779 avail_awb_modes[size] = (uint8_t)val;
9780 size++;
9781 }
9782 }
9783 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9784 avail_awb_modes,
9785 size);
9786
9787 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9788 count = CAM_FLASH_FIRING_LEVEL_MAX;
9789 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9790 count);
9791 for (size_t i = 0; i < count; i++) {
9792 available_flash_levels[i] =
9793 gCamCapability[cameraId]->supported_firing_levels[i];
9794 }
9795 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9796 available_flash_levels, count);
9797
9798 uint8_t flashAvailable;
9799 if (gCamCapability[cameraId]->flash_available)
9800 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9801 else
9802 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9803 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9804 &flashAvailable, 1);
9805
9806 Vector<uint8_t> avail_ae_modes;
9807 count = CAM_AE_MODE_MAX;
9808 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9809 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009810 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9811 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9812 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9813 }
9814 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009815 }
9816 if (flashAvailable) {
9817 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9818 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9819 }
9820 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9821 avail_ae_modes.array(),
9822 avail_ae_modes.size());
9823
9824 int32_t sensitivity_range[2];
9825 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9826 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9827 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9828 sensitivity_range,
9829 sizeof(sensitivity_range) / sizeof(int32_t));
9830
9831 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9832 &gCamCapability[cameraId]->max_analog_sensitivity,
9833 1);
9834
9835 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9836 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9837 &sensor_orientation,
9838 1);
9839
9840 int32_t max_output_streams[] = {
9841 MAX_STALLING_STREAMS,
9842 MAX_PROCESSED_STREAMS,
9843 MAX_RAW_STREAMS};
9844 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9845 max_output_streams,
9846 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9847
9848 uint8_t avail_leds = 0;
9849 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9850 &avail_leds, 0);
9851
9852 uint8_t focus_dist_calibrated;
9853 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9854 gCamCapability[cameraId]->focus_dist_calibrated);
9855 if (NAME_NOT_FOUND != val) {
9856 focus_dist_calibrated = (uint8_t)val;
9857 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9858 &focus_dist_calibrated, 1);
9859 }
9860
9861 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9862 size = 0;
9863 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9864 MAX_TEST_PATTERN_CNT);
9865 for (size_t i = 0; i < count; i++) {
9866 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9867 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9868 if (NAME_NOT_FOUND != testpatternMode) {
9869 avail_testpattern_modes[size] = testpatternMode;
9870 size++;
9871 }
9872 }
9873 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9874 avail_testpattern_modes,
9875 size);
9876
9877 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9878 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9879 &max_pipeline_depth,
9880 1);
9881
9882 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9883 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9884 &partial_result_count,
9885 1);
9886
9887 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9888 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9889
9890 Vector<uint8_t> available_capabilities;
9891 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9892 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9893 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9894 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9895 if (supportBurst) {
9896 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9897 }
9898 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9899 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9900 if (hfrEnable && available_hfr_configs.array()) {
9901 available_capabilities.add(
9902 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9903 }
9904
9905 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9906 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9907 }
9908 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9909 available_capabilities.array(),
9910 available_capabilities.size());
9911
9912    //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9913    //The assumption is that all Bayer cameras support MANUAL_SENSOR.
9914 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9915 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9916
9917 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9918 &aeLockAvailable, 1);
9919
9920    //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9921    //BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9922 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9923 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9924
9925 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9926 &awbLockAvailable, 1);
9927
9928 int32_t max_input_streams = 1;
9929 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9930 &max_input_streams,
9931 1);
9932
9933 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9934 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9935 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9936 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9937 HAL_PIXEL_FORMAT_YCbCr_420_888};
9938 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9939 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
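    // Decoded, the map above means: an IMPLEMENTATION_DEFINED input can be reprocessed
    // into BLOB or YCbCr_420_888 outputs, and a YCbCr_420_888 input into BLOB or
    // YCbCr_420_888 outputs.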
9940
9941 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9942 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9943 &max_latency,
9944 1);
9945
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009946#ifndef USE_HAL_3_3
9947 int32_t isp_sensitivity_range[2];
9948 isp_sensitivity_range[0] =
9949 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9950 isp_sensitivity_range[1] =
9951 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9952 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9953 isp_sensitivity_range,
9954 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9955#endif
9956
Thierry Strudel3d639192016-09-09 11:52:26 -07009957 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9958 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9959 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9960 available_hot_pixel_modes,
9961 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9962
9963 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9964 ANDROID_SHADING_MODE_FAST,
9965 ANDROID_SHADING_MODE_HIGH_QUALITY};
9966 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9967 available_shading_modes,
9968 3);
9969
9970 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9971 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9972 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9973 available_lens_shading_map_modes,
9974 2);
9975
9976 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9977 ANDROID_EDGE_MODE_FAST,
9978 ANDROID_EDGE_MODE_HIGH_QUALITY,
9979 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9980 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9981 available_edge_modes,
9982 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9983
9984 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9985 ANDROID_NOISE_REDUCTION_MODE_FAST,
9986 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9987 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9988 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9989 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9990 available_noise_red_modes,
9991 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9992
9993 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9994 ANDROID_TONEMAP_MODE_FAST,
9995 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9996 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9997 available_tonemap_modes,
9998 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9999
10000 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10001 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10002 available_hot_pixel_map_modes,
10003 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10004
10005 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10006 gCamCapability[cameraId]->reference_illuminant1);
10007 if (NAME_NOT_FOUND != val) {
10008 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10009 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10010 }
10011
10012 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10013 gCamCapability[cameraId]->reference_illuminant2);
10014 if (NAME_NOT_FOUND != val) {
10015 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10016 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10017 }
10018
10019 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10020 (void *)gCamCapability[cameraId]->forward_matrix1,
10021 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10022
10023 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10024 (void *)gCamCapability[cameraId]->forward_matrix2,
10025 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10026
10027 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10028 (void *)gCamCapability[cameraId]->color_transform1,
10029 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10030
10031 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10032 (void *)gCamCapability[cameraId]->color_transform2,
10033 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10034
10035 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10036 (void *)gCamCapability[cameraId]->calibration_transform1,
10037 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10038
10039 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10040 (void *)gCamCapability[cameraId]->calibration_transform2,
10041 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10042
10043 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10044 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10045 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10046 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10047 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10048 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10049 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10050 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10051 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10052 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10053 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10054 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10055 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10056 ANDROID_JPEG_GPS_COORDINATES,
10057 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10058 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10059 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10060 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10061 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10062 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10063 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10064 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10065 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10066 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010067#ifndef USE_HAL_3_3
10068 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10069#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010070 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010071 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010072 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10073 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010074 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010075 /* DevCamDebug metadata request_keys_basic */
10076 DEVCAMDEBUG_META_ENABLE,
10077 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010078 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010079 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010080 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010081 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010082 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010083 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010084
10085 size_t request_keys_cnt =
10086 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10087 Vector<int32_t> available_request_keys;
10088 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10089 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10090 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10091 }
10092
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010093 if (gExposeEnableZslKey) {
Chien-Yu Chen3b630e52017-06-02 15:39:47 -070010094 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || cameraId == 0) {
10095 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10096 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010097 }
10098
Thierry Strudel3d639192016-09-09 11:52:26 -070010099 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10100 available_request_keys.array(), available_request_keys.size());
10101
10102 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10103 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10104 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10105 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10106 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10107 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10108 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10109 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10110 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10111 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10112 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10113 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10114 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10115 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10116 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10117 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10118 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010119 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010120 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10121 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10122 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010123 ANDROID_STATISTICS_FACE_SCORES,
10124#ifndef USE_HAL_3_3
10125 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10126#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010127 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010128 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010129 // DevCamDebug metadata result_keys_basic
10130 DEVCAMDEBUG_META_ENABLE,
10131 // DevCamDebug metadata result_keys AF
10132 DEVCAMDEBUG_AF_LENS_POSITION,
10133 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10134 DEVCAMDEBUG_AF_TOF_DISTANCE,
10135 DEVCAMDEBUG_AF_LUMA,
10136 DEVCAMDEBUG_AF_HAF_STATE,
10137 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10138 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10139 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10140 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10141 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10142 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10143 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10144 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10145 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10146 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10147 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10148 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10149 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10150 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10151 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10152 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10153 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10154 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10155 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10156 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10157 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10158 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10159 // DevCamDebug metadata result_keys AEC
10160 DEVCAMDEBUG_AEC_TARGET_LUMA,
10161 DEVCAMDEBUG_AEC_COMP_LUMA,
10162 DEVCAMDEBUG_AEC_AVG_LUMA,
10163 DEVCAMDEBUG_AEC_CUR_LUMA,
10164 DEVCAMDEBUG_AEC_LINECOUNT,
10165 DEVCAMDEBUG_AEC_REAL_GAIN,
10166 DEVCAMDEBUG_AEC_EXP_INDEX,
10167 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010168 // DevCamDebug metadata result_keys zzHDR
10169 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10170 DEVCAMDEBUG_AEC_L_LINECOUNT,
10171 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10172 DEVCAMDEBUG_AEC_S_LINECOUNT,
10173 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10174 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10175 // DevCamDebug metadata result_keys ADRC
10176 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10177 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10178 DEVCAMDEBUG_AEC_GTM_RATIO,
10179 DEVCAMDEBUG_AEC_LTM_RATIO,
10180 DEVCAMDEBUG_AEC_LA_RATIO,
10181 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010182 // DevCamDebug metadata result_keys AWB
10183 DEVCAMDEBUG_AWB_R_GAIN,
10184 DEVCAMDEBUG_AWB_G_GAIN,
10185 DEVCAMDEBUG_AWB_B_GAIN,
10186 DEVCAMDEBUG_AWB_CCT,
10187 DEVCAMDEBUG_AWB_DECISION,
10188 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010189 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10190 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10191 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010192 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010193 };
10194
Thierry Strudel3d639192016-09-09 11:52:26 -070010195 size_t result_keys_cnt =
10196 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10197
10198 Vector<int32_t> available_result_keys;
10199 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10200 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10201 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10202 }
10203 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10204 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10205 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10206 }
10207 if (supportedFaceDetectMode == 1) {
10208 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10209 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10210 } else if ((supportedFaceDetectMode == 2) ||
10211 (supportedFaceDetectMode == 3)) {
10212 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10213 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10214 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010215#ifndef USE_HAL_3_3
10216 if (hasBlackRegions) {
10217 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10218 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10219 }
10220#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010221
10222 if (gExposeEnableZslKey) {
10223 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10224 }
10225
Thierry Strudel3d639192016-09-09 11:52:26 -070010226 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10227 available_result_keys.array(), available_result_keys.size());
10228
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010229 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010230 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10231 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10232 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10233 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10234 ANDROID_SCALER_CROPPING_TYPE,
10235 ANDROID_SYNC_MAX_LATENCY,
10236 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10237 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10238 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10239 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10240 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10241 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10242 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10243 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10244 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10245 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10246 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10247 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10248 ANDROID_LENS_FACING,
10249 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10250 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10251 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10252 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10253 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10254 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10255 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10256 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10257 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10258 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10259 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10260 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10261 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10262 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10263 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10264 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10265 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10266 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10267 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10268 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010269 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010270 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10271 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10272 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10273 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10274 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10275 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10276 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10277 ANDROID_CONTROL_AVAILABLE_MODES,
10278 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10279 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10280 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10281 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010282 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10283#ifndef USE_HAL_3_3
10284 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10285 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10286#endif
10287 };
10288
10289 Vector<int32_t> available_characteristics_keys;
10290 available_characteristics_keys.appendArray(characteristics_keys_basic,
10291 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10292#ifndef USE_HAL_3_3
10293 if (hasBlackRegions) {
10294 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10295 }
10296#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010297
10298 if (0 <= indexPD) {
10299 int32_t depthKeys[] = {
10300 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10301 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10302 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10303 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10304 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10305 };
10306 available_characteristics_keys.appendArray(depthKeys,
10307 sizeof(depthKeys) / sizeof(depthKeys[0]));
10308 }
10309
Thierry Strudel3d639192016-09-09 11:52:26 -070010310 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010311 available_characteristics_keys.array(),
10312 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010313
10314    /* Available stall durations depend on the HW + SW and will differ across devices */
10315    /* have to add RAW after implementation */
10316 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10317 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10318
10319 Vector<int64_t> available_stall_durations;
10320 for (uint32_t j = 0; j < stall_formats_count; j++) {
10321 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10322 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10323 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10324 available_stall_durations.add(stall_formats[j]);
10325 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10326 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10327 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10328 }
10329 } else {
10330 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10331 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10332 available_stall_durations.add(stall_formats[j]);
10333 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10334 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10335 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10336 }
10337 }
10338 }
10339 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10340 available_stall_durations.array(),
10341 available_stall_durations.size());
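/* Illustrative sketch of the entry layout built above (sizes and durations are
 * hypothetical, not from any particular sensor): ANDROID_SCALER_AVAILABLE_STALL_DURATIONS
 * is a flat int64 array of (format, width, height, stall_ns) 4-tuples, e.g.
 *     HAL_PIXEL_FORMAT_BLOB,                  4032, 3024, 300000000,  // ~300 ms JPEG stall
 *     ANDROID_SCALER_AVAILABLE_FORMATS_RAW16, 4032, 3024,  33333333   // ~1 frame at 30 fps
 * with one tuple per supported size of each stalling format. */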
10342
10343 //QCAMERA3_OPAQUE_RAW
10344 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10345 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10346 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10347 case LEGACY_RAW:
10348 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10349 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10350 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10351 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10352 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10353 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10354 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10355 break;
10356 case MIPI_RAW:
10357 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10358 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10359 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10360 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10361 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10362 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10363 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10364 break;
10365 default:
10366 LOGE("unknown opaque_raw_format %d",
10367 gCamCapability[cameraId]->opaque_raw_fmt);
10368 break;
10369 }
10370 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10371
10372 Vector<int32_t> strides;
10373 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10374 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10375 cam_stream_buf_plane_info_t buf_planes;
10376 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10377 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10378 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10379 &gCamCapability[cameraId]->padding_info, &buf_planes);
10380 strides.add(buf_planes.plane_info.mp[0].stride);
10381 }
10382 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10383 strides.size());
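/* Illustrative sketch (hypothetical numbers): QCAMERA3_OPAQUE_RAW_STRIDES is a flat
 * list of (width, height, stride_in_bytes) triples, one per supported RAW dimension
 * as computed by mm_stream_calc_offset_raw(), e.g. 4032, 3024, 5040 for a 10-bit
 * MIPI-packed 4032-wide frame (4032 * 10 / 8 = 5040, before any platform padding). */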
10384
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010385 //TBD: remove the following line once backend advertises zzHDR in feature mask
10386 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010387 //Video HDR default
10388 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10389 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010390 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010391 int32_t vhdr_mode[] = {
10392 QCAMERA3_VIDEO_HDR_MODE_OFF,
10393 QCAMERA3_VIDEO_HDR_MODE_ON};
10394
10395 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10396 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10397 vhdr_mode, vhdr_mode_count);
10398 }
10399
Thierry Strudel3d639192016-09-09 11:52:26 -070010400 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10401 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10402 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10403
10404 uint8_t isMonoOnly =
10405 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10406 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10407 &isMonoOnly, 1);
10408
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010409#ifndef USE_HAL_3_3
10410 Vector<int32_t> opaque_size;
10411 for (size_t j = 0; j < scalar_formats_count; j++) {
10412 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10413 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10414 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10415 cam_stream_buf_plane_info_t buf_planes;
10416
10417 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10418 &gCamCapability[cameraId]->padding_info, &buf_planes);
10419
10420 if (rc == 0) {
10421 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10422 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10423 opaque_size.add(buf_planes.plane_info.frame_len);
10424 } else {
10425 LOGE("raw frame calculation failed!");
10426 }
10427 }
10428 }
10429 }
10430
10431 if ((opaque_size.size() > 0) &&
10432 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10433 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10434 else
10435 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10436#endif
10437
Thierry Strudel04e026f2016-10-10 11:27:36 -070010438 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10439 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10440 size = 0;
10441 count = CAM_IR_MODE_MAX;
10442 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10443 for (size_t i = 0; i < count; i++) {
10444 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10445 gCamCapability[cameraId]->supported_ir_modes[i]);
10446 if (NAME_NOT_FOUND != val) {
10447 avail_ir_modes[size] = (int32_t)val;
10448 size++;
10449 }
10450 }
10451 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10452 avail_ir_modes, size);
10453 }
10454
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010455 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10456 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10457 size = 0;
10458 count = CAM_AEC_CONVERGENCE_MAX;
10459 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10460 for (size_t i = 0; i < count; i++) {
10461 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10462 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10463 if (NAME_NOT_FOUND != val) {
10464 available_instant_aec_modes[size] = (int32_t)val;
10465 size++;
10466 }
10467 }
10468 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10469 available_instant_aec_modes, size);
10470 }
10471
Thierry Strudel54dc9782017-02-15 12:12:10 -080010472 int32_t sharpness_range[] = {
10473 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10474 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10475 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10476
10477 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10478 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10479 size = 0;
10480 count = CAM_BINNING_CORRECTION_MODE_MAX;
10481 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10482 for (size_t i = 0; i < count; i++) {
10483 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10484 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10485 gCamCapability[cameraId]->supported_binning_modes[i]);
10486 if (NAME_NOT_FOUND != val) {
10487 avail_binning_modes[size] = (int32_t)val;
10488 size++;
10489 }
10490 }
10491 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10492 avail_binning_modes, size);
10493 }
10494
10495 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10496 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10497 size = 0;
10498 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10499 for (size_t i = 0; i < count; i++) {
10500 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10501 gCamCapability[cameraId]->supported_aec_modes[i]);
10502 if (NAME_NOT_FOUND != val)
10503 available_aec_modes[size++] = val;
10504 }
10505 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10506 available_aec_modes, size);
10507 }
10508
10509 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10510 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10511 size = 0;
10512 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10513 for (size_t i = 0; i < count; i++) {
10514 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10515 gCamCapability[cameraId]->supported_iso_modes[i]);
10516 if (NAME_NOT_FOUND != val)
10517 available_iso_modes[size++] = val;
10518 }
10519 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10520 available_iso_modes, size);
10521 }
10522
10523 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010524 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010525 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10526 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10527 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10528
10529 int32_t available_saturation_range[4];
10530 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10531 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10532 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10533 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10534 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10535 available_saturation_range, 4);
10536
10537 uint8_t is_hdr_values[2];
10538 is_hdr_values[0] = 0;
10539 is_hdr_values[1] = 1;
10540 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10541 is_hdr_values, 2);
10542
10543 float is_hdr_confidence_range[2];
10544 is_hdr_confidence_range[0] = 0.0;
10545 is_hdr_confidence_range[1] = 1.0;
10546 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10547 is_hdr_confidence_range, 2);
10548
Emilian Peev0a972ef2017-03-16 10:25:53 +000010549 size_t eepromLength = strnlen(
10550 reinterpret_cast<const char *>(
10551 gCamCapability[cameraId]->eeprom_version_info),
10552 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10553 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010554 char easelInfo[] = ",E:N";
10555 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10556 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10557 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010558 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10559 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010560 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010561 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10562 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10563 }
10564
Thierry Strudel3d639192016-09-09 11:52:26 -070010565 gStaticMetadata[cameraId] = staticInfo.release();
10566 return rc;
10567}
10568
10569/*===========================================================================
10570 * FUNCTION : makeTable
10571 *
10572 * DESCRIPTION: make a table of sizes
10573 *
10574 * PARAMETERS :
10575 *   @dimTable/@size : source table of dimensions and its valid entry count
10576 *   @max_size/@sizeTable : output capacity and flat array of width/height pairs
10577 *==========================================================================*/
10578void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10579 size_t max_size, int32_t *sizeTable)
10580{
10581 size_t j = 0;
10582 if (size > max_size) {
10583 size = max_size;
10584 }
10585 for (size_t i = 0; i < size; i++) {
10586 sizeTable[j] = dimTable[i].width;
10587 sizeTable[j+1] = dimTable[i].height;
10588 j+=2;
10589 }
10590}
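/* Example of the flattening performed by makeTable (hypothetical dimensions):
 * with dimTable = { {4032, 3024}, {1920, 1080} } and size = 2, sizeTable is
 * filled as { 4032, 3024, 1920, 1080 }, i.e. width/height pairs interleaved in
 * the order they appear in dimTable. makeFPSTable below does the same for
 * (min_fps, max_fps) pairs. */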
10591
10592/*===========================================================================
10593 * FUNCTION : makeFPSTable
10594 *
10595 * DESCRIPTION: make a table of fps ranges
10596 *
10597 * PARAMETERS :
10598 *   @fpsTable/@size : source fps ranges and count; @max_size/@fpsRangesTable : output capacity and flat (min, max) fps pairs
10599 *==========================================================================*/
10600void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10601 size_t max_size, int32_t *fpsRangesTable)
10602{
10603 size_t j = 0;
10604 if (size > max_size) {
10605 size = max_size;
10606 }
10607 for (size_t i = 0; i < size; i++) {
10608 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10609 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10610 j+=2;
10611 }
10612}
10613
10614/*===========================================================================
10615 * FUNCTION : makeOverridesList
10616 *
10617 * DESCRIPTION: make a list of scene mode overrides
10618 *
10619 * PARAMETERS :
10620 *   @overridesTable/@size/@max_size : per-scene-mode override table from the backend and its bounds
10621 *   @overridesList/@supported_indexes/@camera_id : output (AE, AWB, AF) triplets for the framework-visible scene modes
10622 *==========================================================================*/
10623void QCamera3HardwareInterface::makeOverridesList(
10624 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10625 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10626{
10627 /* The daemon provides overrides for all scene modes; however, only the
10628 overrides for the scene modes supported by the framework should be
10629 sent to the framework. */
10630 size_t j = 0;
10631 if (size > max_size) {
10632 size = max_size;
10633 }
10634 size_t focus_count = CAM_FOCUS_MODE_MAX;
10635 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10636 focus_count);
10637 for (size_t i = 0; i < size; i++) {
10638 bool supt = false;
10639 size_t index = supported_indexes[i];
10640 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10641 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10642 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10643 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10644 overridesTable[index].awb_mode);
10645 if (NAME_NOT_FOUND != val) {
10646 overridesList[j+1] = (uint8_t)val;
10647 }
10648 uint8_t focus_override = overridesTable[index].af_mode;
10649 for (size_t k = 0; k < focus_count; k++) {
10650 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10651 supt = true;
10652 break;
10653 }
10654 }
10655 if (supt) {
10656 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10657 focus_override);
10658 if (NAME_NOT_FOUND != val) {
10659 overridesList[j+2] = (uint8_t)val;
10660 }
10661 } else {
10662 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10663 }
10664 j+=3;
10665 }
10666}
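/* Example of the list built by makeOverridesList (values are illustrative, not
 * taken from a specific sensor): three bytes are emitted per framework-visible
 * scene mode, in the order (AE, AWB, AF). For a flash-capable camera whose
 * override for a given scene mode reports auto white balance and a supported
 * continuous-picture focus mode, the triplet would be
 *     ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,
 *     ANDROID_CONTROL_AWB_MODE_AUTO,
 *     ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE
 * and AF falls back to ANDROID_CONTROL_AF_MODE_OFF when the override is not
 * supported by the sensor. */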
10667
10668/*===========================================================================
10669 * FUNCTION : filterJpegSizes
10670 *
10671 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that are
10672 * no smaller than the active array dimensions divided by the maximum downscale factor
10673 *
10674 * PARAMETERS : @jpegSizes - output array; @processedSizes/@processedSizesCnt - input sizes;
10675 *   @maxCount - output capacity; @active_array_size/@downscale_factor - filter criteria
10676 * RETURN : length of jpegSizes array
10677 *==========================================================================*/
10678
10679size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10680 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10681 uint8_t downscale_factor)
10682{
10683 if (0 == downscale_factor) {
10684 downscale_factor = 1;
10685 }
10686
10687 int32_t min_width = active_array_size.width / downscale_factor;
10688 int32_t min_height = active_array_size.height / downscale_factor;
10689 size_t jpegSizesCnt = 0;
10690 if (processedSizesCnt > maxCount) {
10691 processedSizesCnt = maxCount;
10692 }
10693 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10694 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10695 jpegSizes[jpegSizesCnt] = processedSizes[i];
10696 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10697 jpegSizesCnt += 2;
10698 }
10699 }
10700 return jpegSizesCnt;
10701}
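/* Worked example for filterJpegSizes (all numbers hypothetical): with an active
 * array of 4032x3024 and downscale_factor = 4, min_width x min_height is
 * 1008x756. A processed size list of { 4032x3024, 1920x1080, 640x480 } is then
 * filtered to { 4032x3024, 1920x1080 }; 640x480 is dropped because 480 < 756,
 * and the function returns 4 (the number of int32 entries written). */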
10702
10703/*===========================================================================
10704 * FUNCTION : computeNoiseModelEntryS
10705 *
10706 * DESCRIPTION: function to map a given sensitivity to the S noise
10707 * model parameters in the DNG noise model.
10708 *
10709 * PARAMETERS : sens : the sensor sensitivity
10710 *
10711 * RETURN : S (sensor amplification) noise
10712 *
10713 *==========================================================================*/
10714double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10715 double s = gCamCapability[mCameraId]->gradient_S * sens +
10716 gCamCapability[mCameraId]->offset_S;
10717 return ((s < 0.0) ? 0.0 : s);
10718}
10719
10720/*===========================================================================
10721 * FUNCTION : computeNoiseModelEntryO
10722 *
10723 * DESCRIPTION: function to map a given sensitivity to the O noise
10724 * model parameters in the DNG noise model.
10725 *
10726 * PARAMETERS : sens : the sensor sensitivity
10727 *
10728 * RETURN : O (sensor readout) noise
10729 *
10730 *==========================================================================*/
10731double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10732 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10733 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10734 1.0 : (1.0 * sens / max_analog_sens);
10735 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10736 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10737 return ((o < 0.0) ? 0.0 : o);
10738}
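/* Worked example of the two DNG noise model entries above (all coefficients are
 * hypothetical, not from a real sensor): with gradient_S = 3.0e-06,
 * offset_S = 2.0e-06, gradient_O = 4.0e-12, offset_O = 5.0e-08,
 * max_analog_sensitivity = 800 and a request at sens = 1600
 * (digital_gain = 1600 / 800 = 2.0):
 *     S = 3.0e-06 * 1600 + 2.0e-06            = 4.802e-03
 *     O = 4.0e-12 * 1600^2 + 5.0e-08 * 2.0^2  = 1.044e-05
 * This is the kind of per-channel (S, O) pair the HAL reports in
 * ANDROID_SENSOR_NOISE_PROFILE. */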
10739
10740/*===========================================================================
10741 * FUNCTION : getSensorSensitivity
10742 *
10743 * DESCRIPTION: convert iso_mode to an integer value
10744 *
10745 * PARAMETERS : iso_mode : the ISO mode supported by the sensor
10746 *
10747 * RETURN : sensitivity supported by the sensor, or -1 if the mode is unknown
10748 *
10749 *==========================================================================*/
10750int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10751{
10752 int32_t sensitivity;
10753
10754 switch (iso_mode) {
10755 case CAM_ISO_MODE_100:
10756 sensitivity = 100;
10757 break;
10758 case CAM_ISO_MODE_200:
10759 sensitivity = 200;
10760 break;
10761 case CAM_ISO_MODE_400:
10762 sensitivity = 400;
10763 break;
10764 case CAM_ISO_MODE_800:
10765 sensitivity = 800;
10766 break;
10767 case CAM_ISO_MODE_1600:
10768 sensitivity = 1600;
10769 break;
10770 default:
10771 sensitivity = -1;
10772 break;
10773 }
10774 return sensitivity;
10775}
10776
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010777int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010778 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010779 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10780 // to connect to Easel.
10781 bool doNotpowerOnEasel =
10782 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10783
10784 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010785 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10786 return OK;
10787 }
10788
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010789 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010790 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010791 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010792 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010793 return res;
10794 }
10795
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010796 EaselManagerClientOpened = true;
10797
10798 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010799 if (res != OK) {
10800 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10801 }
10802
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010803 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010804 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010805
10806 // Expose enableZsl key only when HDR+ mode is enabled.
10807 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010808 }
10809
10810 return OK;
10811}
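/* The Easel/HDR+ behavior above is driven entirely by system properties, so it
 * can be toggled from a shell during bring-up. A minimal sketch (property names
 * are taken from the code above; adb usage is assumed):
 *     adb shell setprop persist.camera.hdrplus.enable 1      # full HDR+ instead of bypass-only
 *     adb shell setprop persist.camera.hdrplus.profiling 1   # enable HDR+ profiling
 *     adb shell setprop camera.hdrplus.donotpoweroneasel 1   # leave Easel off for HDR+ tests
 * Note that ANDROID_CONTROL_ENABLE_ZSL is only exposed when bypass-only mode is
 * disabled (gExposeEnableZslKey = !gEaselBypassOnly). */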
10812
Thierry Strudel3d639192016-09-09 11:52:26 -070010813/*===========================================================================
10814 * FUNCTION : getCamInfo
10815 *
10816 * DESCRIPTION: query camera capabilities
10817 *
10818 * PARAMETERS :
10819 * @cameraId : camera Id
10820 * @info : camera info struct to be filled in with camera capabilities
10821 *
10822 * RETURN : int type of status
10823 * NO_ERROR -- success
10824 * none-zero failure code
10825 *==========================================================================*/
10826int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10827 struct camera_info *info)
10828{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010829 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010830 int rc = 0;
10831
10832 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010833
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010834 {
10835 Mutex::Autolock l(gHdrPlusClientLock);
10836 rc = initHdrPlusClientLocked();
10837 if (rc != OK) {
10838 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10839 pthread_mutex_unlock(&gCamLock);
10840 return rc;
10841 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010842 }
10843
Thierry Strudel3d639192016-09-09 11:52:26 -070010844 if (NULL == gCamCapability[cameraId]) {
10845 rc = initCapabilities(cameraId);
10846 if (rc < 0) {
10847 pthread_mutex_unlock(&gCamLock);
10848 return rc;
10849 }
10850 }
10851
10852 if (NULL == gStaticMetadata[cameraId]) {
10853 rc = initStaticMetadata(cameraId);
10854 if (rc < 0) {
10855 pthread_mutex_unlock(&gCamLock);
10856 return rc;
10857 }
10858 }
10859
10860 switch(gCamCapability[cameraId]->position) {
10861 case CAM_POSITION_BACK:
10862 case CAM_POSITION_BACK_AUX:
10863 info->facing = CAMERA_FACING_BACK;
10864 break;
10865
10866 case CAM_POSITION_FRONT:
10867 case CAM_POSITION_FRONT_AUX:
10868 info->facing = CAMERA_FACING_FRONT;
10869 break;
10870
10871 default:
10872 LOGE("Unknown position type %d for camera id:%d",
10873 gCamCapability[cameraId]->position, cameraId);
10874 rc = -1;
10875 break;
10876 }
10877
10878
10879 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010880#ifndef USE_HAL_3_3
10881 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10882#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010883 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010884#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010885 info->static_camera_characteristics = gStaticMetadata[cameraId];
10886
10887 //For now assume both cameras can operate independently.
10888 info->conflicting_devices = NULL;
10889 info->conflicting_devices_length = 0;
10890
10891 //resource cost is 100 * MIN(1.0, m/M),
10892 //where m is the throughput requirement with the maximum stream configuration
10893 //and M is the CPP maximum throughput.
10894 float max_fps = 0.0;
10895 for (uint32_t i = 0;
10896 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10897 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10898 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10899 }
10900 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10901 gCamCapability[cameraId]->active_array_size.width *
10902 gCamCapability[cameraId]->active_array_size.height * max_fps /
10903 gCamCapability[cameraId]->max_pixel_bandwidth;
10904 info->resource_cost = 100 * MIN(1.0, ratio);
10905 LOGI("camera %d resource cost is %d", cameraId,
10906 info->resource_cost);
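/* Worked example of the resource cost formula (capability values are
 * hypothetical, and MAX_PROCESSED_STREAMS is taken as 3 purely for
 * illustration): with an active array of 4032x3024, max_fps = 30 and
 * max_pixel_bandwidth = 1.2e9 pixels/s,
 *     ratio = 3 * 4032 * 3024 * 30 / 1.2e9 ~= 0.91
 *     resource_cost = 100 * MIN(1.0, 0.91) = 91
 * A ratio above 1.0 is clamped, so the reported cost never exceeds 100. */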
10907
10908 pthread_mutex_unlock(&gCamLock);
10909 return rc;
10910}
10911
10912/*===========================================================================
10913 * FUNCTION : translateCapabilityToMetadata
10914 *
10915 * DESCRIPTION: translate the capability into camera_metadata_t
10916 *
10917 * PARAMETERS : type of the request
10918 *
10919 *
10920 * RETURN : success: camera_metadata_t*
10921 * failure: NULL
10922 *
10923 *==========================================================================*/
10924camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10925{
10926 if (mDefaultMetadata[type] != NULL) {
10927 return mDefaultMetadata[type];
10928 }
10929 //first time we are handling this request
10930 //fill up the metadata structure using the wrapper class
10931 CameraMetadata settings;
10932 //translate from cam_capability_t to camera_metadata_tag_t
10933 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10934 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10935 int32_t defaultRequestID = 0;
10936 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10937
10938 /* OIS disable */
10939 char ois_prop[PROPERTY_VALUE_MAX];
10940 memset(ois_prop, 0, sizeof(ois_prop));
10941 property_get("persist.camera.ois.disable", ois_prop, "0");
10942 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10943
10944 /* Force video to use OIS */
10945 char videoOisProp[PROPERTY_VALUE_MAX];
10946 memset(videoOisProp, 0, sizeof(videoOisProp));
10947 property_get("persist.camera.ois.video", videoOisProp, "1");
10948 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010949
10950 // Hybrid AE enable/disable
10951 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10952 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10953 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10954 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
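/* These template defaults can be overridden via system properties, e.g.
 * (property names are taken from the code above; adb usage is assumed):
 *     adb shell setprop persist.camera.ois.disable 1        # force OIS off in all templates
 *     adb shell setprop persist.camera.ois.video 0          # do not force OIS on for video
 *     adb shell setprop persist.camera.hybrid_ae.enable 1   # enable hybrid AE by default */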
10955
Thierry Strudel3d639192016-09-09 11:52:26 -070010956 uint8_t controlIntent = 0;
10957 uint8_t focusMode;
10958 uint8_t vsMode;
10959 uint8_t optStabMode;
10960 uint8_t cacMode;
10961 uint8_t edge_mode;
10962 uint8_t noise_red_mode;
10963 uint8_t tonemap_mode;
10964 bool highQualityModeEntryAvailable = FALSE;
10965 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010966 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010967 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10968 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010969 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010970 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010971 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010972
Thierry Strudel3d639192016-09-09 11:52:26 -070010973 switch (type) {
10974 case CAMERA3_TEMPLATE_PREVIEW:
10975 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10976 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10977 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10978 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10979 edge_mode = ANDROID_EDGE_MODE_FAST;
10980 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10981 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10982 break;
10983 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10984 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10985 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10986 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10987 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10988 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10989 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10990 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10991 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10992 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10993 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10994 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10995 highQualityModeEntryAvailable = TRUE;
10996 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10997 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10998 fastModeEntryAvailable = TRUE;
10999 }
11000 }
11001 if (highQualityModeEntryAvailable) {
11002 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11003 } else if (fastModeEntryAvailable) {
11004 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11005 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011006 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11007 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11008 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011009 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011010 break;
11011 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11012 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11013 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11014 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011015 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11016 edge_mode = ANDROID_EDGE_MODE_FAST;
11017 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11018 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11019 if (forceVideoOis)
11020 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11021 break;
11022 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11023 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11024 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11025 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011026 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11027 edge_mode = ANDROID_EDGE_MODE_FAST;
11028 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11029 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11030 if (forceVideoOis)
11031 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11032 break;
11033 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11034 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11035 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11036 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11037 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11038 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11039 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11040 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11041 break;
11042 case CAMERA3_TEMPLATE_MANUAL:
11043 edge_mode = ANDROID_EDGE_MODE_FAST;
11044 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11045 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11046 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11047 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11048 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11049 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11050 break;
11051 default:
11052 edge_mode = ANDROID_EDGE_MODE_FAST;
11053 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11054 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11055 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11056 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11057 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11058 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11059 break;
11060 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011061 // Set CAC to OFF if the underlying device doesn't support it
11062 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11063 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11064 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011065 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11066 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11067 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11068 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11069 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11070 }
11071 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011072 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011073 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011074
11075 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11076 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11077 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11078 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11079 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11080 || ois_disable)
11081 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11082 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011083 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011084
11085 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11086 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11087
11088 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11089 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11090
11091 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11092 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11093
11094 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11095 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11096
11097 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11098 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11099
11100 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11101 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11102
11103 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11104 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11105
11106 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11107 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11108
11109 /*flash*/
11110 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11111 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11112
11113 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11114 settings.update(ANDROID_FLASH_FIRING_POWER,
11115 &flashFiringLevel, 1);
11116
11117 /* lens */
11118 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11119 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11120
11121 if (gCamCapability[mCameraId]->filter_densities_count) {
11122 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11123 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11124 gCamCapability[mCameraId]->filter_densities_count);
11125 }
11126
11127 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11128 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11129
Thierry Strudel3d639192016-09-09 11:52:26 -070011130 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11131 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11132
11133 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11134 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11135
11136 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11137 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11138
11139 /* face detection (default to OFF) */
11140 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11141 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11142
Thierry Strudel54dc9782017-02-15 12:12:10 -080011143 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11144 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011145
11146 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11147 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11148
11149 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11150 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11151
Thierry Strudel3d639192016-09-09 11:52:26 -070011152
11153 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11154 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11155
11156 /* Exposure time (default to the minimum supported exposure time) */
11157 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11158 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11159
11160 /* frame duration */
11161 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11162 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11163
11164 /* sensitivity */
11165 static const int32_t default_sensitivity = 100;
11166 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011167#ifndef USE_HAL_3_3
11168 static const int32_t default_isp_sensitivity =
11169 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11170 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11171#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011172
11173 /*edge mode*/
11174 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11175
11176 /*noise reduction mode*/
11177 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11178
11179 /*color correction mode*/
11180 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11181 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11182
11183 /*tonemap mode*/
11184 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11185
11186 int32_t scaler_crop_region[4];
11187 scaler_crop_region[0] = 0;
11188 scaler_crop_region[1] = 0;
11189 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11190 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11191 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11192
11193 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11194 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11195
11196 /*focus distance*/
11197 float focus_distance = 0.0;
11198 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11199
11200 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011201 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011202 float max_range = 0.0;
11203 float max_fixed_fps = 0.0;
11204 int32_t fps_range[2] = {0, 0};
11205 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11206 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011207 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11208 TEMPLATE_MAX_PREVIEW_FPS) {
11209 continue;
11210 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011211 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11212 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11213 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11214 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11215 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11216 if (range > max_range) {
11217 fps_range[0] =
11218 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11219 fps_range[1] =
11220 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11221 max_range = range;
11222 }
11223 } else {
11224 if (range < 0.01 && max_fixed_fps <
11225 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11226 fps_range[0] =
11227 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11228 fps_range[1] =
11229 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11230 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11231 }
11232 }
11233 }
11234 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
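/* Worked example of the range selection above (hypothetical FPS table, and
 * TEMPLATE_MAX_PREVIEW_FPS assumed to be 30 as the comment above indicates):
 * given { [15,15], [7.5,30], [30,30], [30,60] }, the [30,60] entry is skipped
 * because its max exceeds the template limit. Preview/still/ZSL templates pick
 * the widest remaining range, [7.5,30] (stored as {7, 30} after the int32
 * cast); video templates pick the highest fixed range, {30, 30}. */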
11235
11236 /*precapture trigger*/
11237 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11238 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11239
11240 /*af trigger*/
11241 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11242 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11243
11244 /* ae & af regions */
11245 int32_t active_region[] = {
11246 gCamCapability[mCameraId]->active_array_size.left,
11247 gCamCapability[mCameraId]->active_array_size.top,
11248 gCamCapability[mCameraId]->active_array_size.left +
11249 gCamCapability[mCameraId]->active_array_size.width,
11250 gCamCapability[mCameraId]->active_array_size.top +
11251 gCamCapability[mCameraId]->active_array_size.height,
11252 0};
11253 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11254 sizeof(active_region) / sizeof(active_region[0]));
11255 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11256 sizeof(active_region) / sizeof(active_region[0]));
11257
11258 /* black level lock */
11259 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11260 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11261
Thierry Strudel3d639192016-09-09 11:52:26 -070011262 //special defaults for manual template
11263 if (type == CAMERA3_TEMPLATE_MANUAL) {
11264 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11265 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11266
11267 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11268 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11269
11270 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11271 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11272
11273 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11274 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11275
11276 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11277 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11278
11279 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11280 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11281 }
11282
11283
11284 /* TNR
11285 * This is where we decide which templates have TNR enabled by default.
11286 * TNR is enabled if either the preview or the video stream requires it.
11287 * This is not to be confused with per-stream linking; that decision is
11288 * made per session and is handled as part of stream configuration.
11289 */
11290 uint8_t tnr_enable = 0;
11291
11292 if (m_bTnrPreview || m_bTnrVideo) {
11293
11294 switch (type) {
11295 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11296 tnr_enable = 1;
11297 break;
11298
11299 default:
11300 tnr_enable = 0;
11301 break;
11302 }
11303
11304 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11305 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11306 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11307
11308 LOGD("TNR:%d with process plate %d for template:%d",
11309 tnr_enable, tnr_process_type, type);
11310 }
11311
11312 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011313 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011314 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11315
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011316 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011317 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11318
Shuzhen Wang920ea402017-05-03 08:49:39 -070011319 uint8_t related_camera_id = mCameraId;
11320 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011321
11322 /* CDS default */
11323 char prop[PROPERTY_VALUE_MAX];
11324 memset(prop, 0, sizeof(prop));
11325 property_get("persist.camera.CDS", prop, "Auto");
11326 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11327 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11328 if (CAM_CDS_MODE_MAX == cds_mode) {
11329 cds_mode = CAM_CDS_MODE_AUTO;
11330 }
11331
11332 /* Disabling CDS in templates which have TNR enabled*/
11333 if (tnr_enable)
11334 cds_mode = CAM_CDS_MODE_OFF;
11335
11336 int32_t mode = cds_mode;
11337 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
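/* Example of how the CDS default resolves (trace of the code above): with
 * persist.camera.CDS left at its default "Auto", cds_mode is CAM_CDS_MODE_AUTO;
 * but for CAMERA3_TEMPLATE_VIDEO_RECORD with m_bTnrVideo set, tnr_enable is 1,
 * so the template ends up with QCAMERA3_CDS_MODE = CAM_CDS_MODE_OFF regardless
 * of the property. */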
Thierry Strudel04e026f2016-10-10 11:27:36 -070011338
Thierry Strudel269c81a2016-10-12 12:13:59 -070011339 /* Manual Convergence AEC Speed is disabled by default*/
11340 float default_aec_speed = 0;
11341 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11342
11343 /* Manual Convergence AWB Speed is disabled by default*/
11344 float default_awb_speed = 0;
11345 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11346
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011347 // Set instant AEC to normal convergence by default
11348 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11349 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11350
Shuzhen Wang19463d72016-03-08 11:09:52 -080011351 /* hybrid ae */
11352 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11353
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011354 if (gExposeEnableZslKey) {
11355 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11356 }
11357
Thierry Strudel3d639192016-09-09 11:52:26 -070011358 mDefaultMetadata[type] = settings.release();
11359
11360 return mDefaultMetadata[type];
11361}
11362
11363/*===========================================================================
11364 * FUNCTION : setFrameParameters
11365 *
11366 * DESCRIPTION: set parameters per frame as requested in the metadata from
11367 * framework
11368 *
11369 * PARAMETERS :
11370 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011371 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011372 * @blob_request: Whether this request is a blob request or not
11373 *
11374 * RETURN : success: NO_ERROR
11375 * failure:
11376 *==========================================================================*/
11377int QCamera3HardwareInterface::setFrameParameters(
11378 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011379 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011380 int blob_request,
11381 uint32_t snapshotStreamId)
11382{
11383 /*translate from camera_metadata_t type to parm_type_t*/
11384 int rc = 0;
11385 int32_t hal_version = CAM_HAL_V3;
11386
11387 clear_metadata_buffer(mParameters);
11388 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11389 LOGE("Failed to set hal version in the parameters");
11390 return BAD_VALUE;
11391 }
11392
11393 /*we need to update the frame number in the parameters*/
11394 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11395 request->frame_number)) {
11396 LOGE("Failed to set the frame number in the parameters");
11397 return BAD_VALUE;
11398 }
11399
11400 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011401 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011402 LOGE("Failed to set stream type mask in the parameters");
11403 return BAD_VALUE;
11404 }
11405
11406 if (mUpdateDebugLevel) {
11407 uint32_t dummyDebugLevel = 0;
11408 /* The value of dummyDebugLevel is irrelevant. Setting
11409 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL triggers a re-read of the debug property. */
11410 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11411 dummyDebugLevel)) {
11412 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11413 return BAD_VALUE;
11414 }
11415 mUpdateDebugLevel = false;
11416 }
11417
11418 if(request->settings != NULL){
11419 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11420 if (blob_request)
11421 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11422 }
11423
11424 return rc;
11425}
11426
11427/*===========================================================================
11428 * FUNCTION : setReprocParameters
11429 *
11430 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11431 * return it.
11432 *
11433 * PARAMETERS :
11434 * @request : request that needs to be serviced
11435 *
11436 * RETURN : success: NO_ERROR
11437 * failure:
11438 *==========================================================================*/
11439int32_t QCamera3HardwareInterface::setReprocParameters(
11440 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11441 uint32_t snapshotStreamId)
11442{
11443 /*translate from camera_metadata_t type to parm_type_t*/
11444 int rc = 0;
11445
11446 if (NULL == request->settings){
11447 LOGE("Reprocess settings cannot be NULL");
11448 return BAD_VALUE;
11449 }
11450
11451 if (NULL == reprocParam) {
11452 LOGE("Invalid reprocessing metadata buffer");
11453 return BAD_VALUE;
11454 }
11455 clear_metadata_buffer(reprocParam);
11456
11457 /*we need to update the frame number in the parameters*/
11458 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11459 request->frame_number)) {
11460 LOGE("Failed to set the frame number in the parameters");
11461 return BAD_VALUE;
11462 }
11463
11464 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11465 if (rc < 0) {
11466 LOGE("Failed to translate reproc request");
11467 return rc;
11468 }
11469
11470 CameraMetadata frame_settings;
11471 frame_settings = request->settings;
11472 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11473 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11474 int32_t *crop_count =
11475 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11476 int32_t *crop_data =
11477 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11478 int32_t *roi_map =
11479 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11480 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11481 cam_crop_data_t crop_meta;
11482 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11483 crop_meta.num_of_streams = 1;
11484 crop_meta.crop_info[0].crop.left = crop_data[0];
11485 crop_meta.crop_info[0].crop.top = crop_data[1];
11486 crop_meta.crop_info[0].crop.width = crop_data[2];
11487 crop_meta.crop_info[0].crop.height = crop_data[3];
11488
11489 crop_meta.crop_info[0].roi_map.left =
11490 roi_map[0];
11491 crop_meta.crop_info[0].roi_map.top =
11492 roi_map[1];
11493 crop_meta.crop_info[0].roi_map.width =
11494 roi_map[2];
11495 crop_meta.crop_info[0].roi_map.height =
11496 roi_map[3];
11497
11498 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11499 rc = BAD_VALUE;
11500 }
11501 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11502 request->input_buffer->stream,
11503 crop_meta.crop_info[0].crop.left,
11504 crop_meta.crop_info[0].crop.top,
11505 crop_meta.crop_info[0].crop.width,
11506 crop_meta.crop_info[0].crop.height);
11507 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11508 request->input_buffer->stream,
11509 crop_meta.crop_info[0].roi_map.left,
11510 crop_meta.crop_info[0].roi_map.top,
11511 crop_meta.crop_info[0].roi_map.width,
11512 crop_meta.crop_info[0].roi_map.height);
11513 } else {
11514 LOGE("Invalid reprocess crop count %d!", *crop_count);
11515 }
11516 } else {
11517 LOGE("No crop data from matching output stream");
11518 }
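/* Illustrative layout of the reprocess crop vendor tags consumed above (numbers
 * are hypothetical): QCAMERA3_CROP_COUNT_REPROCESS = {1},
 * QCAMERA3_CROP_REPROCESS = {left, top, width, height} = {0, 0, 2016, 1512},
 * QCAMERA3_CROP_ROI_MAP_REPROCESS = {0, 0, 4032, 3024}. Only the first crop
 * entry is consumed here, and crop_meta.num_of_streams is hard-coded to 1. */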
11519
11520 /* These settings are not needed for regular requests so handle them specially for
11521 reprocess requests; information needed for EXIF tags */
11522 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11523 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11524 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11525 if (NAME_NOT_FOUND != val) {
11526 uint32_t flashMode = (uint32_t)val;
11527 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11528 rc = BAD_VALUE;
11529 }
11530 } else {
11531 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11532 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11533 }
11534 } else {
11535 LOGH("No flash mode in reprocess settings");
11536 }
11537
11538 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11539 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11540 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11541 rc = BAD_VALUE;
11542 }
11543 } else {
11544 LOGH("No flash state in reprocess settings");
11545 }
11546
11547 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11548 uint8_t *reprocessFlags =
11549 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11550 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11551 *reprocessFlags)) {
11552 rc = BAD_VALUE;
11553 }
11554 }
11555
Thierry Strudel54dc9782017-02-15 12:12:10 -080011556 // Add exif debug data to internal metadata
11557 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11558 mm_jpeg_debug_exif_params_t *debug_params =
11559 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11560 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11561 // AE
11562 if (debug_params->ae_debug_params_valid == TRUE) {
11563 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11564 debug_params->ae_debug_params);
11565 }
11566 // AWB
11567 if (debug_params->awb_debug_params_valid == TRUE) {
11568 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11569 debug_params->awb_debug_params);
11570 }
11571 // AF
11572 if (debug_params->af_debug_params_valid == TRUE) {
11573 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11574 debug_params->af_debug_params);
11575 }
11576 // ASD
11577 if (debug_params->asd_debug_params_valid == TRUE) {
11578 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11579 debug_params->asd_debug_params);
11580 }
11581 // Stats
11582 if (debug_params->stats_debug_params_valid == TRUE) {
11583 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11584 debug_params->stats_debug_params);
11585 }
11586 // BE Stats
11587 if (debug_params->bestats_debug_params_valid == TRUE) {
11588 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11589 debug_params->bestats_debug_params);
11590 }
11591 // BHIST
11592 if (debug_params->bhist_debug_params_valid == TRUE) {
11593 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11594 debug_params->bhist_debug_params);
11595 }
11596 // 3A Tuning
11597 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11598 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11599 debug_params->q3a_tuning_debug_params);
11600 }
11601 }
11602
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011603 // Add metadata which reprocess needs
11604 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11605 cam_reprocess_info_t *repro_info =
11606 (cam_reprocess_info_t *)frame_settings.find
11607 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011608 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011609 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011610 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011611 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011612 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011613 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011614 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011615 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011616 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011617 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011618 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011619 repro_info->pipeline_flip);
11620 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11621 repro_info->af_roi);
11622 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11623 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011624        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11625           CAM_INTF_PARM_ROTATION metadata has already been added in
11626           translateToHalMetadata. HAL needs to keep this new rotation
11627           metadata. Otherwise, the old rotation info saved in the vendor tag
11628           is used */
11629 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11630 CAM_INTF_PARM_ROTATION, reprocParam) {
11631 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11632 } else {
11633 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011634 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011635 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011636 }
11637
11638    /* Add additional JPEG cropping information. The app sets QCAMERA3_JPEG_ENCODE_CROP_RECT
11639       to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11640       roi.width and roi.height are the final JPEG size.
11641       For now, HAL only checks this for reprocess requests */
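    /* Illustrative only (not part of the HAL): a client could request this roughly as
     *     uint8_t enable = 1;
     *     int32_t rect[4] = {0, 0, 4000, 3000};   // left, top, width, height to crop from
     *     int32_t roi[4]  = {0, 0, 1920, 1080};   // roi[2]/roi[3] become the final JPEG size
     *     settings.update(QCAMERA3_JPEG_ENCODE_CROP_ENABLE, &enable, 1);
     *     settings.update(QCAMERA3_JPEG_ENCODE_CROP_RECT, rect, 4);
     *     settings.update(QCAMERA3_JPEG_ENCODE_CROP_ROI, roi, 4);
     * The parsing below maps these into CAM_INTF_PARM_JPEG_ENCODE_CROP and
     * CAM_INTF_PARM_JPEG_SCALE_DIMENSION. The sizes above are made up for illustration;
     * only the {left, top, width, height} layout matches what the code expects. */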
11642 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11643 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11644 uint8_t *enable =
11645 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11646 if (*enable == TRUE) {
11647 int32_t *crop_data =
11648 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11649 cam_stream_crop_info_t crop_meta;
11650 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11651 crop_meta.stream_id = 0;
11652 crop_meta.crop.left = crop_data[0];
11653 crop_meta.crop.top = crop_data[1];
11654 crop_meta.crop.width = crop_data[2];
11655 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011656 // The JPEG crop roi should match cpp output size
11657 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11658 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11659 crop_meta.roi_map.left = 0;
11660 crop_meta.roi_map.top = 0;
11661 crop_meta.roi_map.width = cpp_crop->crop.width;
11662 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011663 }
11664 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11665 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011666 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011667 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011668 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11669 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011670 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011671 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11672
11673 // Add JPEG scale information
11674 cam_dimension_t scale_dim;
11675 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11676 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11677 int32_t *roi =
11678 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11679 scale_dim.width = roi[2];
11680 scale_dim.height = roi[3];
11681 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11682 scale_dim);
11683 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11684 scale_dim.width, scale_dim.height, mCameraId);
11685 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011686 }
11687 }
11688
11689 return rc;
11690}
11691
11692/*===========================================================================
11693 * FUNCTION : saveRequestSettings
11694 *
11695 * DESCRIPTION: Merge any settings that might have changed into the request settings
11696 *              and save them to be applied on the frame
11697 *
11698 * PARAMETERS :
11699 * @jpegMetadata : the extracted and/or modified jpeg metadata
11700 * @request : request with initial settings
11701 *
11702 * RETURN :
11703 * camera_metadata_t* : pointer to the saved request settings
11704 *==========================================================================*/
11705camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11706 const CameraMetadata &jpegMetadata,
11707 camera3_capture_request_t *request)
11708{
11709 camera_metadata_t *resultMetadata;
11710 CameraMetadata camMetadata;
11711 camMetadata = request->settings;
11712
11713 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11714 int32_t thumbnail_size[2];
11715 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11716 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11717 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11718 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11719 }
11720
11721 if (request->input_buffer != NULL) {
11722 uint8_t reprocessFlags = 1;
11723 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11724 (uint8_t*)&reprocessFlags,
11725 sizeof(reprocessFlags));
11726 }
11727
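    // Note: CameraMetadata::release() detaches the underlying camera_metadata_t and hands
    // ownership to the caller, which must eventually free it (typically with
    // free_camera_metadata()).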
11728 resultMetadata = camMetadata.release();
11729 return resultMetadata;
11730}
11731
11732/*===========================================================================
11733 * FUNCTION : setHalFpsRange
11734 *
11735 * DESCRIPTION: set FPS range parameter
11736 *
11737 *
11738 * PARAMETERS :
11739 * @settings : Metadata from framework
11740 * @hal_metadata: Metadata buffer
11741 *
11742 *
11743 * RETURN : success: NO_ERROR
11744 *              failure: BAD_VALUE
11745 *==========================================================================*/
11746int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11747 metadata_buffer_t *hal_metadata)
11748{
11749 int32_t rc = NO_ERROR;
11750 cam_fps_range_t fps_range;
11751 fps_range.min_fps = (float)
11752 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11753 fps_range.max_fps = (float)
11754 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11755 fps_range.video_min_fps = fps_range.min_fps;
11756 fps_range.video_max_fps = fps_range.max_fps;
11757
11758 LOGD("aeTargetFpsRange fps: [%f %f]",
11759 fps_range.min_fps, fps_range.max_fps);
11760 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11761 * follows:
11762 * ---------------------------------------------------------------|
11763 * Video stream is absent in configure_streams |
11764 * (Camcorder preview before the first video record) |
11765 * ---------------------------------------------------------------|
11766 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11767 * | | | vid_min/max_fps|
11768 * ---------------------------------------------------------------|
11769 * NO | [ 30, 240] | 240 | [240, 240] |
11770 * |-------------|-------------|----------------|
11771 * | [240, 240] | 240 | [240, 240] |
11772 * ---------------------------------------------------------------|
11773 * Video stream is present in configure_streams |
11774 * ---------------------------------------------------------------|
11775 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11776 * | | | vid_min/max_fps|
11777 * ---------------------------------------------------------------|
11778 * NO | [ 30, 240] | 240 | [240, 240] |
11779 * (camcorder prev |-------------|-------------|----------------|
11780 * after video rec | [240, 240] | 240 | [240, 240] |
11781 * is stopped) | | | |
11782 * ---------------------------------------------------------------|
11783 * YES | [ 30, 240] | 240 | [240, 240] |
11784 * |-------------|-------------|----------------|
11785 * | [240, 240] | 240 | [240, 240] |
11786 * ---------------------------------------------------------------|
11787 * When Video stream is absent in configure_streams,
11788 * preview fps = sensor_fps / batchsize
11789 * Eg: for 240fps at batchSize 4, preview = 60fps
11790 * for 120fps at batchSize 4, preview = 30fps
11791 *
11792 * When video stream is present in configure_streams, preview fps is as per
11793 * the ratio of preview buffers to video buffers requested in process
11794 * capture request
11795 */
11796 mBatchSize = 0;
11797 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11798 fps_range.min_fps = fps_range.video_max_fps;
11799 fps_range.video_min_fps = fps_range.video_max_fps;
11800 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11801 fps_range.max_fps);
11802 if (NAME_NOT_FOUND != val) {
11803 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11804 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11805 return BAD_VALUE;
11806 }
11807
11808 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11809 /* If batchmode is currently in progress and the fps changes,
11810 * set the flag to restart the sensor */
11811 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11812 (mHFRVideoFps != fps_range.max_fps)) {
11813 mNeedSensorRestart = true;
11814 }
11815 mHFRVideoFps = fps_range.max_fps;
11816 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
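                // e.g. assuming PREVIEW_FPS_FOR_HFR is 30, a [240, 240] request yields
                // mBatchSize = 240 / 30 = 8, subject to the MAX_HFR_BATCH_SIZE clamp below.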
11817 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11818 mBatchSize = MAX_HFR_BATCH_SIZE;
11819 }
11820 }
11821 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11822
11823 }
11824 } else {
11825 /* HFR mode is session param in backend/ISP. This should be reset when
11826 * in non-HFR mode */
11827 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11828 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11829 return BAD_VALUE;
11830 }
11831 }
11832 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11833 return BAD_VALUE;
11834 }
11835 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11836 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11837 return rc;
11838}
11839
11840/*===========================================================================
11841 * FUNCTION : translateToHalMetadata
11842 *
11843 * DESCRIPTION: read settings from the camera_metadata_t and translate them to parm_type_t entries
11844 *
11845 *
11846 * PARAMETERS :
11847 * @request : request sent from framework
11848 *
11849 *
11850 * RETURN : success: NO_ERROR
11851 *              failure: BAD_VALUE
11852 *==========================================================================*/
11853int QCamera3HardwareInterface::translateToHalMetadata
11854 (const camera3_capture_request_t *request,
11855 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011856 uint32_t snapshotStreamId) {
11857 if (request == nullptr || hal_metadata == nullptr) {
11858 return BAD_VALUE;
11859 }
11860
11861 int64_t minFrameDuration = getMinFrameDuration(request);
11862
11863 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11864 minFrameDuration);
11865}
11866
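/*===========================================================================
 * FUNCTION   : translateFwkMetadataToHalMetadata
 *
 * DESCRIPTION: translate framework capture settings (camera_metadata_t) into
 *              HAL metadata entries; called by translateToHalMetadata above
 *
 * PARAMETERS :
 *   @frameworkMetadata : settings from the framework request
 *   @hal_metadata      : HAL metadata buffer to populate
 *   @snapshotStreamId  : stream ID attached to snapshot-related entries
 *                        (e.g. CAM_INTF_PARM_ROTATION)
 *   @minFrameDuration  : lower bound used to clamp ANDROID_SENSOR_FRAME_DURATION
 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
 *==========================================================================*/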
11867int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11868 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11869 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11870
Thierry Strudel3d639192016-09-09 11:52:26 -070011871 int rc = 0;
11872 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011873 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011874
11875 /* Do not change the order of the following list unless you know what you are
11876 * doing.
11877 * The order is laid out in such a way that parameters in the front of the table
11878 * may be used to override the parameters later in the table. Examples are:
11879 * 1. META_MODE should precede AEC/AWB/AF MODE
11880 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11881 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11882 * 4. Any mode should precede its corresponding settings
11883 */
11884 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11885 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11886 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11887 rc = BAD_VALUE;
11888 }
11889 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11890 if (rc != NO_ERROR) {
11891 LOGE("extractSceneMode failed");
11892 }
11893 }
11894
11895 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11896 uint8_t fwk_aeMode =
11897 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11898 uint8_t aeMode;
11899 int32_t redeye;
11900
11901 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11902 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011903 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11904 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011905 } else {
11906 aeMode = CAM_AE_MODE_ON;
11907 }
11908 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11909 redeye = 1;
11910 } else {
11911 redeye = 0;
11912 }
11913
11914 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11915 fwk_aeMode);
11916 if (NAME_NOT_FOUND != val) {
11917 int32_t flashMode = (int32_t)val;
11918 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11919 }
11920
11921 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11922 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11923 rc = BAD_VALUE;
11924 }
11925 }
11926
11927 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11928 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11929 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11930 fwk_whiteLevel);
11931 if (NAME_NOT_FOUND != val) {
11932 uint8_t whiteLevel = (uint8_t)val;
11933 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11934 rc = BAD_VALUE;
11935 }
11936 }
11937 }
11938
11939 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11940 uint8_t fwk_cacMode =
11941 frame_settings.find(
11942 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11943 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11944 fwk_cacMode);
11945 if (NAME_NOT_FOUND != val) {
11946 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11947 bool entryAvailable = FALSE;
11948 // Check whether Frameworks set CAC mode is supported in device or not
11949 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11950 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11951 entryAvailable = TRUE;
11952 break;
11953 }
11954 }
11955 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11956            // If the entry is not found, set the device-supported mode instead of the framework's mode, i.e.,
11957 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11958 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11959 if (entryAvailable == FALSE) {
11960 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11961 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11962 } else {
11963 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11964                    // High is not supported, so set FAST since the spec says the underlying
11965                    // device implementation can be the same for both modes.
11966 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11967 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11968                    // Fast is not supported, so neither HIGH nor FAST can be set; choose OFF
11969                    // to avoid the fps drop that high quality would cause.
11970 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11971 } else {
11972 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11973 }
11974 }
11975 }
11976 LOGD("Final cacMode is %d", cacMode);
11977 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11978 rc = BAD_VALUE;
11979 }
11980 } else {
11981 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11982 }
11983 }
11984
Thierry Strudel2896d122017-02-23 19:18:03 -080011985 char af_value[PROPERTY_VALUE_MAX];
11986 property_get("persist.camera.af.infinity", af_value, "0");
11987
Jason Lee84ae9972017-02-24 13:24:24 -080011988 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011989 if (atoi(af_value) == 0) {
11990 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011991 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011992 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11993 fwk_focusMode);
11994 if (NAME_NOT_FOUND != val) {
11995 uint8_t focusMode = (uint8_t)val;
11996 LOGD("set focus mode %d", focusMode);
11997 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11998 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11999 rc = BAD_VALUE;
12000 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012001 }
12002 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012003 } else {
12004 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12005 LOGE("Focus forced to infinity %d", focusMode);
12006 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12007 rc = BAD_VALUE;
12008 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012009 }
12010
Jason Lee84ae9972017-02-24 13:24:24 -080012011 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12012 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012013 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12014 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12015 focalDistance)) {
12016 rc = BAD_VALUE;
12017 }
12018 }
12019
12020 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12021 uint8_t fwk_antibandingMode =
12022 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12023 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12024 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12025 if (NAME_NOT_FOUND != val) {
12026 uint32_t hal_antibandingMode = (uint32_t)val;
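            // The AUTO case below is resolved to a concrete mains frequency before it is
            // sent to the backend: m60HzZone (presumably derived from the device's
            // region/time zone when the camera is opened) selects the 60Hz auto variant,
            // otherwise the 50Hz variant is used.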
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012027 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12028 if (m60HzZone) {
12029 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12030 } else {
12031 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12032 }
12033 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012034 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12035 hal_antibandingMode)) {
12036 rc = BAD_VALUE;
12037 }
12038 }
12039 }
12040
12041 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12042 int32_t expCompensation = frame_settings.find(
12043 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12044 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12045 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12046 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12047 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012048 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012049 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12050 expCompensation)) {
12051 rc = BAD_VALUE;
12052 }
12053 }
12054
12055 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12056 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12057 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12058 rc = BAD_VALUE;
12059 }
12060 }
12061 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12062 rc = setHalFpsRange(frame_settings, hal_metadata);
12063 if (rc != NO_ERROR) {
12064 LOGE("setHalFpsRange failed");
12065 }
12066 }
12067
12068 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12069 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12070 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12071 rc = BAD_VALUE;
12072 }
12073 }
12074
12075 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12076 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12077 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12078 fwk_effectMode);
12079 if (NAME_NOT_FOUND != val) {
12080 uint8_t effectMode = (uint8_t)val;
12081 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12082 rc = BAD_VALUE;
12083 }
12084 }
12085 }
12086
12087 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12088 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12089 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12090 colorCorrectMode)) {
12091 rc = BAD_VALUE;
12092 }
12093 }
12094
12095 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12096 cam_color_correct_gains_t colorCorrectGains;
12097 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12098 colorCorrectGains.gains[i] =
12099 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12100 }
12101 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12102 colorCorrectGains)) {
12103 rc = BAD_VALUE;
12104 }
12105 }
12106
12107 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12108 cam_color_correct_matrix_t colorCorrectTransform;
12109 cam_rational_type_t transform_elem;
12110 size_t num = 0;
12111 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12112 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12113 transform_elem.numerator =
12114 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12115 transform_elem.denominator =
12116 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12117 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12118 num++;
12119 }
12120 }
12121 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12122 colorCorrectTransform)) {
12123 rc = BAD_VALUE;
12124 }
12125 }
12126
12127 cam_trigger_t aecTrigger;
12128 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12129 aecTrigger.trigger_id = -1;
12130 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12131 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12132 aecTrigger.trigger =
12133 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12134 aecTrigger.trigger_id =
12135 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12136 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12137 aecTrigger)) {
12138 rc = BAD_VALUE;
12139 }
12140 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12141 aecTrigger.trigger, aecTrigger.trigger_id);
12142 }
12143
12144 /*af_trigger must come with a trigger id*/
12145 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12146 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12147 cam_trigger_t af_trigger;
12148 af_trigger.trigger =
12149 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12150 af_trigger.trigger_id =
12151 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12152 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12153 rc = BAD_VALUE;
12154 }
12155 LOGD("AfTrigger: %d AfTriggerID: %d",
12156 af_trigger.trigger, af_trigger.trigger_id);
12157 }
12158
12159 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12160 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12161 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12162 rc = BAD_VALUE;
12163 }
12164 }
12165 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12166 cam_edge_application_t edge_application;
12167 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012168
Thierry Strudel3d639192016-09-09 11:52:26 -070012169 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12170 edge_application.sharpness = 0;
12171 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012172 edge_application.sharpness =
12173 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12174 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12175 int32_t sharpness =
12176 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12177 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12178 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12179 LOGD("Setting edge mode sharpness %d", sharpness);
12180 edge_application.sharpness = sharpness;
12181 }
12182 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012183 }
12184 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12185 rc = BAD_VALUE;
12186 }
12187 }
12188
12189 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12190 int32_t respectFlashMode = 1;
12191 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12192 uint8_t fwk_aeMode =
12193 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012194 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12195 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12196 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012197 respectFlashMode = 0;
12198 LOGH("AE Mode controls flash, ignore android.flash.mode");
12199 }
12200 }
12201 if (respectFlashMode) {
12202 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12203 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12204 LOGH("flash mode after mapping %d", val);
12205 // To check: CAM_INTF_META_FLASH_MODE usage
12206 if (NAME_NOT_FOUND != val) {
12207 uint8_t flashMode = (uint8_t)val;
12208 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12209 rc = BAD_VALUE;
12210 }
12211 }
12212 }
12213 }
12214
12215 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12216 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12217 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12218 rc = BAD_VALUE;
12219 }
12220 }
12221
12222 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12223 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12224 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12225 flashFiringTime)) {
12226 rc = BAD_VALUE;
12227 }
12228 }
12229
12230 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12231 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12232 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12233 hotPixelMode)) {
12234 rc = BAD_VALUE;
12235 }
12236 }
12237
12238 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12239 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12240 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12241 lensAperture)) {
12242 rc = BAD_VALUE;
12243 }
12244 }
12245
12246 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12247 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12248 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12249 filterDensity)) {
12250 rc = BAD_VALUE;
12251 }
12252 }
12253
12254 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12255 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12256 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12257 focalLength)) {
12258 rc = BAD_VALUE;
12259 }
12260 }
12261
12262 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12263 uint8_t optStabMode =
12264 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12265 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12266 optStabMode)) {
12267 rc = BAD_VALUE;
12268 }
12269 }
12270
12271 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12272 uint8_t videoStabMode =
12273 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12274 LOGD("videoStabMode from APP = %d", videoStabMode);
12275 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12276 videoStabMode)) {
12277 rc = BAD_VALUE;
12278 }
12279 }
12280
12281
12282 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12283 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12284 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12285 noiseRedMode)) {
12286 rc = BAD_VALUE;
12287 }
12288 }
12289
12290 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12291 float reprocessEffectiveExposureFactor =
12292 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12293 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12294 reprocessEffectiveExposureFactor)) {
12295 rc = BAD_VALUE;
12296 }
12297 }
12298
12299 cam_crop_region_t scalerCropRegion;
12300 bool scalerCropSet = false;
12301 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12302 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12303 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12304 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12305 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12306
12307 // Map coordinate system from active array to sensor output.
12308 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12309 scalerCropRegion.width, scalerCropRegion.height);
12310
12311 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12312 scalerCropRegion)) {
12313 rc = BAD_VALUE;
12314 }
12315 scalerCropSet = true;
12316 }
12317
12318 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12319 int64_t sensorExpTime =
12320 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12321 LOGD("setting sensorExpTime %lld", sensorExpTime);
12322 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12323 sensorExpTime)) {
12324 rc = BAD_VALUE;
12325 }
12326 }
12327
12328 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12329 int64_t sensorFrameDuration =
12330 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012331 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12332 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12333 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12334 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12335 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12336 sensorFrameDuration)) {
12337 rc = BAD_VALUE;
12338 }
12339 }
12340
12341 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12342 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12343 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12344 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12345 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12346 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12347 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12348 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12349 sensorSensitivity)) {
12350 rc = BAD_VALUE;
12351 }
12352 }
12353
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012354#ifndef USE_HAL_3_3
12355 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12356 int32_t ispSensitivity =
12357 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12358 if (ispSensitivity <
12359 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12360 ispSensitivity =
12361 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12362 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12363 }
12364 if (ispSensitivity >
12365 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12366 ispSensitivity =
12367 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12368 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12369 }
12370 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12371 ispSensitivity)) {
12372 rc = BAD_VALUE;
12373 }
12374 }
12375#endif
12376
Thierry Strudel3d639192016-09-09 11:52:26 -070012377 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12378 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12379 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12380 rc = BAD_VALUE;
12381 }
12382 }
12383
12384 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12385 uint8_t fwk_facedetectMode =
12386 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12387
12388 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12389 fwk_facedetectMode);
12390
12391 if (NAME_NOT_FOUND != val) {
12392 uint8_t facedetectMode = (uint8_t)val;
12393 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12394 facedetectMode)) {
12395 rc = BAD_VALUE;
12396 }
12397 }
12398 }
12399
Thierry Strudel54dc9782017-02-15 12:12:10 -080012400 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012401 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012402 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012403 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12404 histogramMode)) {
12405 rc = BAD_VALUE;
12406 }
12407 }
12408
12409 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12410 uint8_t sharpnessMapMode =
12411 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12412 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12413 sharpnessMapMode)) {
12414 rc = BAD_VALUE;
12415 }
12416 }
12417
12418 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12419 uint8_t tonemapMode =
12420 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12421 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12422 rc = BAD_VALUE;
12423 }
12424 }
12425 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12426 /*All tonemap channels will have the same number of points*/
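    /* Each ANDROID_TONEMAP_CURVE_* entry is a flat list of (Pin, Pout) pairs, so the
       point count below is entry.count / 2; curves longer than CAM_MAX_TONEMAP_CURVE_SIZE
       are truncated to the maximum the backend supports. */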
12427 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12428 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12429 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12430 cam_rgb_tonemap_curves tonemapCurves;
12431 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12432 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12433 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12434 tonemapCurves.tonemap_points_cnt,
12435 CAM_MAX_TONEMAP_CURVE_SIZE);
12436 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12437 }
12438
12439 /* ch0 = G*/
12440 size_t point = 0;
12441 cam_tonemap_curve_t tonemapCurveGreen;
12442 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12443 for (size_t j = 0; j < 2; j++) {
12444 tonemapCurveGreen.tonemap_points[i][j] =
12445 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12446 point++;
12447 }
12448 }
12449 tonemapCurves.curves[0] = tonemapCurveGreen;
12450
12451 /* ch 1 = B */
12452 point = 0;
12453 cam_tonemap_curve_t tonemapCurveBlue;
12454 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12455 for (size_t j = 0; j < 2; j++) {
12456 tonemapCurveBlue.tonemap_points[i][j] =
12457 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12458 point++;
12459 }
12460 }
12461 tonemapCurves.curves[1] = tonemapCurveBlue;
12462
12463 /* ch 2 = R */
12464 point = 0;
12465 cam_tonemap_curve_t tonemapCurveRed;
12466 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12467 for (size_t j = 0; j < 2; j++) {
12468 tonemapCurveRed.tonemap_points[i][j] =
12469 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12470 point++;
12471 }
12472 }
12473 tonemapCurves.curves[2] = tonemapCurveRed;
12474
12475 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12476 tonemapCurves)) {
12477 rc = BAD_VALUE;
12478 }
12479 }
12480
12481 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12482 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12483 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12484 captureIntent)) {
12485 rc = BAD_VALUE;
12486 }
12487 }
12488
12489 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12490 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12491 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12492 blackLevelLock)) {
12493 rc = BAD_VALUE;
12494 }
12495 }
12496
12497 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12498 uint8_t lensShadingMapMode =
12499 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12500 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12501 lensShadingMapMode)) {
12502 rc = BAD_VALUE;
12503 }
12504 }
12505
12506 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12507 cam_area_t roi;
12508 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012509 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012510
12511 // Map coordinate system from active array to sensor output.
12512 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12513 roi.rect.height);
12514
12515 if (scalerCropSet) {
12516 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12517 }
12518 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12519 rc = BAD_VALUE;
12520 }
12521 }
12522
12523 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12524 cam_area_t roi;
12525 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012526 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012527
12528 // Map coordinate system from active array to sensor output.
12529 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12530 roi.rect.height);
12531
12532 if (scalerCropSet) {
12533 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12534 }
12535 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12536 rc = BAD_VALUE;
12537 }
12538 }
12539
12540 // CDS for non-HFR non-video mode
12541 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12542 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12543 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12544 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12545 LOGE("Invalid CDS mode %d!", *fwk_cds);
12546 } else {
12547 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12548 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12549 rc = BAD_VALUE;
12550 }
12551 }
12552 }
12553
Thierry Strudel04e026f2016-10-10 11:27:36 -070012554 // Video HDR
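    // The effective mode is the vendor-tag request (QCAMERA3_VIDEO_HDR_MODE), forced to ON
    // while m_bVideoHdrEnabled is set; PROFILE_SET_HDR_MODE is logged only when the
    // requested state differs from the currently active feature state.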
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012555 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012556 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012557 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12558 }
12559 if (m_bVideoHdrEnabled)
12560 vhdr = CAM_VIDEO_HDR_MODE_ON;
12561
Thierry Strudel54dc9782017-02-15 12:12:10 -080012562 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12563
12564 if(vhdr != curr_hdr_state)
12565 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12566
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012567 rc = setVideoHdrMode(mParameters, vhdr);
12568 if (rc != NO_ERROR) {
12569 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012570 }
12571
12572 //IR
12573 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12574 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12575 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012576 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12577 uint8_t isIRon = 0;
12578
12579        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012580 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12581 LOGE("Invalid IR mode %d!", fwk_ir);
12582 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012583 if(isIRon != curr_ir_state )
12584 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12585
Thierry Strudel04e026f2016-10-10 11:27:36 -070012586 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12587 CAM_INTF_META_IR_MODE, fwk_ir)) {
12588 rc = BAD_VALUE;
12589 }
12590 }
12591 }
12592
Thierry Strudel54dc9782017-02-15 12:12:10 -080012593 //Binning Correction Mode
12594 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12595 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12596 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12597 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12598 || (0 > fwk_binning_correction)) {
12599 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12600 } else {
12601 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12602 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12603 rc = BAD_VALUE;
12604 }
12605 }
12606 }
12607
Thierry Strudel269c81a2016-10-12 12:13:59 -070012608 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12609 float aec_speed;
12610 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12611 LOGD("AEC Speed :%f", aec_speed);
12612 if ( aec_speed < 0 ) {
12613 LOGE("Invalid AEC mode %f!", aec_speed);
12614 } else {
12615 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12616 aec_speed)) {
12617 rc = BAD_VALUE;
12618 }
12619 }
12620 }
12621
12622 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12623 float awb_speed;
12624 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12625 LOGD("AWB Speed :%f", awb_speed);
12626 if ( awb_speed < 0 ) {
12627 LOGE("Invalid AWB mode %f!", awb_speed);
12628 } else {
12629 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12630 awb_speed)) {
12631 rc = BAD_VALUE;
12632 }
12633 }
12634 }
12635
Thierry Strudel3d639192016-09-09 11:52:26 -070012636 // TNR
12637 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12638 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12639 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012640 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012641 cam_denoise_param_t tnr;
12642 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12643 tnr.process_plates =
12644 (cam_denoise_process_type_t)frame_settings.find(
12645 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12646 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012647
12648 if(b_TnrRequested != curr_tnr_state)
12649 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12650
Thierry Strudel3d639192016-09-09 11:52:26 -070012651 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12652 rc = BAD_VALUE;
12653 }
12654 }
12655
Thierry Strudel54dc9782017-02-15 12:12:10 -080012656 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012657 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012658 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012659 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12660 *exposure_metering_mode)) {
12661 rc = BAD_VALUE;
12662 }
12663 }
12664
Thierry Strudel3d639192016-09-09 11:52:26 -070012665 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12666 int32_t fwk_testPatternMode =
12667 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12668 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12669 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12670
12671 if (NAME_NOT_FOUND != testPatternMode) {
12672 cam_test_pattern_data_t testPatternData;
12673 memset(&testPatternData, 0, sizeof(testPatternData));
12674 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12675 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12676 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12677 int32_t *fwk_testPatternData =
12678 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12679 testPatternData.r = fwk_testPatternData[0];
12680 testPatternData.b = fwk_testPatternData[3];
12681 switch (gCamCapability[mCameraId]->color_arrangement) {
12682 case CAM_FILTER_ARRANGEMENT_RGGB:
12683 case CAM_FILTER_ARRANGEMENT_GRBG:
12684 testPatternData.gr = fwk_testPatternData[1];
12685 testPatternData.gb = fwk_testPatternData[2];
12686 break;
12687 case CAM_FILTER_ARRANGEMENT_GBRG:
12688 case CAM_FILTER_ARRANGEMENT_BGGR:
12689 testPatternData.gr = fwk_testPatternData[2];
12690 testPatternData.gb = fwk_testPatternData[1];
12691 break;
12692 default:
12693 LOGE("color arrangement %d is not supported",
12694 gCamCapability[mCameraId]->color_arrangement);
12695 break;
12696 }
12697 }
12698 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12699 testPatternData)) {
12700 rc = BAD_VALUE;
12701 }
12702 } else {
12703 LOGE("Invalid framework sensor test pattern mode %d",
12704 fwk_testPatternMode);
12705 }
12706 }
12707
12708 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12709 size_t count = 0;
12710 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12711 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12712 gps_coords.data.d, gps_coords.count, count);
12713 if (gps_coords.count != count) {
12714 rc = BAD_VALUE;
12715 }
12716 }
12717
12718 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12719 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12720 size_t count = 0;
12721 const char *gps_methods_src = (const char *)
12722 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12723 memset(gps_methods, '\0', sizeof(gps_methods));
12724 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12725 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12726 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12727 if (GPS_PROCESSING_METHOD_SIZE != count) {
12728 rc = BAD_VALUE;
12729 }
12730 }
12731
12732 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12733 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12734 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12735 gps_timestamp)) {
12736 rc = BAD_VALUE;
12737 }
12738 }
12739
12740 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12741 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12742 cam_rotation_info_t rotation_info;
12743 if (orientation == 0) {
12744 rotation_info.rotation = ROTATE_0;
12745 } else if (orientation == 90) {
12746 rotation_info.rotation = ROTATE_90;
12747 } else if (orientation == 180) {
12748 rotation_info.rotation = ROTATE_180;
12749 } else if (orientation == 270) {
12750 rotation_info.rotation = ROTATE_270;
12751 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012752 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012753 rotation_info.streamId = snapshotStreamId;
12754 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12755 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12756 rc = BAD_VALUE;
12757 }
12758 }
12759
12760 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12761 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12762 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12763 rc = BAD_VALUE;
12764 }
12765 }
12766
12767 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12768 uint32_t thumb_quality = (uint32_t)
12769 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12770 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12771 thumb_quality)) {
12772 rc = BAD_VALUE;
12773 }
12774 }
12775
12776 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12777 cam_dimension_t dim;
12778 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12779 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12780 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12781 rc = BAD_VALUE;
12782 }
12783 }
12784
12785 // Internal metadata
12786 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12787 size_t count = 0;
12788 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12789 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12790 privatedata.data.i32, privatedata.count, count);
12791 if (privatedata.count != count) {
12792 rc = BAD_VALUE;
12793 }
12794 }
12795
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012796 // ISO/Exposure Priority
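    // QCAMERA3_SELECT_PRIORITY chooses between ISO priority and exposure-time priority,
    // and QCAMERA3_USE_ISO_EXP_PRIORITY carries the selected value as a 64-bit integer.
    // While a priority mode is active the HAL also enables ZSL mode; otherwise (the else
    // branch below) ZSL is explicitly disabled.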
12797 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12798 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12799 cam_priority_mode_t mode =
12800 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12801 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12802 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12803 use_iso_exp_pty.previewOnly = FALSE;
12804 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12805 use_iso_exp_pty.value = *ptr;
12806
12807 if(CAM_ISO_PRIORITY == mode) {
12808 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12809 use_iso_exp_pty)) {
12810 rc = BAD_VALUE;
12811 }
12812 }
12813 else {
12814 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12815 use_iso_exp_pty)) {
12816 rc = BAD_VALUE;
12817 }
12818 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012819
12820 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12821 rc = BAD_VALUE;
12822 }
12823 }
12824 } else {
12825 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12826 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012827 }
12828 }
12829
12830 // Saturation
12831 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12832 int32_t* use_saturation =
12833 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12834 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12835 rc = BAD_VALUE;
12836 }
12837 }
12838
Thierry Strudel3d639192016-09-09 11:52:26 -070012839 // EV step
12840 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12841 gCamCapability[mCameraId]->exp_compensation_step)) {
12842 rc = BAD_VALUE;
12843 }
12844
12845 // CDS info
12846 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12847 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12848 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12849
12850 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12851 CAM_INTF_META_CDS_DATA, *cdsData)) {
12852 rc = BAD_VALUE;
12853 }
12854 }
12855
Shuzhen Wang19463d72016-03-08 11:09:52 -080012856 // Hybrid AE
12857 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12858 uint8_t *hybrid_ae = (uint8_t *)
12859 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12860
12861 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12862 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12863 rc = BAD_VALUE;
12864 }
12865 }
12866
Shuzhen Wang14415f52016-11-16 18:26:18 -080012867 // Histogram
12868 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12869 uint8_t histogramMode =
12870 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12871 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12872 histogramMode)) {
12873 rc = BAD_VALUE;
12874 }
12875 }
12876
12877 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12878 int32_t histogramBins =
12879 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12880 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12881 histogramBins)) {
12882 rc = BAD_VALUE;
12883 }
12884 }
12885
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012886 // Tracking AF
12887 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12888 uint8_t trackingAfTrigger =
12889 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12890 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12891 trackingAfTrigger)) {
12892 rc = BAD_VALUE;
12893 }
12894 }
12895
Thierry Strudel3d639192016-09-09 11:52:26 -070012896 return rc;
12897}
12898
12899/*===========================================================================
12900 * FUNCTION : captureResultCb
12901 *
12902 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12903 *
12904 * PARAMETERS :
12905 * @frame : frame information from mm-camera-interface
12906 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12907 * @userdata: userdata
12908 *
12909 * RETURN : NONE
12910 *==========================================================================*/
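// Static trampoline used as the channel callback: the channel passes the
// QCamera3HardwareInterface instance back as opaque userdata and this wrapper forwards
// to the member-function overload below.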
12911void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12912 camera3_stream_buffer_t *buffer,
12913 uint32_t frame_number, bool isInputBuffer, void *userdata)
12914{
12915 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12916 if (hw == NULL) {
12917 LOGE("Invalid hw %p", hw);
12918 return;
12919 }
12920
12921 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12922 return;
12923}
12924
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012925/*===========================================================================
12926 * FUNCTION : setBufferErrorStatus
12927 *
12928 * DESCRIPTION: Callback handler for channels to report any buffer errors
12929 *
12930 * PARAMETERS :
12931 * @ch : Channel on which buffer error is reported from
12932 * @frame_number : frame number on which buffer error is reported on
12933 * @buffer_status : buffer error status
12934 * @userdata: userdata
12935 *
12936 * RETURN : NONE
12937 *==========================================================================*/
12938void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12939 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12940{
12941 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12942 if (hw == NULL) {
12943 LOGE("Invalid hw %p", hw);
12944 return;
12945 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012946
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012947 hw->setBufferErrorStatus(ch, frame_number, err);
12948 return;
12949}
12950
12951void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12952 uint32_t frameNumber, camera3_buffer_status_t err)
12953{
12954 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12955 pthread_mutex_lock(&mMutex);
12956
12957 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12958 if (req.frame_number != frameNumber)
12959 continue;
12960 for (auto& k : req.mPendingBufferList) {
12961 if(k.stream->priv == ch) {
12962 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12963 }
12964 }
12965 }
12966
12967 pthread_mutex_unlock(&mMutex);
12968 return;
12969}
Thierry Strudel3d639192016-09-09 11:52:26 -070012970/*===========================================================================
12971 * FUNCTION : initialize
12972 *
12973 * DESCRIPTION: Pass framework callback pointers to HAL
12974 *
12975 * PARAMETERS :
12976 *   @device       : camera3 device handle
12977 *   @callback_ops : framework callback function table
12978 * RETURN : Success : 0
12979 * Failure: -ENODEV
12980 *==========================================================================*/
12981
12982int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12983 const camera3_callback_ops_t *callback_ops)
12984{
12985 LOGD("E");
12986 QCamera3HardwareInterface *hw =
12987 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12988 if (!hw) {
12989 LOGE("NULL camera device");
12990 return -ENODEV;
12991 }
12992
12993 int rc = hw->initialize(callback_ops);
12994 LOGD("X");
12995 return rc;
12996}
12997
12998/*===========================================================================
12999 * FUNCTION : configure_streams
13000 *
13001 * DESCRIPTION: Framework entry point to (re)configure the set of active
13002 *              streams; validates the device and forwards to configureStreams()
13003 * PARAMETERS :
13004 *   @device      : camera3 device handle
13005 *   @stream_list : stream configuration requested by the framework
13006 * RETURN : Success: 0
13007 * Failure: -EINVAL (if stream configuration is invalid)
13008 * -ENODEV (fatal error)
13009 *==========================================================================*/
13010
13011int QCamera3HardwareInterface::configure_streams(
13012 const struct camera3_device *device,
13013 camera3_stream_configuration_t *stream_list)
13014{
13015 LOGD("E");
13016 QCamera3HardwareInterface *hw =
13017 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13018 if (!hw) {
13019 LOGE("NULL camera device");
13020 return -ENODEV;
13021 }
13022 int rc = hw->configureStreams(stream_list);
13023 LOGD("X");
13024 return rc;
13025}
13026
13027/*===========================================================================
13028 * FUNCTION : construct_default_request_settings
13029 *
13030 * DESCRIPTION: Configure a settings buffer to meet the required use case
13031 *
13032 * PARAMETERS :
13033 *   @device : camera3 device handle
13034 *   @type   : CAMERA3_TEMPLATE_* use case to build default settings for
13035 * RETURN : Success: Return valid metadata
13036 * Failure: Return NULL
13037 *==========================================================================*/
13038const camera_metadata_t* QCamera3HardwareInterface::
13039 construct_default_request_settings(const struct camera3_device *device,
13040 int type)
13041{
13042
13043 LOGD("E");
13044 camera_metadata_t* fwk_metadata = NULL;
13045 QCamera3HardwareInterface *hw =
13046 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13047 if (!hw) {
13048 LOGE("NULL camera device");
13049 return NULL;
13050 }
13051
13052 fwk_metadata = hw->translateCapabilityToMetadata(type);
13053
13054 LOGD("X");
13055 return fwk_metadata;
13056}
13057
13058/*===========================================================================
13059 * FUNCTION : process_capture_request
13060 *
13061 * DESCRIPTION: Framework entry point for submitting a capture request;
13062 *              forwards the validated request to orchestrateRequest()
13063 * PARAMETERS :
13064 *   @device  : camera3 device handle
13065 *   @request : capture request with settings and output buffers
13066 * RETURN : 0 on success, negative error code on failure
13067 *==========================================================================*/
13068int QCamera3HardwareInterface::process_capture_request(
13069 const struct camera3_device *device,
13070 camera3_capture_request_t *request)
13071{
13072 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013073 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013074 QCamera3HardwareInterface *hw =
13075 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13076 if (!hw) {
13077 LOGE("NULL camera device");
13078 return -EINVAL;
13079 }
13080
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013081 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013082 LOGD("X");
13083 return rc;
13084}
13085
13086/*===========================================================================
13087 * FUNCTION : dump
13088 *
13089 * DESCRIPTION: Dump HAL state to the given file descriptor (triggered by
13090 *              "adb shell dumpsys media.camera"); also refreshes log levels
13091 * PARAMETERS :
13092 *   @device : camera3 device handle
13093 *   @fd     : file descriptor to write the dump to
13094 * RETURN : None
13095 *==========================================================================*/
13096
13097void QCamera3HardwareInterface::dump(
13098 const struct camera3_device *device, int fd)
13099{
13100 /* Log level property is read when "adb shell dumpsys media.camera" is
13101 called so that the log level can be controlled without restarting
13102 the media server */
13103 getLogLevel();
13104
13105 LOGD("E");
13106 QCamera3HardwareInterface *hw =
13107 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13108 if (!hw) {
13109 LOGE("NULL camera device");
13110 return;
13111 }
13112
13113 hw->dump(fd);
13114 LOGD("X");
13115 return;
13116}
13117
13118/*===========================================================================
13119 * FUNCTION : flush
13120 *
13121 * DESCRIPTION: Flush all in-flight requests and return their buffers/results
13122 *              with errors; only meaningful while the HAL is in STARTED state
13123 * PARAMETERS :
13124 *   @device : camera3 device handle
13125 * RETURN : 0 on success (or benign no-op), -EINVAL on NULL device,
13126 *          -ENODEV if the device is in the ERROR state
13127 *==========================================================================*/
13128
13129int QCamera3HardwareInterface::flush(
13130 const struct camera3_device *device)
13131{
13132 int rc;
13133 LOGD("E");
13134 QCamera3HardwareInterface *hw =
13135 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13136 if (!hw) {
13137 LOGE("NULL camera device");
13138 return -EINVAL;
13139 }
13140
13141 pthread_mutex_lock(&hw->mMutex);
13142 // Validate current state
13143 switch (hw->mState) {
13144 case STARTED:
13145 /* valid state */
13146 break;
13147
13148 case ERROR:
13149 pthread_mutex_unlock(&hw->mMutex);
13150 hw->handleCameraDeviceError();
13151 return -ENODEV;
13152
13153 default:
13154 LOGI("Flush returned during state %d", hw->mState);
13155 pthread_mutex_unlock(&hw->mMutex);
13156 return 0;
13157 }
13158 pthread_mutex_unlock(&hw->mMutex);
13159
13160 rc = hw->flush(true /* restart channels */ );
13161 LOGD("X");
13162 return rc;
13163}
13164
13165/*===========================================================================
13166 * FUNCTION : close_camera_device
13167 *
13168 * DESCRIPTION: Close the camera device and free the HAL instance
13169 *
13170 * PARAMETERS :
13171 *   @device : hw_device_t handle of the camera device to close
13172 *
13173 * RETURN : NO_ERROR on success, BAD_VALUE on NULL device
13174 *==========================================================================*/
13175int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13176{
13177 int ret = NO_ERROR;
13178 QCamera3HardwareInterface *hw =
13179 reinterpret_cast<QCamera3HardwareInterface *>(
13180 reinterpret_cast<camera3_device_t *>(device)->priv);
13181 if (!hw) {
13182 LOGE("NULL camera device");
13183 return BAD_VALUE;
13184 }
13185
13186 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13187 delete hw;
13188 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013189 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013190 return ret;
13191}
13192
13193/*===========================================================================
13194 * FUNCTION : getWaveletDenoiseProcessPlate
13195 *
13196 * DESCRIPTION: query wavelet denoise process plate
13197 *
13198 * PARAMETERS : None
13199 *
13200 * RETURN : WNR process plate value
13201 *==========================================================================*/
13202cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13203{
13204 char prop[PROPERTY_VALUE_MAX];
13205 memset(prop, 0, sizeof(prop));
13206 property_get("persist.denoise.process.plates", prop, "0");
13207 int processPlate = atoi(prop);
13208 switch(processPlate) {
13209 case 0:
13210 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13211 case 1:
13212 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13213 case 2:
13214 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13215 case 3:
13216 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13217 default:
13218 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13219 }
13220}
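// Illustrative usage (assumes the standard Android property tooling): the WNR
// plate can be overridden at runtime via the property read above, e.g.
//   adb shell setprop persist.denoise.process.plates 1   // CAM_WAVELET_DENOISE_CBCR_ONLY
// Values outside 0-3 fall back to CAM_WAVELET_DENOISE_STREAMLINE_YCBCR.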
13221
13222
13223/*===========================================================================
13224 * FUNCTION : getTemporalDenoiseProcessPlate
13225 *
13226 * DESCRIPTION: query temporal denoise process plate
13227 *
13228 * PARAMETERS : None
13229 *
13230 * RETURN : TNR process plate value
13231 *==========================================================================*/
13232cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13233{
13234 char prop[PROPERTY_VALUE_MAX];
13235 memset(prop, 0, sizeof(prop));
13236 property_get("persist.tnr.process.plates", prop, "0");
13237 int processPlate = atoi(prop);
13238 switch(processPlate) {
13239 case 0:
13240 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13241 case 1:
13242 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13243 case 2:
13244 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13245 case 3:
13246 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13247 default:
13248 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13249 }
13250}
13251
13252
13253/*===========================================================================
13254 * FUNCTION : extractSceneMode
13255 *
13256 * DESCRIPTION: Extract scene mode from frameworks set metadata
13257 *
13258 * PARAMETERS :
13259 * @frame_settings: CameraMetadata reference
13260 * @metaMode: ANDROID_CONTROL_MODE
13261 * @hal_metadata: hal metadata structure
13262 *
13263 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13264 *==========================================================================*/
13265int32_t QCamera3HardwareInterface::extractSceneMode(
13266 const CameraMetadata &frame_settings, uint8_t metaMode,
13267 metadata_buffer_t *hal_metadata)
13268{
13269 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013270 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13271
13272 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13273 LOGD("Ignoring control mode OFF_KEEP_STATE");
13274 return NO_ERROR;
13275 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013276
13277 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13278 camera_metadata_ro_entry entry =
13279 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13280 if (0 == entry.count)
13281 return rc;
13282
13283 uint8_t fwk_sceneMode = entry.data.u8[0];
13284
13285 int val = lookupHalName(SCENE_MODES_MAP,
13286 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13287 fwk_sceneMode);
13288 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013289 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013290 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013291 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013292 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013293
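    // Note: sensor HDR is (re)programmed whenever the HDR scene mode toggles (or
    // sensor HDR is already active); when sensor HDR is not engaged, the HDR scene
    // falls back to multi-frame HAL bracketing plus the bestshot parameter below.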
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013294 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13295 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13296 }
13297
13298 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13299 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013300 cam_hdr_param_t hdr_params;
13301 hdr_params.hdr_enable = 1;
13302 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13303 hdr_params.hdr_need_1x = false;
13304 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13305 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13306 rc = BAD_VALUE;
13307 }
13308 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013309
Thierry Strudel3d639192016-09-09 11:52:26 -070013310 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13311 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13312 rc = BAD_VALUE;
13313 }
13314 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013315
13316 if (mForceHdrSnapshot) {
13317 cam_hdr_param_t hdr_params;
13318 hdr_params.hdr_enable = 1;
13319 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13320 hdr_params.hdr_need_1x = false;
13321 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13322 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13323 rc = BAD_VALUE;
13324 }
13325 }
13326
Thierry Strudel3d639192016-09-09 11:52:26 -070013327 return rc;
13328}
13329
13330/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013331 * FUNCTION : setVideoHdrMode
13332 *
13333 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13334 *
13335 * PARAMETERS :
13336 * @hal_metadata: hal metadata structure
13337 * @vhdr: requested QCAMERA3_VIDEO_HDR mode (cam_video_hdr_mode_t)
13338 *
13339 * RETURN : NO_ERROR on success, BAD_VALUE on an invalid mode
13340 *==========================================================================*/
13341int32_t QCamera3HardwareInterface::setVideoHdrMode(
13342 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13343{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013344 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13345 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13346 }
13347
13348 LOGE("Invalid Video HDR mode %d!", vhdr);
13349 return BAD_VALUE;
13350}
13351
13352/*===========================================================================
13353 * FUNCTION : setSensorHDR
13354 *
13355 * DESCRIPTION: Enable/disable sensor HDR.
13356 *
13357 * PARAMETERS :
13358 * @hal_metadata: hal metadata structure
13359 * @enable: boolean whether to enable/disable sensor HDR
13360 * @isVideoHdrEnable: true when called for video HDR; skips updating m_bSensorHDREnabled
13361 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13362 *==========================================================================*/
13363int32_t QCamera3HardwareInterface::setSensorHDR(
13364 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13365{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013366 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013367 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13368
13369 if (enable) {
13370 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13371 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13372 #ifdef _LE_CAMERA_
13373 //Default to staggered HDR for IOT
13374 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13375 #else
13376 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13377 #endif
13378 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13379 }
13380
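    // Note: persist.camera.sensor.hdr selects the sensor HDR flavor as a
    // cam_sensor_hdr_type_t value (per the IOT default above, "3" is staggered HDR).
    // The requested type is applied only if the capability checks below pass.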
13381 bool isSupported = false;
13382 switch (sensor_hdr) {
13383 case CAM_SENSOR_HDR_IN_SENSOR:
13384 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13385 CAM_QCOM_FEATURE_SENSOR_HDR) {
13386 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013387 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013388 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013389 break;
13390 case CAM_SENSOR_HDR_ZIGZAG:
13391 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13392 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13393 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013394 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013395 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013396 break;
13397 case CAM_SENSOR_HDR_STAGGERED:
13398 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13399 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13400 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013401 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013402 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013403 break;
13404 case CAM_SENSOR_HDR_OFF:
13405 isSupported = true;
13406 LOGD("Turning off sensor HDR");
13407 break;
13408 default:
13409 LOGE("HDR mode %d not supported", sensor_hdr);
13410 rc = BAD_VALUE;
13411 break;
13412 }
13413
13414 if(isSupported) {
13415 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13416 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13417 rc = BAD_VALUE;
13418 } else {
13419 if(!isVideoHdrEnable)
13420 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013421 }
13422 }
13423 return rc;
13424}
13425
13426/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013427 * FUNCTION : needRotationReprocess
13428 *
13429 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13430 *
13431 * PARAMETERS : none
13432 *
13433 * RETURN : true: needed
13434 * false: no need
13435 *==========================================================================*/
13436bool QCamera3HardwareInterface::needRotationReprocess()
13437{
13438 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13439 // current rotation is not zero, and pp has the capability to process rotation
13440 LOGH("need do reprocess for rotation");
13441 return true;
13442 }
13443
13444 return false;
13445}
13446
13447/*===========================================================================
13448 * FUNCTION : needReprocess
13449 *
13450 * DESCRIPTION: if reprocess is needed
13451 *
13452 * PARAMETERS : none
13453 *
13454 * RETURN : true: needed
13455 * false: no need
13456 *==========================================================================*/
13457bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13458{
13459 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13460 // TODO: add for ZSL HDR later
13461 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13462 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13463 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13464 return true;
13465 } else {
13466 LOGH("already post processed frame");
13467 return false;
13468 }
13469 }
13470 return needRotationReprocess();
13471}
13472
13473/*===========================================================================
13474 * FUNCTION : needJpegExifRotation
13475 *
13476 * DESCRIPTION: if rotation from jpeg is needed
13477 *
13478 * PARAMETERS : none
13479 *
13480 * RETURN : true: needed
13481 * false: no need
13482 *==========================================================================*/
13483bool QCamera3HardwareInterface::needJpegExifRotation()
13484{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013485 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013486 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13487 LOGD("Need use Jpeg EXIF Rotation");
13488 return true;
13489 }
13490 return false;
13491}
13492
13493/*===========================================================================
13494 * FUNCTION : addOfflineReprocChannel
13495 *
13496 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13497 * coming from input channel
13498 *
13499 * PARAMETERS :
13500 * @config : reprocess configuration
13501 * @inputChHandle : pointer to the input (source) channel
13502 *
13503 *
13504 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13505 *==========================================================================*/
13506QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13507 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13508{
13509 int32_t rc = NO_ERROR;
13510 QCamera3ReprocessChannel *pChannel = NULL;
13511
13512 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013513 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13514 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013515 if (NULL == pChannel) {
13516 LOGE("no mem for reprocess channel");
13517 return NULL;
13518 }
13519
13520 rc = pChannel->initialize(IS_TYPE_NONE);
13521 if (rc != NO_ERROR) {
13522 LOGE("init reprocess channel failed, ret = %d", rc);
13523 delete pChannel;
13524 return NULL;
13525 }
13526
13527 // pp feature config
13528 cam_pp_feature_config_t pp_config;
13529 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13530
13531 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13532 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13533 & CAM_QCOM_FEATURE_DSDN) {
13534 //Use CPP CDS in case h/w supports it.
13535 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13536 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13537 }
13538 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13539 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13540 }
13541
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013542 if (config.hdr_param.hdr_enable) {
13543 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13544 pp_config.hdr_param = config.hdr_param;
13545 }
13546
13547 if (mForceHdrSnapshot) {
13548 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13549 pp_config.hdr_param.hdr_enable = 1;
13550 pp_config.hdr_param.hdr_need_1x = 0;
13551 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13552 }
13553
Thierry Strudel3d639192016-09-09 11:52:26 -070013554 rc = pChannel->addReprocStreamsFromSource(pp_config,
13555 config,
13556 IS_TYPE_NONE,
13557 mMetadataChannel);
13558
13559 if (rc != NO_ERROR) {
13560 delete pChannel;
13561 return NULL;
13562 }
13563 return pChannel;
13564}
13565
13566/*===========================================================================
13567 * FUNCTION : getMobicatMask
13568 *
13569 * DESCRIPTION: returns mobicat mask
13570 *
13571 * PARAMETERS : none
13572 *
13573 * RETURN : mobicat mask
13574 *
13575 *==========================================================================*/
13576uint8_t QCamera3HardwareInterface::getMobicatMask()
13577{
13578 return m_MobicatMask;
13579}
13580
13581/*===========================================================================
13582 * FUNCTION : setMobicat
13583 *
13584 * DESCRIPTION: set Mobicat on/off.
13585 *
13586 * PARAMETERS :
13587 * @params : none
13588 *
13589 * RETURN : int32_t type of status
13590 * NO_ERROR -- success
13591 * non-zero failure code
13592 *==========================================================================*/
13593int32_t QCamera3HardwareInterface::setMobicat()
13594{
13595 char value [PROPERTY_VALUE_MAX];
13596 property_get("persist.camera.mobicat", value, "0");
13597 int32_t ret = NO_ERROR;
13598 uint8_t enableMobi = (uint8_t)atoi(value);
13599
13600 if (enableMobi) {
13601 tune_cmd_t tune_cmd;
13602 tune_cmd.type = SET_RELOAD_CHROMATIX;
13603 tune_cmd.module = MODULE_ALL;
13604 tune_cmd.value = TRUE;
13605 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13606 CAM_INTF_PARM_SET_VFE_COMMAND,
13607 tune_cmd);
13608
13609 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13610 CAM_INTF_PARM_SET_PP_COMMAND,
13611 tune_cmd);
13612 }
13613 m_MobicatMask = enableMobi;
13614
13615 return ret;
13616}
13617
13618/*===========================================================================
13619* FUNCTION : getLogLevel
13620*
13621* DESCRIPTION: Reads the log level property into a variable
13622*
13623* PARAMETERS :
13624* None
13625*
13626* RETURN :
13627* None
13628*==========================================================================*/
13629void QCamera3HardwareInterface::getLogLevel()
13630{
13631 char prop[PROPERTY_VALUE_MAX];
13632 uint32_t globalLogLevel = 0;
13633
13634 property_get("persist.camera.hal.debug", prop, "0");
13635 int val = atoi(prop);
13636 if (0 <= val) {
13637 gCamHal3LogLevel = (uint32_t)val;
13638 }
13639
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013640 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013641 gKpiDebugLevel = atoi(prop);
13642
13643 property_get("persist.camera.global.debug", prop, "0");
13644 val = atoi(prop);
13645 if (0 <= val) {
13646 globalLogLevel = (uint32_t)val;
13647 }
13648
13649 /* Highest log level among hal.logs and global.logs is selected */
13650 if (gCamHal3LogLevel < globalLogLevel)
13651 gCamHal3LogLevel = globalLogLevel;
13652
13653 return;
13654}
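// Illustrative usage (assumes the standard Android property tooling):
//   adb shell setprop persist.camera.hal.debug 3
//   adb shell setprop persist.camera.global.debug 2
//   adb shell dumpsys media.camera   // dump() re-reads the properties above
// The effective HAL log level is the higher of hal.debug and global.debug.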
13655
13656/*===========================================================================
13657 * FUNCTION : validateStreamRotations
13658 *
13659 * DESCRIPTION: Check if the rotations requested are supported
13660 *
13661 * PARAMETERS :
13662 * @stream_list : streams to be configured
13663 *
13664 * RETURN : NO_ERROR on success
13665 * -EINVAL on failure
13666 *
13667 *==========================================================================*/
13668int QCamera3HardwareInterface::validateStreamRotations(
13669 camera3_stream_configuration_t *streamList)
13670{
13671 int rc = NO_ERROR;
13672
13673 /*
13674 * Loop through all streams requested in configuration
13675 * Check if unsupported rotations have been requested on any of them
13676 */
13677 for (size_t j = 0; j < streamList->num_streams; j++){
13678 camera3_stream_t *newStream = streamList->streams[j];
13679
Emilian Peev35ceeed2017-06-29 11:58:56 -070013680 switch(newStream->rotation) {
13681 case CAMERA3_STREAM_ROTATION_0:
13682 case CAMERA3_STREAM_ROTATION_90:
13683 case CAMERA3_STREAM_ROTATION_180:
13684 case CAMERA3_STREAM_ROTATION_270:
13685 //Expected values
13686 break;
13687 default:
13688 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
13689 "type:%d and stream format:%d", __func__,
13690 newStream->rotation, newStream->stream_type,
13691 newStream->format);
13692 return -EINVAL;
13693 }
13694
Thierry Strudel3d639192016-09-09 11:52:26 -070013695 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13696 bool isImplDef = (newStream->format ==
13697 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13698 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13699 isImplDef);
13700
13701 if (isRotated && (!isImplDef || isZsl)) {
13702 LOGE("Error: Unsupported rotation of %d requested for stream"
13703 "type:%d and stream format:%d",
13704 newStream->rotation, newStream->stream_type,
13705 newStream->format);
13706 rc = -EINVAL;
13707 break;
13708 }
13709 }
13710
13711 return rc;
13712}
13713
13714/*===========================================================================
13715* FUNCTION : getFlashInfo
13716*
13717* DESCRIPTION: Retrieve information about whether the device has a flash.
13718*
13719* PARAMETERS :
13720* @cameraId : Camera id to query
13721* @hasFlash : Boolean indicating whether there is a flash device
13722* associated with given camera
13723* @flashNode : If a flash device exists, this will be its device node.
13724*
13725* RETURN :
13726* None
13727*==========================================================================*/
13728void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13729 bool& hasFlash,
13730 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13731{
13732 cam_capability_t* camCapability = gCamCapability[cameraId];
13733 if (NULL == camCapability) {
13734 hasFlash = false;
13735 flashNode[0] = '\0';
13736 } else {
13737 hasFlash = camCapability->flash_available;
13738 strlcpy(flashNode,
13739 (char*)camCapability->flash_dev_name,
13740 QCAMERA_MAX_FILEPATH_LENGTH);
13741 }
13742}
13743
13744/*===========================================================================
13745* FUNCTION : getEepromVersionInfo
13746*
13747* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13748*
13749* PARAMETERS : None
13750*
13751* RETURN : string describing EEPROM version
13752* "\0" if no such info available
13753*==========================================================================*/
13754const char *QCamera3HardwareInterface::getEepromVersionInfo()
13755{
13756 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13757}
13758
13759/*===========================================================================
13760* FUNCTION : getLdafCalib
13761*
13762* DESCRIPTION: Retrieve Laser AF calibration data
13763*
13764* PARAMETERS : None
13765*
13766 * RETURN : Pointer to two uint32_t values describing laser AF calibration data
13767* NULL if none is available.
13768*==========================================================================*/
13769const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13770{
13771 if (mLdafCalibExist) {
13772 return &mLdafCalib[0];
13773 } else {
13774 return NULL;
13775 }
13776}
13777
13778/*===========================================================================
13779 * FUNCTION : dynamicUpdateMetaStreamInfo
13780 *
13781 * DESCRIPTION: This function:
13782 * (1) stops all the channels
13783 * (2) returns error on pending requests and buffers
13784 * (3) sends metastream_info in setparams
13785 * (4) starts all channels
13786 * This is useful when sensor has to be restarted to apply any
13787 * settings such as frame rate from a different sensor mode
13788 *
13789 * PARAMETERS : None
13790 *
13791 * RETURN : NO_ERROR on success
13792 * Error codes on failure
13793 *
13794 *==========================================================================*/
13795int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13796{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013797 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013798 int rc = NO_ERROR;
13799
13800 LOGD("E");
13801
13802 rc = stopAllChannels();
13803 if (rc < 0) {
13804 LOGE("stopAllChannels failed");
13805 return rc;
13806 }
13807
13808 rc = notifyErrorForPendingRequests();
13809 if (rc < 0) {
13810 LOGE("notifyErrorForPendingRequests failed");
13811 return rc;
13812 }
13813
13814 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13815 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13816 "Format:%d",
13817 mStreamConfigInfo.type[i],
13818 mStreamConfigInfo.stream_sizes[i].width,
13819 mStreamConfigInfo.stream_sizes[i].height,
13820 mStreamConfigInfo.postprocess_mask[i],
13821 mStreamConfigInfo.format[i]);
13822 }
13823
13824 /* Send meta stream info once again so that ISP can start */
13825 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13826 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13827 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13828 mParameters);
13829 if (rc < 0) {
13830 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13831 }
13832
13833 rc = startAllChannels();
13834 if (rc < 0) {
13835 LOGE("startAllChannels failed");
13836 return rc;
13837 }
13838
13839 LOGD("X");
13840 return rc;
13841}
13842
13843/*===========================================================================
13844 * FUNCTION : stopAllChannels
13845 *
13846 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13847 *
13848 * PARAMETERS : None
13849 *
13850 * RETURN : NO_ERROR on success
13851 * Error codes on failure
13852 *
13853 *==========================================================================*/
13854int32_t QCamera3HardwareInterface::stopAllChannels()
13855{
13856 int32_t rc = NO_ERROR;
13857
13858 LOGD("Stopping all channels");
13859 // Stop the Streams/Channels
13860 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13861 it != mStreamInfo.end(); it++) {
13862 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13863 if (channel) {
13864 channel->stop();
13865 }
13866 (*it)->status = INVALID;
13867 }
13868
13869 if (mSupportChannel) {
13870 mSupportChannel->stop();
13871 }
13872 if (mAnalysisChannel) {
13873 mAnalysisChannel->stop();
13874 }
13875 if (mRawDumpChannel) {
13876 mRawDumpChannel->stop();
13877 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013878 if (mHdrPlusRawSrcChannel) {
13879 mHdrPlusRawSrcChannel->stop();
13880 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013881 if (mMetadataChannel) {
13882 /* If content of mStreamInfo is not 0, there is metadata stream */
13883 mMetadataChannel->stop();
13884 }
13885
13886 LOGD("All channels stopped");
13887 return rc;
13888}
13889
13890/*===========================================================================
13891 * FUNCTION : startAllChannels
13892 *
13893 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13894 *
13895 * PARAMETERS : None
13896 *
13897 * RETURN : NO_ERROR on success
13898 * Error codes on failure
13899 *
13900 *==========================================================================*/
13901int32_t QCamera3HardwareInterface::startAllChannels()
13902{
13903 int32_t rc = NO_ERROR;
13904
13905 LOGD("Start all channels ");
13906 // Start the Streams/Channels
13907 if (mMetadataChannel) {
13908 /* If content of mStreamInfo is not 0, there is metadata stream */
13909 rc = mMetadataChannel->start();
13910 if (rc < 0) {
13911 LOGE("META channel start failed");
13912 return rc;
13913 }
13914 }
13915 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13916 it != mStreamInfo.end(); it++) {
13917 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13918 if (channel) {
13919 rc = channel->start();
13920 if (rc < 0) {
13921 LOGE("channel start failed");
13922 return rc;
13923 }
13924 }
13925 }
13926 if (mAnalysisChannel) {
13927 mAnalysisChannel->start();
13928 }
13929 if (mSupportChannel) {
13930 rc = mSupportChannel->start();
13931 if (rc < 0) {
13932 LOGE("Support channel start failed");
13933 return rc;
13934 }
13935 }
13936 if (mRawDumpChannel) {
13937 rc = mRawDumpChannel->start();
13938 if (rc < 0) {
13939 LOGE("RAW dump channel start failed");
13940 return rc;
13941 }
13942 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013943 if (mHdrPlusRawSrcChannel) {
13944 rc = mHdrPlusRawSrcChannel->start();
13945 if (rc < 0) {
13946 LOGE("HDR+ RAW channel start failed");
13947 return rc;
13948 }
13949 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013950
13951 LOGD("All channels started");
13952 return rc;
13953}
13954
13955/*===========================================================================
13956 * FUNCTION : notifyErrorForPendingRequests
13957 *
13958 * DESCRIPTION: This function sends error for all the pending requests/buffers
13959 *
13960 * PARAMETERS : None
13961 *
13962 * RETURN : Error codes
13963 * NO_ERROR on success
13964 *
13965 *==========================================================================*/
13966int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13967{
Emilian Peev7650c122017-01-19 08:24:33 -080013968 notifyErrorFoPendingDepthData(mDepthChannel);
13969
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013970 auto pendingRequest = mPendingRequestsList.begin();
13971 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070013972
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013973 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
13974 // buffers (for which buffers aren't sent yet).
13975 while (pendingRequest != mPendingRequestsList.end() ||
13976 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
13977 if (pendingRequest == mPendingRequestsList.end() ||
13978 pendingBuffer->frame_number < pendingRequest->frame_number) {
13979 // If metadata for this frame was sent, notify about a buffer error and returns buffers
13980 // with error.
13981 for (auto &info : pendingBuffer->mPendingBufferList) {
13982 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070013983 camera3_notify_msg_t notify_msg;
13984 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13985 notify_msg.type = CAMERA3_MSG_ERROR;
13986 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013987 notify_msg.message.error.error_stream = info.stream;
13988 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013989 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013990
13991 camera3_stream_buffer_t buffer = {};
13992 buffer.acquire_fence = -1;
13993 buffer.release_fence = -1;
13994 buffer.buffer = info.buffer;
13995 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
13996 buffer.stream = info.stream;
13997 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070013998 }
13999
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014000 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14001 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14002 pendingBuffer->frame_number > pendingRequest->frame_number) {
14003 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014004 camera3_notify_msg_t notify_msg;
14005 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14006 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014007 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14008 notify_msg.message.error.error_stream = nullptr;
14009 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014010 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014011
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014012 if (pendingRequest->input_buffer != nullptr) {
14013 camera3_capture_result result = {};
14014 result.frame_number = pendingRequest->frame_number;
14015 result.result = nullptr;
14016 result.input_buffer = pendingRequest->input_buffer;
14017 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014018 }
14019
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014020 mShutterDispatcher.clear(pendingRequest->frame_number);
14021 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14022 } else {
14023 // If both buffers and result metadata weren't sent yet, notify about a request error
14024 // and return buffers with error.
14025 for (auto &info : pendingBuffer->mPendingBufferList) {
14026 camera3_notify_msg_t notify_msg;
14027 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14028 notify_msg.type = CAMERA3_MSG_ERROR;
14029 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14030 notify_msg.message.error.error_stream = info.stream;
14031 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14032 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014033
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014034 camera3_stream_buffer_t buffer = {};
14035 buffer.acquire_fence = -1;
14036 buffer.release_fence = -1;
14037 buffer.buffer = info.buffer;
14038 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14039 buffer.stream = info.stream;
14040 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14041 }
14042
14043 if (pendingRequest->input_buffer != nullptr) {
14044 camera3_capture_result result = {};
14045 result.frame_number = pendingRequest->frame_number;
14046 result.result = nullptr;
14047 result.input_buffer = pendingRequest->input_buffer;
14048 orchestrateResult(&result);
14049 }
14050
14051 mShutterDispatcher.clear(pendingRequest->frame_number);
14052 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14053 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014054 }
14055 }
14056
14057 /* Reset pending frame Drop list and requests list */
14058 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014059 mShutterDispatcher.clear();
14060 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014061 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014062 LOGH("Cleared all the pending buffers ");
14063
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014064 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014065}
14066
14067bool QCamera3HardwareInterface::isOnEncoder(
14068 const cam_dimension_t max_viewfinder_size,
14069 uint32_t width, uint32_t height)
14070{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014071 return ((width > (uint32_t)max_viewfinder_size.width) ||
14072 (height > (uint32_t)max_viewfinder_size.height) ||
14073 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14074 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014075}
14076
14077/*===========================================================================
14078 * FUNCTION : setBundleInfo
14079 *
14080 * DESCRIPTION: Set bundle info for all streams that are bundle.
14081 *
14082 * PARAMETERS : None
14083 *
14084 * RETURN : NO_ERROR on success
14085 * Error codes on failure
14086 *==========================================================================*/
14087int32_t QCamera3HardwareInterface::setBundleInfo()
14088{
14089 int32_t rc = NO_ERROR;
14090
14091 if (mChannelHandle) {
14092 cam_bundle_config_t bundleInfo;
14093 memset(&bundleInfo, 0, sizeof(bundleInfo));
14094 rc = mCameraHandle->ops->get_bundle_info(
14095 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14096 if (rc != NO_ERROR) {
14097 LOGE("get_bundle_info failed");
14098 return rc;
14099 }
14100 if (mAnalysisChannel) {
14101 mAnalysisChannel->setBundleInfo(bundleInfo);
14102 }
14103 if (mSupportChannel) {
14104 mSupportChannel->setBundleInfo(bundleInfo);
14105 }
14106 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14107 it != mStreamInfo.end(); it++) {
14108 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14109 channel->setBundleInfo(bundleInfo);
14110 }
14111 if (mRawDumpChannel) {
14112 mRawDumpChannel->setBundleInfo(bundleInfo);
14113 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014114 if (mHdrPlusRawSrcChannel) {
14115 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14116 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014117 }
14118
14119 return rc;
14120}
14121
14122/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014123 * FUNCTION : setInstantAEC
14124 *
14125 * DESCRIPTION: Set Instant AEC related params.
14126 *
14127 * PARAMETERS :
14128 * @meta: CameraMetadata reference
14129 *
14130 * RETURN : NO_ERROR on success
14131 * Error codes on failure
14132 *==========================================================================*/
14133int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14134{
14135 int32_t rc = NO_ERROR;
14136 uint8_t val = 0;
14137 char prop[PROPERTY_VALUE_MAX];
14138
14139 // First try to configure instant AEC from framework metadata
14140 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14141 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14142 }
14143
14144 // If framework did not set this value, try to read from set prop.
14145 if (val == 0) {
14146 memset(prop, 0, sizeof(prop));
14147 property_get("persist.camera.instant.aec", prop, "0");
14148 val = (uint8_t)atoi(prop);
14149 }
14150
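    // Illustrative override (the value "1" is hypothetical; the valid range is
    // [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX) as checked below):
    //   adb shell setprop persist.camera.instant.aec 1
    //   adb shell setprop persist.camera.ae.instant.bound 10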
14151 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14152 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14153 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14154 mInstantAEC = val;
14155 mInstantAECSettledFrameNumber = 0;
14156 mInstantAecFrameIdxCount = 0;
14157 LOGH("instantAEC value set %d",val);
14158 if (mInstantAEC) {
14159 memset(prop, 0, sizeof(prop));
14160 property_get("persist.camera.ae.instant.bound", prop, "10");
14161 int32_t aec_frame_skip_cnt = atoi(prop);
14162 if (aec_frame_skip_cnt >= 0) {
14163 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14164 } else {
14165 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14166 rc = BAD_VALUE;
14167 }
14168 }
14169 } else {
14170 LOGE("Bad instant aec value set %d", val);
14171 rc = BAD_VALUE;
14172 }
14173 return rc;
14174}
14175
14176/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014177 * FUNCTION : get_num_overall_buffers
14178 *
14179 * DESCRIPTION: Estimate number of pending buffers across all requests.
14180 *
14181 * PARAMETERS : None
14182 *
14183 * RETURN : Number of overall pending buffers
14184 *
14185 *==========================================================================*/
14186uint32_t PendingBuffersMap::get_num_overall_buffers()
14187{
14188 uint32_t sum_buffers = 0;
14189 for (auto &req : mPendingBuffersInRequest) {
14190 sum_buffers += req.mPendingBufferList.size();
14191 }
14192 return sum_buffers;
14193}
14194
14195/*===========================================================================
14196 * FUNCTION : removeBuf
14197 *
14198 * DESCRIPTION: Remove a matching buffer from tracker.
14199 *
14200 * PARAMETERS : @buffer: image buffer for the callback
14201 *
14202 * RETURN : None
14203 *
14204 *==========================================================================*/
14205void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14206{
14207 bool buffer_found = false;
14208 for (auto req = mPendingBuffersInRequest.begin();
14209 req != mPendingBuffersInRequest.end(); req++) {
14210 for (auto k = req->mPendingBufferList.begin();
14211 k != req->mPendingBufferList.end(); k++ ) {
14212 if (k->buffer == buffer) {
14213 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14214 req->frame_number, buffer);
14215 k = req->mPendingBufferList.erase(k);
14216 if (req->mPendingBufferList.empty()) {
14217 // Remove this request from Map
14218 req = mPendingBuffersInRequest.erase(req);
14219 }
14220 buffer_found = true;
14221 break;
14222 }
14223 }
14224 if (buffer_found) {
14225 break;
14226 }
14227 }
14228 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14229 get_num_overall_buffers());
14230}
14231
14232/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014233 * FUNCTION : getBufErrStatus
14234 *
14235 * DESCRIPTION: get buffer error status
14236 *
14237 * PARAMETERS : @buffer: buffer handle
14238 *
14239 * RETURN : Error status
14240 *
14241 *==========================================================================*/
14242int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14243{
14244 for (auto& req : mPendingBuffersInRequest) {
14245 for (auto& k : req.mPendingBufferList) {
14246 if (k.buffer == buffer)
14247 return k.bufStatus;
14248 }
14249 }
14250 return CAMERA3_BUFFER_STATUS_OK;
14251}
14252
14253/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014254 * FUNCTION : setPAAFSupport
14255 *
14256 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14257 * feature mask according to stream type and filter
14258 * arrangement
14259 *
14260 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14261 * @stream_type: stream type
14262 * @filter_arrangement: filter arrangement
14263 *
14264 * RETURN : None
14265 *==========================================================================*/
14266void QCamera3HardwareInterface::setPAAFSupport(
14267 cam_feature_mask_t& feature_mask,
14268 cam_stream_type_t stream_type,
14269 cam_color_filter_arrangement_t filter_arrangement)
14270{
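    // Note: PAAF (preview-assisted AF) is enabled for Bayer sensors on
    // preview/analysis/video streams (unless CAM_QTI_FEATURE_PPEISCORE already
    // owns the stream), and for mono (Y-only) sensors on the analysis stream only.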
Thierry Strudel3d639192016-09-09 11:52:26 -070014271 switch (filter_arrangement) {
14272 case CAM_FILTER_ARRANGEMENT_RGGB:
14273 case CAM_FILTER_ARRANGEMENT_GRBG:
14274 case CAM_FILTER_ARRANGEMENT_GBRG:
14275 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014276 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14277 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014278 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014279 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14280 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014281 }
14282 break;
14283 case CAM_FILTER_ARRANGEMENT_Y:
14284 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14285 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14286 }
14287 break;
14288 default:
14289 break;
14290 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014291 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14292 feature_mask, stream_type, filter_arrangement);
14293
14294
Thierry Strudel3d639192016-09-09 11:52:26 -070014295}
14296
14297/*===========================================================================
14298* FUNCTION : getSensorMountAngle
14299*
14300* DESCRIPTION: Retrieve sensor mount angle
14301*
14302* PARAMETERS : None
14303*
14304* RETURN : sensor mount angle in uint32_t
14305*==========================================================================*/
14306uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14307{
14308 return gCamCapability[mCameraId]->sensor_mount_angle;
14309}
14310
14311/*===========================================================================
14312* FUNCTION : getRelatedCalibrationData
14313*
14314* DESCRIPTION: Retrieve related system calibration data
14315*
14316* PARAMETERS : None
14317*
14318* RETURN : Pointer of related system calibration data
14319*==========================================================================*/
14320const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14321{
14322 return (const cam_related_system_calibration_data_t *)
14323 &(gCamCapability[mCameraId]->related_cam_calibration);
14324}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014325
14326/*===========================================================================
14327 * FUNCTION : is60HzZone
14328 *
14329 * DESCRIPTION: Whether the device is in a region with 60Hz mains (electricity) frequency
14330 *
14331 * PARAMETERS : None
14332 *
14333 * RETURN : True if in 60Hz zone, False otherwise
14334 *==========================================================================*/
14335bool QCamera3HardwareInterface::is60HzZone()
14336{
14337 time_t t = time(NULL);
14338 struct tm lt;
14339
14340 struct tm* r = localtime_r(&t, &lt);
14341
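    // Heuristic: UTC offsets roughly between -2h and +8h cover Europe, Africa and
    // most of Asia, which are predominantly 50Hz regions; offsets outside that
    // window (the Americas, East Asia/Oceania) are treated as 60Hz. If
    // localtime_r() fails, 60Hz is assumed as well.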
14342 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14343 return true;
14344 else
14345 return false;
14346}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014347
14348/*===========================================================================
14349 * FUNCTION : adjustBlackLevelForCFA
14350 *
14351 * DESCRIPTION: Reorder the black level pattern from RGGB order into the
14352 * order of the sensor's Bayer CFA (Color Filter Array).
14353 *
14354 * PARAMETERS : @input: black level pattern in the order of RGGB
14355 * @output: black level pattern in the order of CFA
14356 * @color_arrangement: CFA color arrangement
14357 *
14358 * RETURN : None
14359 *==========================================================================*/
14360template<typename T>
14361void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14362 T input[BLACK_LEVEL_PATTERN_CNT],
14363 T output[BLACK_LEVEL_PATTERN_CNT],
14364 cam_color_filter_arrangement_t color_arrangement)
14365{
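    // Note: 'input' is ordered R, Gr, Gb, B (RGGB); 'output' follows the readout
    // order of the given CFA. For example, a GRBG sensor yields {Gr, R, B, Gb}.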
14366 switch (color_arrangement) {
14367 case CAM_FILTER_ARRANGEMENT_GRBG:
14368 output[0] = input[1];
14369 output[1] = input[0];
14370 output[2] = input[3];
14371 output[3] = input[2];
14372 break;
14373 case CAM_FILTER_ARRANGEMENT_GBRG:
14374 output[0] = input[2];
14375 output[1] = input[3];
14376 output[2] = input[0];
14377 output[3] = input[1];
14378 break;
14379 case CAM_FILTER_ARRANGEMENT_BGGR:
14380 output[0] = input[3];
14381 output[1] = input[2];
14382 output[2] = input[1];
14383 output[3] = input[0];
14384 break;
14385 case CAM_FILTER_ARRANGEMENT_RGGB:
14386 output[0] = input[0];
14387 output[1] = input[1];
14388 output[2] = input[2];
14389 output[3] = input[3];
14390 break;
14391 default:
14392 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14393 break;
14394 }
14395}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014396
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014397void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14398 CameraMetadata &resultMetadata,
14399 std::shared_ptr<metadata_buffer_t> settings)
14400{
14401 if (settings == nullptr) {
14402 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14403 return;
14404 }
14405
14406 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14407 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14408 }
14409
14410 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14411 String8 str((const char *)gps_methods);
14412 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14413 }
14414
14415 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14416 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14417 }
14418
14419 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14420 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14421 }
14422
14423 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14424 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14425 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14426 }
14427
14428 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14429 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14430 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14431 }
14432
14433 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14434 int32_t fwk_thumb_size[2];
14435 fwk_thumb_size[0] = thumb_size->width;
14436 fwk_thumb_size[1] = thumb_size->height;
14437 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14438 }
14439
14440 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14441 uint8_t fwk_intent = intent[0];
14442 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14443 }
14444}
14445
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014446bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14447 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14448 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014449{
14450 if (hdrPlusRequest == nullptr) return false;
14451
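    // Note: a request is routed to HDR+ only when noise reduction and edge modes
    // are both HIGH_QUALITY and the request has exactly one BLOB (JPEG) output
    // buffer; otherwise it falls through to the regular capture pipeline.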
14452 // Check noise reduction mode is high quality.
14453 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14454 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14455 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014456 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14457 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014458 return false;
14459 }
14460
14461 // Check edge mode is high quality.
14462 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14463 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14464 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14465 return false;
14466 }
14467
14468 if (request.num_output_buffers != 1 ||
14469 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14470 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014471 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14472 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14473 request.output_buffers[0].stream->width,
14474 request.output_buffers[0].stream->height,
14475 request.output_buffers[0].stream->format);
14476 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014477 return false;
14478 }
14479
14480 // Get a YUV buffer from pic channel.
14481 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14482 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14483 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14484 if (res != OK) {
14485 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14486 __FUNCTION__, strerror(-res), res);
14487 return false;
14488 }
14489
14490 pbcamera::StreamBuffer buffer;
14491 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014492 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014493 buffer.data = yuvBuffer->buffer;
14494 buffer.dataSize = yuvBuffer->frame_len;
14495
14496 pbcamera::CaptureRequest pbRequest;
14497 pbRequest.id = request.frame_number;
14498 pbRequest.outputBuffers.push_back(buffer);
14499
14500 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014501 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014502 if (res != OK) {
14503 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14504 strerror(-res), res);
14505 return false;
14506 }
14507
14508 hdrPlusRequest->yuvBuffer = yuvBuffer;
14509 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14510
14511 return true;
14512}
14513
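// Kicks off an asynchronous open of the HDR+ client through the Easel manager
// client. A no-op if the client is already open or an open is already in
// flight; onOpened() or onOpenFailed() will be called with the outcome.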
status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
{
    if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
        return OK;
    }

    status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
    if (res != OK) {
        ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }
    gHdrPlusClientOpening = true;

    return OK;
}

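// Enables HDR+ (ZSL) mode: opens the HDR+ client if needed, configures the
// HDR+ streams, and tells Easel to start capturing ZSL raw buffers. Callers
// must hold the corresponding lock, as the Locked suffix indicates.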
status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
{
    status_t res;

    if (mHdrPlusModeEnabled) {
        return OK;
    }

    // Check if gHdrPlusClient is opened or being opened.
    if (gHdrPlusClient == nullptr) {
        if (gHdrPlusClientOpening) {
            // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
            return OK;
        }

        res = openHdrPlusClientAsyncLocked();
        if (res != OK) {
            ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }

        // When opening the HDR+ client completes, HDR+ mode will be enabled.
        return OK;
    }

    // Configure streams for HDR+.
    res = configureHdrPlusStreamsLocked();
    if (res != OK) {
        LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
    res = gHdrPlusClient->setZslHdrPlusMode(true);
    if (res != OK) {
        LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    mHdrPlusModeEnabled = true;
    ALOGD("%s: HDR+ mode enabled", __FUNCTION__);

    return OK;
}

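// Disables HDR+ (ZSL) mode and closes the HDR+ client so Easel can drop back
// into low power mode. Safe to call when HDR+ was never enabled.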
void QCamera3HardwareInterface::disableHdrPlusModeLocked()
{
    // Disable HDR+ mode.
    if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
        status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
        if (res != OK) {
            ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
        }

        // Close HDR+ client so Easel can enter low power mode.
        gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
    }

    mHdrPlusModeEnabled = false;
    gHdrPlusClientOpening = false;
    ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
}

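// Builds the HDR+ input configuration (either HAL-provided RAW10 buffers or
// direct sensor MIPI input) and the YUV output configuration, then hands them
// to the HDR+ client via configureStreams().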
status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
{
    pbcamera::InputConfiguration inputConfig;
    std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
    status_t res = OK;

    // Configure HDR+ client streams.
    // Get input config.
    if (mHdrPlusRawSrcChannel) {
        // HDR+ input buffers will be provided by HAL.
        res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
                HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
        if (res != OK) {
            LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }

        inputConfig.isSensorInput = false;
    } else {
        // Sensor MIPI will send data to Easel.
        inputConfig.isSensorInput = true;
        inputConfig.sensorMode.cameraId = mCameraId;
        inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
        inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
        inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
        inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
        inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
        inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
        if (mSensorModeInfo.num_raw_bits != 10) {
            ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
                    mSensorModeInfo.num_raw_bits);
            return BAD_VALUE;
        }

        inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
    }

    // Get output configurations.
    // Easel may need to output RAW16 buffers if mRawChannel was created.
    // TODO: handle RAW16 outputs.

    // Easel may need to output YUV buffers if mPictureChannel was created.
    pbcamera::StreamConfiguration yuvOutputConfig;
    if (mPictureChannel != nullptr) {
        res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
                HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
        if (res != OK) {
            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }

        outputStreamConfigs.push_back(yuvOutputConfig);
    }

    // TODO: consider other channels for YUV output buffers.

    res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
    if (res != OK) {
        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    return OK;
}

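// EaselManagerClient callback: invoked when the asynchronous HDR+ client open
// completes. Takes ownership of the client, pushes the static metadata to it,
// and enables HDR+ mode, unless HDR+ was disabled while the open was pending.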
void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
{
    if (client == nullptr) {
        ALOGE("%s: Opened client is null.", __FUNCTION__);
        return;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
    ALOGI("%s: HDR+ client opened.", __FUNCTION__);

    Mutex::Autolock l(gHdrPlusClientLock);
    if (!gHdrPlusClientOpening) {
        ALOGW("%s: HDR+ was disabled while the HDR+ client was being opened.", __FUNCTION__);
        return;
    }

    gHdrPlusClient = std::move(client);
    gHdrPlusClientOpening = false;

    // Set static metadata.
    status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
    if (res != OK) {
        LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
                __FUNCTION__, strerror(-res), res);
        gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
        return;
    }

    // Enable HDR+ mode.
    res = enableHdrPlusModeLocked();
    if (res != OK) {
        LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
    }
}

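// EaselManagerClient callback: invoked when the asynchronous HDR+ client open
// fails. Clears the "opening" flag so a later attempt can retry.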
void QCamera3HardwareInterface::onOpenFailed(status_t err)
{
    ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
    Mutex::Autolock l(gHdrPlusClientLock);
    gHdrPlusClientOpening = false;
}

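// EaselManagerClient callback: invoked when the HDR+ client hits an
// unrecoverable error. Moves the HAL into the ERROR state and notifies the
// framework through the device-error path.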
void QCamera3HardwareInterface::onFatalError()
{
    ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);

    // Set HAL state to error.
    pthread_mutex_lock(&mMutex);
    mState = ERROR;
    pthread_mutex_unlock(&mMutex);

    handleCameraDeviceError();
}

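// HDR+ client callback: invoked when an HDR+ capture result is ready. Matches
// the result to the pending HDR+ request, hands the YUV buffer back to the pic
// channel for JPEG encoding, dispatches the shutter, and sends the updated
// result metadata to the framework.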
void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
        const camera_metadata_t &resultMetadata)
{
    if (result != nullptr) {
        if (result->outputBuffers.size() != 1) {
            ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
                    result->outputBuffers.size());
            return;
        }

        if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
            ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
                    result->outputBuffers[0].streamId);
            return;
        }

        // Find the pending HDR+ request.
        HdrPlusPendingRequest pendingRequest;
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            if (req == mHdrPlusPendingRequests.end()) {
                // Guard against a result for a request that is no longer pending.
                ALOGE("%s: Couldn't find a pending HDR+ request for request %d.", __FUNCTION__,
                        result->requestId);
                return;
            }
            pendingRequest = req->second;
        }

        // Update the result metadata with the settings of the HDR+ still capture request because
        // the result metadata belongs to a ZSL buffer.
        CameraMetadata metadata;
        metadata = &resultMetadata;
        updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
        camera_metadata_t* updatedResultMetadata = metadata.release();

        QCamera3PicChannel *picChannel =
                (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;

        // Check if dumping HDR+ YUV output is enabled.
        char prop[PROPERTY_VALUE_MAX];
        property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
        bool dumpYuvOutput = atoi(prop);

        if (dumpYuvOutput) {
            // Dump yuv buffer to a ppm file.
            pbcamera::StreamConfiguration outputConfig;
            status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
                    HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
            if (rc == OK) {
                char buf[FILENAME_MAX] = {};
                snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                        result->requestId, result->outputBuffers[0].streamId,
                        outputConfig.image.width, outputConfig.image.height);

                hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
            } else {
                LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
                        __FUNCTION__, strerror(-rc), rc);
            }
        }

        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
        auto halMetadata = std::make_shared<metadata_buffer_t>();
        clear_metadata_buffer(halMetadata.get());

        // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
        // encoding.
        status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
                halStreamId, /*minFrameDuration*/0);
        if (res == OK) {
            // Return the buffer to pic channel for encoding.
            picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
                    pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
                    halMetadata);
        } else {
            // Return the buffer without encoding.
            // TODO: This should not happen but we may want to report an error buffer to camera
            // service.
            picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
            ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
                    strerror(-res), res);
        }

        // Find the timestamp.
        camera_metadata_ro_entry_t entry;
        res = find_camera_metadata_ro_entry(updatedResultMetadata,
                ANDROID_SENSOR_TIMESTAMP, &entry);
        if (res != OK) {
            ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
                    __FUNCTION__, result->requestId, strerror(-res), res);
        } else {
            mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
        }

        // Send HDR+ metadata to framework.
        {
            pthread_mutex_lock(&mMutex);

            // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
            handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
            pthread_mutex_unlock(&mMutex);
        }

        // Remove the HDR+ pending request.
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            mHdrPlusPendingRequests.erase(req);
        }
    }
}

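// HDR+ client callback: invoked when an HDR+ capture request fails. Returns
// the YUV buffer to the pic channel, reports buffer errors for all pending
// buffers of that frame, and drops the corresponding pending request.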
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got an empty failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Remove the pending HDR+ request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
        if (pendingRequest != mHdrPlusPendingRequests.end()) {
            // Return the buffer to pic channel.
            QCamera3PicChannel *picChannel = (QCamera3PicChannel*)
                    pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
            picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());

            mHdrPlusPendingRequests.erase(pendingRequest);
        }
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

    // Send out buffer errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            streamBuffers.push_back(streamBuffer);

            // Send out error buffer event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffers->frame_number;
        result.num_output_buffers = streamBuffers.size();
        result.output_buffers = &streamBuffers[0];

        // Send out result with buffer errors.
        orchestrateResult(&result);

        // Remove pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}

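// ShutterDispatcher delivers shutter notifications to the framework in
// frame-number order. The HAL registers a frame with expectShutter() when the
// request is accepted and calls markShutterReady() once the sensor timestamp
// is known; a shutter is only sent after every earlier frame's shutter has
// gone out. A minimal usage sketch (hypothetical frame numbers, for
// illustration only):
//
//   dispatcher.expectShutter(100, /*isReprocess*/false);
//   dispatcher.expectShutter(101, /*isReprocess*/false);
//   dispatcher.markShutterReady(101, timestampNs);  // held back, 100 not ready
//   dispatcher.markShutterReady(100, timestampNs);  // sends 100, then 101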
ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

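// Registers a frame that is expected to produce a shutter notification.
// Reprocess frames are tracked separately from regular frames.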
void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
{
    std::lock_guard<std::mutex> lock(mLock);

    if (isReprocess) {
        mReprocessShutters.emplace(frameNumber, Shutter());
    } else {
        mShutters.emplace(frameNumber, Shutter());
    }
}

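// Marks a frame's shutter as ready with its timestamp and sends out all
// consecutive ready shutters, stopping at the first frame that is not ready.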
void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
{
    std::lock_guard<std::mutex> lock(mLock);

    std::map<uint32_t, Shutter> *shutters = nullptr;

    // Find the shutter entry.
    auto shutter = mShutters.find(frameNumber);
    if (shutter == mShutters.end()) {
        shutter = mReprocessShutters.find(frameNumber);
        if (shutter == mReprocessShutters.end()) {
            // Shutter was already sent.
            return;
        }
        shutters = &mReprocessShutters;
    } else {
        shutters = &mShutters;
    }

    // Make this frame's shutter ready.
    shutter->second.ready = true;
    shutter->second.timestamp = timestamp;

    // Iterate through the shutters and send them out until reaching one that's not ready yet.
    shutter = shutters->begin();
    while (shutter != shutters->end()) {
        if (!shutter->second.ready) {
            // If this shutter is not ready, the following shutters can't be sent.
            break;
        }

        camera3_notify_msg_t msg = {};
        msg.type = CAMERA3_MSG_SHUTTER;
        msg.message.shutter.frame_number = shutter->first;
        msg.message.shutter.timestamp = shutter->second.timestamp;
        mParent->orchestrateNotify(&msg);

        shutter = shutters->erase(shutter);
    }
}

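// Drops the shutter entry for a single frame without sending it.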
void ShutterDispatcher::clear(uint32_t frameNumber)
{
    std::lock_guard<std::mutex> lock(mLock);
    mShutters.erase(frameNumber);
    mReprocessShutters.erase(frameNumber);
}

void ShutterDispatcher::clear()
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale shutters.
    for (auto &shutter : mShutters) {
        ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }

    // Log errors for stale reprocess shutters.
    for (auto &shutter : mReprocessShutters) {
        ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }

    mShutters.clear();
    mReprocessShutters.clear();
}

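// OutputBufferDispatcher keeps each stream's output buffers in frame-number
// order, mirroring what ShutterDispatcher does for shutters. Streams are
// registered in configureStreams(), a buffer slot is reserved with
// expectBuffer() when a request arrives, and markBufferReady() releases
// buffers to the framework in order. A minimal usage sketch (hypothetical
// frame number, for illustration only):
//
//   dispatcher.expectBuffer(100, stream);
//   ...
//   dispatcher.markBufferReady(100, streamBuffer);  // sent once all earlier
//                                                   // frames of this stream are out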
OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

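// Resets the dispatcher for a new stream configuration, creating an empty
// frame-number -> buffer map for each configured stream.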
status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
{
    std::lock_guard<std::mutex> lock(mLock);
    mStreamBuffers.clear();
    if (!streamList) {
        ALOGE("%s: streamList is nullptr.", __FUNCTION__);
        return -EINVAL;
    }

    // Create a "frame-number -> buffer" map for each stream.
    for (uint32_t i = 0; i < streamList->num_streams; i++) {
        mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
    }

    return OK;
}

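// Reserves an entry for a buffer that the given stream is expected to return
// for this frame number. Fails if the stream was not configured.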
status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
        return -EINVAL;
    }

    // Create an unready buffer for this frame number.
    buffers->second.emplace(frameNumber, Buffer());
    return OK;
}

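// Marks a stream buffer as ready for the given frame number and sends out all
// consecutive ready buffers of that stream, stopping at the first buffer that
// is not ready yet.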
void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
        const camera3_stream_buffer_t &buffer)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the frame-number -> buffer map for the stream.
    auto buffers = mStreamBuffers.find(buffer.stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
        return;
    }

    // Find the unready buffer for this frame number and mark it ready.
    auto pendingBuffer = buffers->second.find(frameNumber);
    if (pendingBuffer == buffers->second.end()) {
        ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    pendingBuffer->second.ready = true;
    pendingBuffer->second.buffer = buffer;

    // Iterate through the buffers and send them out until reaching one that's not ready yet.
    pendingBuffer = buffers->second.begin();
    while (pendingBuffer != buffers->second.end()) {
        if (!pendingBuffer->second.ready) {
            // If this buffer is not ready, the following buffers can't be sent.
            break;
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffer->first;
        result.num_output_buffers = 1;
        result.output_buffers = &pendingBuffer->second.buffer;

        // Send out the result with the ready buffer.
        mParent->orchestrateResult(&result);

        pendingBuffer = buffers->second.erase(pendingBuffer);
    }
}

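// Logs and drops any buffers that were never returned; optionally also forgets
// the configured streams.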
void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale buffers.
    for (auto &buffers : mStreamBuffers) {
        for (auto &buffer : buffers.second) {
            ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
                    __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
        }
        buffers.second.clear();
    }

    if (clearConfiguredStreams) {
        mStreamBuffers.clear();
    }
}

}; //end namespace qcamera