blob: c299ea21f913b11a320593e870711a91600c5ed9 [file] [log] [blame]
Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
Shuzhen Wangf6890e02016-08-12 14:28:54 -070045#include <time.h>
Thierry Strudel3d639192016-09-09 11:52:26 -070046#include <sync/sync.h>
47#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080048#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070049
50// Display dependencies
51#include "qdMetaData.h"
52
53// Camera dependencies
54#include "android/QCamera3External.h"
55#include "util/QCameraFlash.h"
56#include "QCamera3HWI.h"
57#include "QCamera3VendorTags.h"
58#include "QCameraTrace.h"
59
Chien-Yu Chene687bd02016-12-07 18:30:26 -080060#include "HdrPlusClientUtils.h"
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070061#include "EaselManagerClient.h"
Chien-Yu Chene687bd02016-12-07 18:30:26 -080062
Thierry Strudel3d639192016-09-09 11:52:26 -070063extern "C" {
64#include "mm_camera_dbg.h"
65}
Shuzhen Wangfb961e52016-11-28 11:48:02 -080066#include "cam_cond.h"
Thierry Strudel3d639192016-09-09 11:52:26 -070067
Jiyong Parkd4caeb72017-06-12 17:16:36 +090068using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070069using namespace android;
70
71namespace qcamera {
72
73#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
74
75#define EMPTY_PIPELINE_DELAY 2
76#define PARTIAL_RESULT_COUNT 2
77#define FRAME_SKIP_DELAY 0
78
79#define MAX_VALUE_8BIT ((1<<8)-1)
80#define MAX_VALUE_10BIT ((1<<10)-1)
81#define MAX_VALUE_12BIT ((1<<12)-1)
82
83#define VIDEO_4K_WIDTH 3840
84#define VIDEO_4K_HEIGHT 2160
85
Jason Leeb9e76432017-03-10 17:14:19 -080086#define MAX_EIS_WIDTH 3840
87#define MAX_EIS_HEIGHT 2160
Thierry Strudel3d639192016-09-09 11:52:26 -070088
89#define MAX_RAW_STREAMS 1
90#define MAX_STALLING_STREAMS 1
91#define MAX_PROCESSED_STREAMS 3
92/* Batch mode is enabled only if FPS set is equal to or greater than this */
93#define MIN_FPS_FOR_BATCH_MODE (120)
94#define PREVIEW_FPS_FOR_HFR (30)
95#define DEFAULT_VIDEO_FPS (30.0)
Thierry Strudele80ad7c2016-12-06 10:16:27 -080096#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
Thierry Strudel3d639192016-09-09 11:52:26 -070097#define MAX_HFR_BATCH_SIZE (8)
98#define REGIONS_TUPLE_COUNT 5
99#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
Thierry Strudel3d639192016-09-09 11:52:26 -0700100// Set a threshold for detection of missing buffers //seconds
101#define MISSING_REQUEST_BUF_TIMEOUT 3
Chien-Yu Chene687bd02016-12-07 18:30:26 -0800102#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
Thierry Strudel3d639192016-09-09 11:52:26 -0700103#define FLUSH_TIMEOUT 3
104#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
105
106#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
107 CAM_QCOM_FEATURE_CROP |\
108 CAM_QCOM_FEATURE_ROTATION |\
109 CAM_QCOM_FEATURE_SHARPNESS |\
110 CAM_QCOM_FEATURE_SCALE |\
111 CAM_QCOM_FEATURE_CAC |\
112 CAM_QCOM_FEATURE_CDS )
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700113/* Per configuration size for static metadata length*/
114#define PER_CONFIGURATION_SIZE_3 (3)
Thierry Strudel3d639192016-09-09 11:52:26 -0700115
116#define TIMEOUT_NEVER -1
117
Jason Lee8ce36fa2017-04-19 19:40:37 -0700118/* Face rect indices */
119#define FACE_LEFT 0
120#define FACE_TOP 1
121#define FACE_RIGHT 2
122#define FACE_BOTTOM 3
123#define FACE_WEIGHT 4
124
Thierry Strudel04e026f2016-10-10 11:27:36 -0700125/* Face landmarks indices */
126#define LEFT_EYE_X 0
127#define LEFT_EYE_Y 1
128#define RIGHT_EYE_X 2
129#define RIGHT_EYE_Y 3
130#define MOUTH_X 4
131#define MOUTH_Y 5
132#define TOTAL_LANDMARK_INDICES 6
133
Zhijun He2a5df222017-04-04 18:20:38 -0700134// Max preferred zoom
Zhijun He76870072017-05-08 17:13:17 -0700135#define MAX_PREFERRED_ZOOM_RATIO 7.0
Zhijun He2a5df222017-04-04 18:20:38 -0700136
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700137// Whether to check for the GPU stride padding, or use the default
138//#define CHECK_GPU_PIXEL_ALIGNMENT
139
Thierry Strudel3d639192016-09-09 11:52:26 -0700140cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
141const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
142extern pthread_mutex_t gCamLock;
143volatile uint32_t gCamHal3LogLevel = 1;
144extern uint8_t gNumCameraSessions;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700145
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800146// Note that this doesn't support concurrent front and back camera b/35960155.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700147// The following Easel related variables must be protected by gHdrPlusClientLock.
148EaselManagerClient gEaselManagerClient;
149bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
150std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
151bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
Chien-Yu Chen77ccd022017-06-23 12:00:36 -0700152std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
Chien-Yu Chen509314b2017-04-07 15:27:55 -0700153bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -0700154bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700155
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800156// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
157bool gEaselBypassOnly;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700158
Chien-Yu Chen77ccd022017-06-23 12:00:36 -0700159std::mutex gHdrPlusClientLock; // Protect above Easel related variables.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700160
Thierry Strudel3d639192016-09-09 11:52:26 -0700161
162const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
163 {"On", CAM_CDS_MODE_ON},
164 {"Off", CAM_CDS_MODE_OFF},
165 {"Auto",CAM_CDS_MODE_AUTO}
166};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700167const QCamera3HardwareInterface::QCameraMap<
168 camera_metadata_enum_android_video_hdr_mode_t,
169 cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
170 { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
171 { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
172};
173
Thierry Strudel54dc9782017-02-15 12:12:10 -0800174const QCamera3HardwareInterface::QCameraMap<
175 camera_metadata_enum_android_binning_correction_mode_t,
176 cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
177 { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
178 { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
179};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700180
181const QCamera3HardwareInterface::QCameraMap<
182 camera_metadata_enum_android_ir_mode_t,
183 cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
184 {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
185 {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
186 {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
187};
Thierry Strudel3d639192016-09-09 11:52:26 -0700188
189const QCamera3HardwareInterface::QCameraMap<
190 camera_metadata_enum_android_control_effect_mode_t,
191 cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
192 { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
193 { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
194 { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
195 { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
196 { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
197 { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
198 { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
199 { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
200 { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
201};
202
203const QCamera3HardwareInterface::QCameraMap<
204 camera_metadata_enum_android_control_awb_mode_t,
205 cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
206 { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
207 { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
208 { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
209 { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
210 { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
211 { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
212 { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
213 { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
214 { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
215};
216
217const QCamera3HardwareInterface::QCameraMap<
218 camera_metadata_enum_android_control_scene_mode_t,
219 cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
220 { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
221 { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
222 { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
223 { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
224 { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
225 { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
226 { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
227 { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
228 { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
229 { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
230 { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
231 { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
232 { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
233 { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
234 { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800235 { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
236 { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
Thierry Strudel3d639192016-09-09 11:52:26 -0700237};
238
239const QCamera3HardwareInterface::QCameraMap<
240 camera_metadata_enum_android_control_af_mode_t,
241 cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
242 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
243 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
244 { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
245 { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
246 { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
247 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
248 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
249};
250
251const QCamera3HardwareInterface::QCameraMap<
252 camera_metadata_enum_android_color_correction_aberration_mode_t,
253 cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
254 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
255 CAM_COLOR_CORRECTION_ABERRATION_OFF },
256 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
257 CAM_COLOR_CORRECTION_ABERRATION_FAST },
258 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
259 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
260};
261
262const QCamera3HardwareInterface::QCameraMap<
263 camera_metadata_enum_android_control_ae_antibanding_mode_t,
264 cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
265 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
266 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
267 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
268 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
269};
270
271const QCamera3HardwareInterface::QCameraMap<
272 camera_metadata_enum_android_control_ae_mode_t,
273 cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
274 { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
275 { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
276 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
277 { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
278 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
279};
280
281const QCamera3HardwareInterface::QCameraMap<
282 camera_metadata_enum_android_flash_mode_t,
283 cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
284 { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
285 { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
286 { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
287};
288
289const QCamera3HardwareInterface::QCameraMap<
290 camera_metadata_enum_android_statistics_face_detect_mode_t,
291 cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
292 { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
293 { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
294 { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
295};
296
297const QCamera3HardwareInterface::QCameraMap<
298 camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
299 cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
300 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
301 CAM_FOCUS_UNCALIBRATED },
302 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
303 CAM_FOCUS_APPROXIMATE },
304 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
305 CAM_FOCUS_CALIBRATED }
306};
307
308const QCamera3HardwareInterface::QCameraMap<
309 camera_metadata_enum_android_lens_state_t,
310 cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
311 { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
312 { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
313};
314
315const int32_t available_thumbnail_sizes[] = {0, 0,
316 176, 144,
317 240, 144,
318 256, 144,
319 240, 160,
320 256, 154,
321 240, 240,
322 320, 240};
323
324const QCamera3HardwareInterface::QCameraMap<
325 camera_metadata_enum_android_sensor_test_pattern_mode_t,
326 cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
327 { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
328 { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
329 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
330 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
331 { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
332 { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
333};
334
335/* Since there is no mapping for all the options some Android enum are not listed.
336 * Also, the order in this list is important because while mapping from HAL to Android it will
337 * traverse from lower to higher index which means that for HAL values that are map to different
338 * Android values, the traverse logic will select the first one found.
339 */
340const QCamera3HardwareInterface::QCameraMap<
341 camera_metadata_enum_android_sensor_reference_illuminant1_t,
342 cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
343 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
344 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
345 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
346 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
347 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
348 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
349 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
350 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
351 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
352 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
353 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
354 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
355 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
356 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
357 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
358 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
359};
360
361const QCamera3HardwareInterface::QCameraMap<
362 int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
363 { 60, CAM_HFR_MODE_60FPS},
364 { 90, CAM_HFR_MODE_90FPS},
365 { 120, CAM_HFR_MODE_120FPS},
366 { 150, CAM_HFR_MODE_150FPS},
367 { 180, CAM_HFR_MODE_180FPS},
368 { 210, CAM_HFR_MODE_210FPS},
369 { 240, CAM_HFR_MODE_240FPS},
370 { 480, CAM_HFR_MODE_480FPS},
371};
372
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700373const QCamera3HardwareInterface::QCameraMap<
374 qcamera3_ext_instant_aec_mode_t,
375 cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
376 { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
377 { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
378 { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
379};
Thierry Strudel54dc9782017-02-15 12:12:10 -0800380
381const QCamera3HardwareInterface::QCameraMap<
382 qcamera3_ext_exposure_meter_mode_t,
383 cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
384 { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
385 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
386 { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
387 { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
388 { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
389 { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
390 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
391};
392
393const QCamera3HardwareInterface::QCameraMap<
394 qcamera3_ext_iso_mode_t,
395 cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
396 { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
397 { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
398 { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
399 { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
400 { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
401 { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
402 { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
403 { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
404};
405
Thierry Strudel3d639192016-09-09 11:52:26 -0700406camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
407 .initialize = QCamera3HardwareInterface::initialize,
408 .configure_streams = QCamera3HardwareInterface::configure_streams,
409 .register_stream_buffers = NULL,
410 .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
411 .process_capture_request = QCamera3HardwareInterface::process_capture_request,
412 .get_metadata_vendor_tag_ops = NULL,
413 .dump = QCamera3HardwareInterface::dump,
414 .flush = QCamera3HardwareInterface::flush,
415 .reserved = {0},
416};
417
418// initialise to some default value
419uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
420
Chien-Yu Chen509314b2017-04-07 15:27:55 -0700421static inline void logEaselEvent(const char *tag, const char *event) {
422 if (CC_UNLIKELY(gEaselProfilingEnabled)) {
423 struct timespec ts = {};
424 static int64_t kMsPerSec = 1000;
425 static int64_t kNsPerMs = 1000000;
426 status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
427 if (res != OK) {
428 ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
429 } else {
430 int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
431 ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
432 }
433 }
434}
435
Thierry Strudel3d639192016-09-09 11:52:26 -0700436/*===========================================================================
437 * FUNCTION : QCamera3HardwareInterface
438 *
439 * DESCRIPTION: constructor of QCamera3HardwareInterface
440 *
441 * PARAMETERS :
442 * @cameraId : camera ID
443 *
444 * RETURN : none
445 *==========================================================================*/
446QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
447 const camera_module_callbacks_t *callbacks)
448 : mCameraId(cameraId),
449 mCameraHandle(NULL),
450 mCameraInitialized(false),
451 mCallbackOps(NULL),
452 mMetadataChannel(NULL),
453 mPictureChannel(NULL),
454 mRawChannel(NULL),
455 mSupportChannel(NULL),
456 mAnalysisChannel(NULL),
457 mRawDumpChannel(NULL),
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700458 mHdrPlusRawSrcChannel(NULL),
Thierry Strudel3d639192016-09-09 11:52:26 -0700459 mDummyBatchChannel(NULL),
Emilian Peev7650c122017-01-19 08:24:33 -0800460 mDepthChannel(NULL),
Emilian Peev666f5142017-06-02 16:47:04 +0100461 mDepthCloudMode(CAM_PD_DATA_SKIP),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800462 mPerfLockMgr(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700463 mChannelHandle(0),
464 mFirstConfiguration(true),
465 mFlush(false),
466 mFlushPerf(false),
467 mParamHeap(NULL),
468 mParameters(NULL),
469 mPrevParameters(NULL),
470 m_bIsVideo(false),
471 m_bIs4KVideo(false),
472 m_bEisSupportedSize(false),
473 m_bEisEnable(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800474 m_bEis3PropertyEnabled(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700475 m_MobicatMask(0),
Chien-Yu Chen3f303522017-05-19 15:21:45 -0700476 mShutterDispatcher(this),
477 mOutputBufferDispatcher(this),
Thierry Strudel3d639192016-09-09 11:52:26 -0700478 mMinProcessedFrameDuration(0),
479 mMinJpegFrameDuration(0),
480 mMinRawFrameDuration(0),
481 mMetaFrameCount(0U),
482 mUpdateDebugLevel(false),
483 mCallbacks(callbacks),
484 mCaptureIntent(0),
485 mCacMode(0),
Shuzhen Wang2abea3d2016-03-31 11:09:27 -0700486 mHybridAeEnable(0),
Samuel Ha68ba5172016-12-15 18:41:12 -0800487 /* DevCamDebug metadata internal m control*/
488 mDevCamDebugMetaEnable(0),
489 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -0700490 mBatchSize(0),
491 mToBeQueuedVidBufs(0),
492 mHFRVideoFps(DEFAULT_VIDEO_FPS),
493 mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800494 mStreamConfig(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800495 mCommon(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700496 mFirstFrameNumberInBatch(0),
497 mNeedSensorRestart(false),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800498 mPreviewStarted(false),
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700499 mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
500 mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
Emilian Peev0f3c3162017-03-15 12:57:46 +0000501 mPDSupported(false),
502 mPDIndex(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700503 mInstantAEC(false),
504 mResetInstantAEC(false),
505 mInstantAECSettledFrameNumber(0),
506 mAecSkipDisplayFrameBound(0),
507 mInstantAecFrameIdxCount(0),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800508 mCurrFeatureState(0),
Thierry Strudel3d639192016-09-09 11:52:26 -0700509 mLdafCalibExist(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700510 mLastCustIntentFrmNum(-1),
Shuzhen Wang3c077d72017-04-20 22:48:59 -0700511 mFirstMetadataCallback(true),
Thierry Strudel3d639192016-09-09 11:52:26 -0700512 mState(CLOSED),
513 mIsDeviceLinked(false),
514 mIsMainCamera(true),
515 mLinkedCameraId(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700516 m_pDualCamCmdHeap(NULL),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800517 m_pDualCamCmdPtr(NULL),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800518 mHdrPlusModeEnabled(false),
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -0700519 mZslEnabled(false),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800520 mIsApInputUsedForHdrPlus(false),
521 mFirstPreviewIntentSeen(false),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800522 m_bSensorHDREnabled(false)
Thierry Strudel3d639192016-09-09 11:52:26 -0700523{
524 getLogLevel();
Thierry Strudel3d639192016-09-09 11:52:26 -0700525 mCommon.init(gCamCapability[cameraId]);
526 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700527#ifndef USE_HAL_3_3
528 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
529#else
Thierry Strudel3d639192016-09-09 11:52:26 -0700530 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700531#endif
Thierry Strudel3d639192016-09-09 11:52:26 -0700532 mCameraDevice.common.close = close_camera_device;
533 mCameraDevice.ops = &mCameraOps;
534 mCameraDevice.priv = this;
535 gCamCapability[cameraId]->version = CAM_HAL_V3;
536 // TODO: hardcode for now until mctl add support for min_num_pp_bufs
537 //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
538 gCamCapability[cameraId]->min_num_pp_bufs = 3;
539
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800540 PTHREAD_COND_INIT(&mBuffersCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700541
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800542 PTHREAD_COND_INIT(&mRequestCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700543 mPendingLiveRequest = 0;
544 mCurrentRequestId = -1;
545 pthread_mutex_init(&mMutex, NULL);
546
547 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
548 mDefaultMetadata[i] = NULL;
549
550 // Getting system props of different kinds
551 char prop[PROPERTY_VALUE_MAX];
552 memset(prop, 0, sizeof(prop));
553 property_get("persist.camera.raw.dump", prop, "0");
554 mEnableRawDump = atoi(prop);
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800555 property_get("persist.camera.hal3.force.hdr", prop, "0");
556 mForceHdrSnapshot = atoi(prop);
557
Thierry Strudel3d639192016-09-09 11:52:26 -0700558 if (mEnableRawDump)
559 LOGD("Raw dump from Camera HAL enabled");
560
561 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
562 memset(mLdafCalib, 0, sizeof(mLdafCalib));
563
564 memset(prop, 0, sizeof(prop));
565 property_get("persist.camera.tnr.preview", prop, "0");
566 m_bTnrPreview = (uint8_t)atoi(prop);
567
568 memset(prop, 0, sizeof(prop));
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800569 property_get("persist.camera.swtnr.preview", prop, "1");
570 m_bSwTnrPreview = (uint8_t)atoi(prop);
571
572 memset(prop, 0, sizeof(prop));
Binhao Lincdb362a2017-04-20 13:31:54 -0700573 property_get("persist.camera.tnr.video", prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -0700574 m_bTnrVideo = (uint8_t)atoi(prop);
575
576 memset(prop, 0, sizeof(prop));
577 property_get("persist.camera.avtimer.debug", prop, "0");
578 m_debug_avtimer = (uint8_t)atoi(prop);
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800579 LOGI("AV timer enabled: %d", m_debug_avtimer);
Thierry Strudel3d639192016-09-09 11:52:26 -0700580
Thierry Strudel54dc9782017-02-15 12:12:10 -0800581 memset(prop, 0, sizeof(prop));
582 property_get("persist.camera.cacmode.disable", prop, "0");
583 m_cacModeDisabled = (uint8_t)atoi(prop);
584
Thierry Strudel3d639192016-09-09 11:52:26 -0700585 //Load and read GPU library.
586 lib_surface_utils = NULL;
587 LINK_get_surface_pixel_alignment = NULL;
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700588 mSurfaceStridePadding = CAM_PAD_TO_64;
589#ifdef CHECK_GPU_PIXEL_ALIGNMENT
Thierry Strudel3d639192016-09-09 11:52:26 -0700590 lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
591 if (lib_surface_utils) {
592 *(void **)&LINK_get_surface_pixel_alignment =
593 dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
594 if (LINK_get_surface_pixel_alignment) {
595 mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
596 }
597 dlclose(lib_surface_utils);
598 }
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700599#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +0000600 mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
601 mPDSupported = (0 <= mPDIndex) ? true : false;
602
Shuzhen Wangf6890e02016-08-12 14:28:54 -0700603 m60HzZone = is60HzZone();
Thierry Strudel3d639192016-09-09 11:52:26 -0700604}
605
606/*===========================================================================
607 * FUNCTION : ~QCamera3HardwareInterface
608 *
609 * DESCRIPTION: destructor of QCamera3HardwareInterface
610 *
611 * PARAMETERS : none
612 *
613 * RETURN : none
614 *==========================================================================*/
615QCamera3HardwareInterface::~QCamera3HardwareInterface()
616{
617 LOGD("E");
618
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800619 int32_t rc = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -0700620
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800621 // Disable power hint and enable the perf lock for close camera
622 mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
623 mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
624
625 // unlink of dualcam during close camera
626 if (mIsDeviceLinked) {
627 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
628 &m_pDualCamCmdPtr->bundle_info;
629 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
630 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
631 pthread_mutex_lock(&gCamLock);
632
633 if (mIsMainCamera == 1) {
634 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
635 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
636 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
637 // related session id should be session id of linked session
638 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
639 } else {
640 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
641 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
642 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
643 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
644 }
Thierry Strudel2896d122017-02-23 19:18:03 -0800645 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800646 pthread_mutex_unlock(&gCamLock);
647
648 rc = mCameraHandle->ops->set_dual_cam_cmd(
649 mCameraHandle->camera_handle);
650 if (rc < 0) {
651 LOGE("Dualcam: Unlink failed, but still proceed to close");
652 }
653 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700654
655 /* We need to stop all streams before deleting any stream */
656 if (mRawDumpChannel) {
657 mRawDumpChannel->stop();
658 }
659
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700660 if (mHdrPlusRawSrcChannel) {
661 mHdrPlusRawSrcChannel->stop();
662 }
663
Thierry Strudel3d639192016-09-09 11:52:26 -0700664 // NOTE: 'camera3_stream_t *' objects are already freed at
665 // this stage by the framework
666 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
667 it != mStreamInfo.end(); it++) {
668 QCamera3ProcessingChannel *channel = (*it)->channel;
669 if (channel) {
670 channel->stop();
671 }
672 }
673 if (mSupportChannel)
674 mSupportChannel->stop();
675
676 if (mAnalysisChannel) {
677 mAnalysisChannel->stop();
678 }
679 if (mMetadataChannel) {
680 mMetadataChannel->stop();
681 }
682 if (mChannelHandle) {
683 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
684 mChannelHandle);
685 LOGD("stopping channel %d", mChannelHandle);
686 }
687
688 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
689 it != mStreamInfo.end(); it++) {
690 QCamera3ProcessingChannel *channel = (*it)->channel;
691 if (channel)
692 delete channel;
693 free (*it);
694 }
695 if (mSupportChannel) {
696 delete mSupportChannel;
697 mSupportChannel = NULL;
698 }
699
700 if (mAnalysisChannel) {
701 delete mAnalysisChannel;
702 mAnalysisChannel = NULL;
703 }
704 if (mRawDumpChannel) {
705 delete mRawDumpChannel;
706 mRawDumpChannel = NULL;
707 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700708 if (mHdrPlusRawSrcChannel) {
709 delete mHdrPlusRawSrcChannel;
710 mHdrPlusRawSrcChannel = NULL;
711 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700712 if (mDummyBatchChannel) {
713 delete mDummyBatchChannel;
714 mDummyBatchChannel = NULL;
715 }
716
717 mPictureChannel = NULL;
Emilian Peev7650c122017-01-19 08:24:33 -0800718 mDepthChannel = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -0700719
720 if (mMetadataChannel) {
721 delete mMetadataChannel;
722 mMetadataChannel = NULL;
723 }
724
725 /* Clean up all channels */
726 if (mCameraInitialized) {
727 if(!mFirstConfiguration){
728 //send the last unconfigure
729 cam_stream_size_info_t stream_config_info;
730 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
731 stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
732 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -0800733 m_bIs4KVideo ? 0 :
734 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700735 clear_metadata_buffer(mParameters);
Thierry Strudel3d639192016-09-09 11:52:26 -0700736 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
737 stream_config_info);
738 int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
739 if (rc < 0) {
740 LOGE("set_parms failed for unconfigure");
741 }
742 }
743 deinitParameters();
744 }
745
746 if (mChannelHandle) {
747 mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
748 mChannelHandle);
749 LOGH("deleting channel %d", mChannelHandle);
750 mChannelHandle = 0;
751 }
752
753 if (mState != CLOSED)
754 closeCamera();
755
756 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
757 req.mPendingBufferList.clear();
758 }
759 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -0700760 for (pendingRequestIterator i = mPendingRequestsList.begin();
761 i != mPendingRequestsList.end();) {
762 i = erasePendingRequest(i);
763 }
764 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
765 if (mDefaultMetadata[i])
766 free_camera_metadata(mDefaultMetadata[i]);
767
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800768 mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700769
770 pthread_cond_destroy(&mRequestCond);
771
772 pthread_cond_destroy(&mBuffersCond);
773
774 pthread_mutex_destroy(&mMutex);
775 LOGD("X");
776}
777
778/*===========================================================================
779 * FUNCTION : erasePendingRequest
780 *
781 * DESCRIPTION: function to erase a desired pending request after freeing any
782 * allocated memory
783 *
784 * PARAMETERS :
785 * @i : iterator pointing to pending request to be erased
786 *
787 * RETURN : iterator pointing to the next request
788 *==========================================================================*/
789QCamera3HardwareInterface::pendingRequestIterator
790 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
791{
792 if (i->input_buffer != NULL) {
793 free(i->input_buffer);
794 i->input_buffer = NULL;
795 }
796 if (i->settings != NULL)
797 free_camera_metadata((camera_metadata_t*)i->settings);
798 return mPendingRequestsList.erase(i);
799}
800
801/*===========================================================================
802 * FUNCTION : camEvtHandle
803 *
804 * DESCRIPTION: Function registered to mm-camera-interface to handle events
805 *
806 * PARAMETERS :
807 * @camera_handle : interface layer camera handle
808 * @evt : ptr to event
809 * @user_data : user data ptr
810 *
811 * RETURN : none
812 *==========================================================================*/
813void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
814 mm_camera_event_t *evt,
815 void *user_data)
816{
817 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
818 if (obj && evt) {
819 switch(evt->server_event_type) {
820 case CAM_EVENT_TYPE_DAEMON_DIED:
821 pthread_mutex_lock(&obj->mMutex);
822 obj->mState = ERROR;
823 pthread_mutex_unlock(&obj->mMutex);
824 LOGE("Fatal, camera daemon died");
825 break;
826
827 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
828 LOGD("HAL got request pull from Daemon");
829 pthread_mutex_lock(&obj->mMutex);
830 obj->mWokenUpByDaemon = true;
831 obj->unblockRequestIfNecessary();
832 pthread_mutex_unlock(&obj->mMutex);
833 break;
834
835 default:
836 LOGW("Warning: Unhandled event %d",
837 evt->server_event_type);
838 break;
839 }
840 } else {
841 LOGE("NULL user_data/evt");
842 }
843}
844
845/*===========================================================================
846 * FUNCTION : openCamera
847 *
848 * DESCRIPTION: open camera
849 *
850 * PARAMETERS :
851 * @hw_device : double ptr for camera device struct
852 *
853 * RETURN : int32_t type of status
854 * NO_ERROR -- success
855 * none-zero failure code
856 *==========================================================================*/
857int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
858{
859 int rc = 0;
860 if (mState != CLOSED) {
861 *hw_device = NULL;
862 return PERMISSION_DENIED;
863 }
864
Chien-Yu Chene96475e2017-04-11 11:53:26 -0700865 logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800866 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700867 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
868 mCameraId);
869
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700870 if (mCameraHandle) {
871 LOGE("Failure: Camera already opened");
872 return ALREADY_EXISTS;
873 }
874
875 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -0700876 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700877 if (gEaselManagerClient.isEaselPresentOnDevice()) {
878 logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
879 rc = gEaselManagerClient.resume();
880 if (rc != 0) {
881 ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
882 return rc;
883 }
884 }
885 }
886
Thierry Strudel3d639192016-09-09 11:52:26 -0700887 rc = openCamera();
888 if (rc == 0) {
889 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800890 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700891 *hw_device = NULL;
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700892
893 // Suspend Easel because opening camera failed.
894 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -0700895 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700896 if (gEaselManagerClient.isEaselPresentOnDevice()) {
897 status_t suspendErr = gEaselManagerClient.suspend();
898 if (suspendErr != 0) {
899 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
900 strerror(-suspendErr), suspendErr);
901 }
902 }
903 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800904 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700905
Thierry Strudel3d639192016-09-09 11:52:26 -0700906 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
907 mCameraId, rc);
908
909 if (rc == NO_ERROR) {
910 mState = OPENED;
911 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800912
Thierry Strudel3d639192016-09-09 11:52:26 -0700913 return rc;
914}
915
916/*===========================================================================
917 * FUNCTION : openCamera
918 *
919 * DESCRIPTION: open camera
920 *
921 * PARAMETERS : none
922 *
923 * RETURN : int32_t type of status
924 * NO_ERROR -- success
925 * none-zero failure code
926 *==========================================================================*/
927int QCamera3HardwareInterface::openCamera()
928{
929 int rc = 0;
930 char value[PROPERTY_VALUE_MAX];
931
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800932 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800933
Thierry Strudel3d639192016-09-09 11:52:26 -0700934 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
935 if (rc < 0) {
936 LOGE("Failed to reserve flash for camera id: %d",
937 mCameraId);
938 return UNKNOWN_ERROR;
939 }
940
941 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
942 if (rc) {
943 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
944 return rc;
945 }
946
947 if (!mCameraHandle) {
948 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
949 return -ENODEV;
950 }
951
952 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
953 camEvtHandle, (void *)this);
954
955 if (rc < 0) {
956 LOGE("Error, failed to register event callback");
957 /* Not closing camera here since it is already handled in destructor */
958 return FAILED_TRANSACTION;
959 }
960
961 mExifParams.debug_params =
962 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
963 if (mExifParams.debug_params) {
964 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
965 } else {
966 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
967 return NO_MEMORY;
968 }
969 mFirstConfiguration = true;
970
971 //Notify display HAL that a camera session is active.
972 //But avoid calling the same during bootup because camera service might open/close
973 //cameras at boot time during its initialization and display service will also internally
974 //wait for camera service to initialize first while calling this display API, resulting in a
975 //deadlock situation. Since boot time camera open/close calls are made only to fetch
976 //capabilities, no need of this display bw optimization.
977 //Use "service.bootanim.exit" property to know boot status.
978 property_get("service.bootanim.exit", value, "0");
979 if (atoi(value) == 1) {
980 pthread_mutex_lock(&gCamLock);
981 if (gNumCameraSessions++ == 0) {
982 setCameraLaunchStatus(true);
983 }
984 pthread_mutex_unlock(&gCamLock);
985 }
986
987 //fill the session id needed while linking dual cam
988 pthread_mutex_lock(&gCamLock);
989 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
990 &sessionId[mCameraId]);
991 pthread_mutex_unlock(&gCamLock);
992
993 if (rc < 0) {
994 LOGE("Error, failed to get sessiion id");
995 return UNKNOWN_ERROR;
996 } else {
997 //Allocate related cam sync buffer
998 //this is needed for the payload that goes along with bundling cmd for related
999 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001000 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
1001 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07001002 if(rc != OK) {
1003 rc = NO_MEMORY;
1004 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
1005 return NO_MEMORY;
1006 }
1007
1008 //Map memory for related cam sync buffer
1009 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001010 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
1011 m_pDualCamCmdHeap->getFd(0),
1012 sizeof(cam_dual_camera_cmd_info_t),
1013 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -07001014 if(rc < 0) {
1015 LOGE("Dualcam: failed to map Related cam sync buffer");
1016 rc = FAILED_TRANSACTION;
1017 return NO_MEMORY;
1018 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001019 m_pDualCamCmdPtr =
1020 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -07001021 }
1022
1023 LOGH("mCameraId=%d",mCameraId);
1024
1025 return NO_ERROR;
1026}
1027
1028/*===========================================================================
1029 * FUNCTION : closeCamera
1030 *
1031 * DESCRIPTION: close camera
1032 *
1033 * PARAMETERS : none
1034 *
1035 * RETURN : int32_t type of status
1036 * NO_ERROR -- success
1037 * none-zero failure code
1038 *==========================================================================*/
1039int QCamera3HardwareInterface::closeCamera()
1040{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001041 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -07001042 int rc = NO_ERROR;
1043 char value[PROPERTY_VALUE_MAX];
1044
1045 LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
1046 mCameraId);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001047
1048 // unmap memory for related cam sync buffer
1049 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001050 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001051 if (NULL != m_pDualCamCmdHeap) {
1052 m_pDualCamCmdHeap->deallocate();
1053 delete m_pDualCamCmdHeap;
1054 m_pDualCamCmdHeap = NULL;
1055 m_pDualCamCmdPtr = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001056 }
1057
Thierry Strudel3d639192016-09-09 11:52:26 -07001058 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
1059 mCameraHandle = NULL;
1060
1061 //reset session id to some invalid id
1062 pthread_mutex_lock(&gCamLock);
1063 sessionId[mCameraId] = 0xDEADBEEF;
1064 pthread_mutex_unlock(&gCamLock);
1065
1066 //Notify display HAL that there is no active camera session
1067 //but avoid calling the same during bootup. Refer to openCamera
1068 //for more details.
1069 property_get("service.bootanim.exit", value, "0");
1070 if (atoi(value) == 1) {
1071 pthread_mutex_lock(&gCamLock);
1072 if (--gNumCameraSessions == 0) {
1073 setCameraLaunchStatus(false);
1074 }
1075 pthread_mutex_unlock(&gCamLock);
1076 }
1077
Thierry Strudel3d639192016-09-09 11:52:26 -07001078 if (mExifParams.debug_params) {
1079 free(mExifParams.debug_params);
1080 mExifParams.debug_params = NULL;
1081 }
1082 if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
1083 LOGW("Failed to release flash for camera id: %d",
1084 mCameraId);
1085 }
1086 mState = CLOSED;
1087 LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
1088 mCameraId, rc);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001089
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001090 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07001091 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
1092 finishHdrPlusClientOpeningLocked(l);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001093 if (gHdrPlusClient != nullptr) {
1094 // Disable HDR+ mode.
1095 disableHdrPlusModeLocked();
1096 // Disconnect Easel if it's connected.
1097 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
1098 gHdrPlusClient = nullptr;
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001099 }
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -07001100
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001101 if (EaselManagerClientOpened) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001102 rc = gEaselManagerClient.stopMipi(mCameraId);
1103 if (rc != 0) {
1104 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1105 }
1106
1107 rc = gEaselManagerClient.suspend();
1108 if (rc != 0) {
1109 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1110 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001111 }
1112 }
1113
Thierry Strudel3d639192016-09-09 11:52:26 -07001114 return rc;
1115}
1116
1117/*===========================================================================
1118 * FUNCTION : initialize
1119 *
1120 * DESCRIPTION: Initialize frameworks callback functions
1121 *
1122 * PARAMETERS :
1123 * @callback_ops : callback function to frameworks
1124 *
1125 * RETURN :
1126 *
1127 *==========================================================================*/
1128int QCamera3HardwareInterface::initialize(
1129 const struct camera3_callback_ops *callback_ops)
1130{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001131 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -07001132 int rc;
1133
1134 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
1135 pthread_mutex_lock(&mMutex);
1136
1137 // Validate current state
1138 switch (mState) {
1139 case OPENED:
1140 /* valid state */
1141 break;
1142 default:
1143 LOGE("Invalid state %d", mState);
1144 rc = -ENODEV;
1145 goto err1;
1146 }
1147
1148 rc = initParameters();
1149 if (rc < 0) {
1150 LOGE("initParamters failed %d", rc);
1151 goto err1;
1152 }
1153 mCallbackOps = callback_ops;
1154
1155 mChannelHandle = mCameraHandle->ops->add_channel(
1156 mCameraHandle->camera_handle, NULL, NULL, this);
1157 if (mChannelHandle == 0) {
1158 LOGE("add_channel failed");
1159 rc = -ENOMEM;
1160 pthread_mutex_unlock(&mMutex);
1161 return rc;
1162 }
1163
1164 pthread_mutex_unlock(&mMutex);
1165 mCameraInitialized = true;
1166 mState = INITIALIZED;
1167 LOGI("X");
1168 return 0;
1169
1170err1:
1171 pthread_mutex_unlock(&mMutex);
1172 return rc;
1173}
1174
1175/*===========================================================================
1176 * FUNCTION : validateStreamDimensions
1177 *
1178 * DESCRIPTION: Check if the configuration requested are those advertised
1179 *
1180 * PARAMETERS :
1181 * @stream_list : streams to be configured
1182 *
1183 * RETURN :
1184 *
1185 *==========================================================================*/
1186int QCamera3HardwareInterface::validateStreamDimensions(
1187 camera3_stream_configuration_t *streamList)
1188{
1189 int rc = NO_ERROR;
1190 size_t count = 0;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001191 uint32_t depthWidth = 0;
1192 uint32_t depthHeight = 0;
1193 if (mPDSupported) {
1194 depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
1195 depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
1196 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001197
1198 camera3_stream_t *inputStream = NULL;
1199 /*
1200 * Loop through all streams to find input stream if it exists*
1201 */
1202 for (size_t i = 0; i< streamList->num_streams; i++) {
1203 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1204 if (inputStream != NULL) {
1205 LOGE("Error, Multiple input streams requested");
1206 return -EINVAL;
1207 }
1208 inputStream = streamList->streams[i];
1209 }
1210 }
1211 /*
1212 * Loop through all streams requested in configuration
1213 * Check if unsupported sizes have been requested on any of them
1214 */
1215 for (size_t j = 0; j < streamList->num_streams; j++) {
1216 bool sizeFound = false;
1217 camera3_stream_t *newStream = streamList->streams[j];
1218
1219 uint32_t rotatedHeight = newStream->height;
1220 uint32_t rotatedWidth = newStream->width;
1221 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1222 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1223 rotatedHeight = newStream->width;
1224 rotatedWidth = newStream->height;
1225 }
1226
1227 /*
1228 * Sizes are different for each type of stream format check against
1229 * appropriate table.
1230 */
1231 switch (newStream->format) {
1232 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1233 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1234 case HAL_PIXEL_FORMAT_RAW10:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001235 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1236 (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
1237 mPDSupported) {
1238 if ((depthWidth == newStream->width) &&
1239 (depthHeight == newStream->height)) {
1240 sizeFound = true;
1241 }
1242 break;
1243 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001244 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1245 for (size_t i = 0; i < count; i++) {
1246 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1247 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1248 sizeFound = true;
1249 break;
1250 }
1251 }
1252 break;
1253 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001254 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1255 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001256 //As per spec. depth cloud should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001257 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
Emilian Peev7650c122017-01-19 08:24:33 -08001258 if ((depthSamplesCount == newStream->width) &&
1259 (1 == newStream->height)) {
1260 sizeFound = true;
1261 }
1262 break;
1263 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001264 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1265 /* Verify set size against generated sizes table */
1266 for (size_t i = 0; i < count; i++) {
1267 if (((int32_t)rotatedWidth ==
1268 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1269 ((int32_t)rotatedHeight ==
1270 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1271 sizeFound = true;
1272 break;
1273 }
1274 }
1275 break;
1276 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1277 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1278 default:
1279 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1280 || newStream->stream_type == CAMERA3_STREAM_INPUT
1281 || IS_USAGE_ZSL(newStream->usage)) {
1282 if (((int32_t)rotatedWidth ==
1283 gCamCapability[mCameraId]->active_array_size.width) &&
1284 ((int32_t)rotatedHeight ==
1285 gCamCapability[mCameraId]->active_array_size.height)) {
1286 sizeFound = true;
1287 break;
1288 }
1289 /* We could potentially break here to enforce ZSL stream
1290 * set from frameworks always is full active array size
1291 * but it is not clear from the spc if framework will always
1292 * follow that, also we have logic to override to full array
1293 * size, so keeping the logic lenient at the moment
1294 */
1295 }
1296 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1297 MAX_SIZES_CNT);
1298 for (size_t i = 0; i < count; i++) {
1299 if (((int32_t)rotatedWidth ==
1300 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1301 ((int32_t)rotatedHeight ==
1302 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1303 sizeFound = true;
1304 break;
1305 }
1306 }
1307 break;
1308 } /* End of switch(newStream->format) */
1309
1310 /* We error out even if a single stream has unsupported size set */
1311 if (!sizeFound) {
1312 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1313 rotatedWidth, rotatedHeight, newStream->format,
1314 gCamCapability[mCameraId]->active_array_size.width,
1315 gCamCapability[mCameraId]->active_array_size.height);
1316 rc = -EINVAL;
1317 break;
1318 }
1319 } /* End of for each stream */
1320 return rc;
1321}
1322
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001323/*===========================================================================
1324 * FUNCTION : validateUsageFlags
1325 *
1326 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1327 *
1328 * PARAMETERS :
1329 * @stream_list : streams to be configured
1330 *
1331 * RETURN :
1332 * NO_ERROR if the usage flags are supported
1333 * error code if usage flags are not supported
1334 *
1335 *==========================================================================*/
1336int QCamera3HardwareInterface::validateUsageFlags(
1337 const camera3_stream_configuration_t* streamList)
1338{
1339 for (size_t j = 0; j < streamList->num_streams; j++) {
1340 const camera3_stream_t *newStream = streamList->streams[j];
1341
1342 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1343 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1344 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1345 continue;
1346 }
1347
Jason Leec4cf5032017-05-24 18:31:41 -07001348 // Here we only care whether it's EIS3 or not
1349 char is_type_value[PROPERTY_VALUE_MAX];
1350 property_get("persist.camera.is_type", is_type_value, "4");
1351 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1352 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1353 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1354 isType = IS_TYPE_NONE;
1355
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001356 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1357 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1358 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1359 bool forcePreviewUBWC = true;
1360 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1361 forcePreviewUBWC = false;
1362 }
1363 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001364 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001365 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001366 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001367 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001368 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001369
1370 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1371 // So color spaces will always match.
1372
1373 // Check whether underlying formats of shared streams match.
1374 if (isVideo && isPreview && videoFormat != previewFormat) {
1375 LOGE("Combined video and preview usage flag is not supported");
1376 return -EINVAL;
1377 }
1378 if (isPreview && isZSL && previewFormat != zslFormat) {
1379 LOGE("Combined preview and zsl usage flag is not supported");
1380 return -EINVAL;
1381 }
1382 if (isVideo && isZSL && videoFormat != zslFormat) {
1383 LOGE("Combined video and zsl usage flag is not supported");
1384 return -EINVAL;
1385 }
1386 }
1387 return NO_ERROR;
1388}
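/* Illustrative sketch (not compiled into the HAL): a single IMPLEMENTATION_DEFINED
 * output stream whose gralloc usage carries both a preview consumer bit and the video
 * encoder bit is rejected by validateUsageFlags() whenever the default video and
 * preview formats differ (e.g. UBWC video vs. linear preview). The exact gralloc bits
 * matched by IS_USAGE_PREVIEW()/IS_USAGE_VIDEO() are an assumption here.
 *
 *   camera3_stream_t shared = {};
 *   shared.stream_type = CAMERA3_STREAM_OUTPUT;
 *   shared.format      = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
 *   shared.usage       = GRALLOC_USAGE_HW_TEXTURE |        // preview consumer (assumed)
 *                        GRALLOC_USAGE_HW_VIDEO_ENCODER;   // video encoder consumer
 *   camera3_stream_t *streams[] = { &shared };
 *   camera3_stream_configuration_t cfg = {};
 *   cfg.num_streams = 1;
 *   cfg.streams     = streams;
 *   // validateUsageFlags(&cfg) returns -EINVAL when videoFormat != previewFormat.
 */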
1389
1390/*===========================================================================
1391 * FUNCTION : validateUsageFlagsForEis
1392 *
1393 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1394 *
1395 * PARAMETERS :
1396 * @stream_list : streams to be configured
1397 *
1398 * RETURN :
1399 * NO_ERROR if the usage flags are supported
1400 * error code if usage flags are not supported
1401 *
1402 *==========================================================================*/
1403int QCamera3HardwareInterface::validateUsageFlagsForEis(
1404 const camera3_stream_configuration_t* streamList)
1405{
1406 for (size_t j = 0; j < streamList->num_streams; j++) {
1407 const camera3_stream_t *newStream = streamList->streams[j];
1408
1409 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1410 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1411
1412        // Because EIS is "hard-coded" for certain use cases, and the current
1413        // implementation doesn't support sharing preview and video on the same
1414        // stream, return failure if EIS is forced on.
1415 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1416 LOGE("Combined video and preview usage flag is not supported due to EIS");
1417 return -EINVAL;
1418 }
1419 }
1420 return NO_ERROR;
1421}
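/* Illustrative sketch: with m_bEisEnable and m_bEisSupportedSize both true, any single
 * stream carrying both the preview and the video usage bits makes
 * validateUsageFlagsForEis() return -EINVAL; configuring preview and video as separate
 * streams avoids the conflict.
 */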
1422
Thierry Strudel3d639192016-09-09 11:52:26 -07001423/*==============================================================================
1424 * FUNCTION : isSupportChannelNeeded
1425 *
1426 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1427 *
1428 * PARAMETERS :
1429 * @stream_list : streams to be configured
1430 * @stream_config_info : the config info for streams to be configured
1431 *
1432 * RETURN     : Boolean true/false decision
1433 *
1434 *==========================================================================*/
1435bool QCamera3HardwareInterface::isSupportChannelNeeded(
1436 camera3_stream_configuration_t *streamList,
1437 cam_stream_size_info_t stream_config_info)
1438{
1439 uint32_t i;
1440 bool pprocRequested = false;
1441 /* Check for conditions where PProc pipeline does not have any streams*/
1442 for (i = 0; i < stream_config_info.num_streams; i++) {
1443 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1444 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1445 pprocRequested = true;
1446 break;
1447 }
1448 }
1449
1450 if (pprocRequested == false )
1451 return true;
1452
1453 /* Dummy stream needed if only raw or jpeg streams present */
1454 for (i = 0; i < streamList->num_streams; i++) {
1455 switch(streamList->streams[i]->format) {
1456 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1457 case HAL_PIXEL_FORMAT_RAW10:
1458 case HAL_PIXEL_FORMAT_RAW16:
1459 case HAL_PIXEL_FORMAT_BLOB:
1460 break;
1461 default:
1462 return false;
1463 }
1464 }
1465 return true;
1466}
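/* Example of the heuristic above (illustrative): a configuration with only a RAW16
 * stream plus a BLOB (JPEG) stream never hits the default case in the second loop, so
 * the function returns true and a dummy support channel gets created; adding any
 * YCbCr_420_888 or IMPLEMENTATION_DEFINED stream makes it return false instead.
 */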
1467
1468/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001469 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001470 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001471 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001472 *
1473 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001474 *   @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001475 *
1476 * RETURN : int32_t type of status
1477 * NO_ERROR -- success
1478 *              non-zero failure code
1479 *
1480 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001481int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001482{
1483 int32_t rc = NO_ERROR;
1484
1485 cam_dimension_t max_dim = {0, 0};
1486 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1487 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1488 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1489 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1490 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1491 }
1492
1493 clear_metadata_buffer(mParameters);
1494
1495 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1496 max_dim);
1497 if (rc != NO_ERROR) {
1498 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1499 return rc;
1500 }
1501
1502 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1503 if (rc != NO_ERROR) {
1504 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1505 return rc;
1506 }
1507
1508 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001509 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001510
1511 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1512 mParameters);
1513 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001514 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001515 return rc;
1516 }
1517
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001518 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001519 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1520 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1521 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1522 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1523 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001524
1525 return rc;
1526}
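/* Typical call pattern within this HAL (illustrative sketch; only fields already
 * printed by the LOGH above are referenced):
 *
 *   cam_sensor_mode_info_t modeInfo = {};
 *   if (getSensorModeInfo(modeInfo) == NO_ERROR) {
 *       LOGD("mode: %dx%d active array, op clk %u, %d raw bits",
 *               modeInfo.active_array_size.width, modeInfo.active_array_size.height,
 *               modeInfo.op_pixel_clk, modeInfo.num_raw_bits);
 *   }
 */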
1527
1528/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001529 * FUNCTION : getCurrentSensorModeInfo
1530 *
1531 * DESCRIPTION: Get sensor mode information that is currently selected.
1532 *
1533 * PARAMETERS :
1534 * @sensorModeInfo : sensor mode information (output)
1535 *
1536 * RETURN : int32_t type of status
1537 * NO_ERROR -- success
1538 *              non-zero failure code
1539 *
1540 *==========================================================================*/
1541int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1542{
1543 int32_t rc = NO_ERROR;
1544
1545 clear_metadata_buffer(mParameters);
1546 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1547
1548 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1549 mParameters);
1550 if (rc != NO_ERROR) {
1551         LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1552 return rc;
1553 }
1554
1555 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1556 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1557 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1558 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1559 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1560 sensorModeInfo.num_raw_bits);
1561
1562 return rc;
1563}
1564
1565/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001566 * FUNCTION : addToPPFeatureMask
1567 *
1568 * DESCRIPTION: add additional features to pp feature mask based on
1569 * stream type and usecase
1570 *
1571 * PARAMETERS :
1572 * @stream_format : stream type for feature mask
1573 * @stream_idx : stream idx within postprocess_mask list to change
1574 *
1575 * RETURN     : None
1576 *
1577 *==========================================================================*/
1578void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1579 uint32_t stream_idx)
1580{
1581 char feature_mask_value[PROPERTY_VALUE_MAX];
1582 cam_feature_mask_t feature_mask;
1583 int args_converted;
1584 int property_len;
1585
1586 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001587#ifdef _LE_CAMERA_
1588 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1589 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1590 property_len = property_get("persist.camera.hal3.feature",
1591 feature_mask_value, swtnr_feature_mask_value);
1592#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001593 property_len = property_get("persist.camera.hal3.feature",
1594 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001595#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001596 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1597 (feature_mask_value[1] == 'x')) {
1598 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1599 } else {
1600 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1601 }
1602 if (1 != args_converted) {
1603 feature_mask = 0;
1604 LOGE("Wrong feature mask %s", feature_mask_value);
1605 return;
1606 }
1607
1608 switch (stream_format) {
1609 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1610 /* Add LLVD to pp feature mask only if video hint is enabled */
1611 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1612 mStreamConfigInfo.postprocess_mask[stream_idx]
1613 |= CAM_QTI_FEATURE_SW_TNR;
1614 LOGH("Added SW TNR to pp feature mask");
1615 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1616 mStreamConfigInfo.postprocess_mask[stream_idx]
1617 |= CAM_QCOM_FEATURE_LLVD;
1618 LOGH("Added LLVD SeeMore to pp feature mask");
1619 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001620 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1621 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1622 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1623 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001624 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1625 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1626 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1627 CAM_QTI_FEATURE_BINNING_CORRECTION;
1628 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001629 break;
1630 }
1631 default:
1632 break;
1633 }
1634 LOGD("PP feature mask %llx",
1635 mStreamConfigInfo.postprocess_mask[stream_idx]);
1636}
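/* Note on the property parsing above (illustrative): a value such as "0x10" is parsed
 * as hex while "16" is parsed as decimal, so both resolve to the same mask. Which bit
 * corresponds to which CAM_* feature comes from the mm-camera interface headers, not
 * from this file; a hypothetical debug override would look like:
 *
 *   adb shell setprop persist.camera.hal3.feature 0x10
 */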
1637
1638/*==============================================================================
1639 * FUNCTION : updateFpsInPreviewBuffer
1640 *
1641 * DESCRIPTION: update FPS information in preview buffer.
1642 *
1643 * PARAMETERS :
1644 * @metadata : pointer to metadata buffer
1645 * @frame_number: frame_number to look for in pending buffer list
1646 *
1647 * RETURN : None
1648 *
1649 *==========================================================================*/
1650void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1651 uint32_t frame_number)
1652{
1653 // Mark all pending buffers for this particular request
1654 // with corresponding framerate information
1655 for (List<PendingBuffersInRequest>::iterator req =
1656 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1657 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1658 for(List<PendingBufferInfo>::iterator j =
1659 req->mPendingBufferList.begin();
1660 j != req->mPendingBufferList.end(); j++) {
1661 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1662 if ((req->frame_number == frame_number) &&
1663 (channel->getStreamTypeMask() &
1664 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1665 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1666 CAM_INTF_PARM_FPS_RANGE, metadata) {
1667 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1668 struct private_handle_t *priv_handle =
1669 (struct private_handle_t *)(*(j->buffer));
1670 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1671 }
1672 }
1673 }
1674 }
1675}
1676
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001677/*==============================================================================
1678 * FUNCTION : updateTimeStampInPendingBuffers
1679 *
1680 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1681 * of a frame number
1682 *
1683 * PARAMETERS :
1684 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1685 * @timestamp : timestamp to be set
1686 *
1687 * RETURN : None
1688 *
1689 *==========================================================================*/
1690void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1691 uint32_t frameNumber, nsecs_t timestamp)
1692{
1693 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1694 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1695 if (req->frame_number != frameNumber)
1696 continue;
1697
1698 for (auto k = req->mPendingBufferList.begin();
1699 k != req->mPendingBufferList.end(); k++ ) {
1700 struct private_handle_t *priv_handle =
1701 (struct private_handle_t *) (*(k->buffer));
1702 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1703 }
1704 }
1705 return;
1706}
1707
Thierry Strudel3d639192016-09-09 11:52:26 -07001708/*===========================================================================
1709 * FUNCTION : configureStreams
1710 *
1711 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1712 * and output streams.
1713 *
1714 * PARAMETERS :
1715 * @stream_list : streams to be configured
1716 *
1717 * RETURN     : int type of status, NO_ERROR on success
1718 *
1719 *==========================================================================*/
1720int QCamera3HardwareInterface::configureStreams(
1721 camera3_stream_configuration_t *streamList)
1722{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001723 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001724 int rc = 0;
1725
1726 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001727 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001728 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001729 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001730
1731 return rc;
1732}
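/* Illustrative note: configureStreams() only brackets the real work with the
 * PERF_LOCK_START_PREVIEW acquire/release pair; all validation and channel creation
 * happens in configureStreamsPerfLocked() below.
 */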
1733
1734/*===========================================================================
1735 * FUNCTION : configureStreamsPerfLocked
1736 *
1737 * DESCRIPTION: configureStreams while perfLock is held.
1738 *
1739 * PARAMETERS :
1740 * @stream_list : streams to be configured
1741 *
1742 * RETURN : int32_t type of status
1743 * NO_ERROR -- success
1744 *              non-zero failure code
1745 *==========================================================================*/
1746int QCamera3HardwareInterface::configureStreamsPerfLocked(
1747 camera3_stream_configuration_t *streamList)
1748{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001749 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001750 int rc = 0;
1751
1752 // Sanity check stream_list
1753 if (streamList == NULL) {
1754 LOGE("NULL stream configuration");
1755 return BAD_VALUE;
1756 }
1757 if (streamList->streams == NULL) {
1758 LOGE("NULL stream list");
1759 return BAD_VALUE;
1760 }
1761
1762 if (streamList->num_streams < 1) {
1763 LOGE("Bad number of streams requested: %d",
1764 streamList->num_streams);
1765 return BAD_VALUE;
1766 }
1767
1768 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1769 LOGE("Maximum number of streams %d exceeded: %d",
1770 MAX_NUM_STREAMS, streamList->num_streams);
1771 return BAD_VALUE;
1772 }
1773
Jason Leec4cf5032017-05-24 18:31:41 -07001774 mOpMode = streamList->operation_mode;
1775 LOGD("mOpMode: %d", mOpMode);
1776
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001777 rc = validateUsageFlags(streamList);
1778 if (rc != NO_ERROR) {
1779 return rc;
1780 }
1781
Thierry Strudel3d639192016-09-09 11:52:26 -07001782    /* first invalidate all the streams in mStreamInfo
1783 * if they appear again, they will be validated */
1784 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1785 it != mStreamInfo.end(); it++) {
1786 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1787 if (channel) {
1788 channel->stop();
1789 }
1790 (*it)->status = INVALID;
1791 }
1792
1793 if (mRawDumpChannel) {
1794 mRawDumpChannel->stop();
1795 delete mRawDumpChannel;
1796 mRawDumpChannel = NULL;
1797 }
1798
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001799 if (mHdrPlusRawSrcChannel) {
1800 mHdrPlusRawSrcChannel->stop();
1801 delete mHdrPlusRawSrcChannel;
1802 mHdrPlusRawSrcChannel = NULL;
1803 }
1804
Thierry Strudel3d639192016-09-09 11:52:26 -07001805 if (mSupportChannel)
1806 mSupportChannel->stop();
1807
1808 if (mAnalysisChannel) {
1809 mAnalysisChannel->stop();
1810 }
1811 if (mMetadataChannel) {
1812         /* If mStreamInfo is not empty, there is a metadata stream */
1813 mMetadataChannel->stop();
1814 }
1815 if (mChannelHandle) {
1816 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1817 mChannelHandle);
1818 LOGD("stopping channel %d", mChannelHandle);
1819 }
1820
1821 pthread_mutex_lock(&mMutex);
1822
1823 // Check state
1824 switch (mState) {
1825 case INITIALIZED:
1826 case CONFIGURED:
1827 case STARTED:
1828 /* valid state */
1829 break;
1830 default:
1831 LOGE("Invalid state %d", mState);
1832 pthread_mutex_unlock(&mMutex);
1833 return -ENODEV;
1834 }
1835
1836 /* Check whether we have video stream */
1837 m_bIs4KVideo = false;
1838 m_bIsVideo = false;
1839 m_bEisSupportedSize = false;
1840 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001841 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001842 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001843 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001844 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001845 uint32_t videoWidth = 0U;
1846 uint32_t videoHeight = 0U;
1847 size_t rawStreamCnt = 0;
1848 size_t stallStreamCnt = 0;
1849 size_t processedStreamCnt = 0;
1850 // Number of streams on ISP encoder path
1851 size_t numStreamsOnEncoder = 0;
1852 size_t numYuv888OnEncoder = 0;
1853 bool bYuv888OverrideJpeg = false;
1854 cam_dimension_t largeYuv888Size = {0, 0};
1855 cam_dimension_t maxViewfinderSize = {0, 0};
1856 bool bJpegExceeds4K = false;
1857 bool bJpegOnEncoder = false;
1858 bool bUseCommonFeatureMask = false;
1859 cam_feature_mask_t commonFeatureMask = 0;
1860 bool bSmallJpegSize = false;
1861 uint32_t width_ratio;
1862 uint32_t height_ratio;
1863 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1864 camera3_stream_t *inputStream = NULL;
1865 bool isJpeg = false;
1866 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001867 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001868 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001869
1870 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1871
1872 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001873 uint8_t eis_prop_set;
1874 uint32_t maxEisWidth = 0;
1875 uint32_t maxEisHeight = 0;
1876
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001877 // Initialize all instant AEC related variables
1878 mInstantAEC = false;
1879 mResetInstantAEC = false;
1880 mInstantAECSettledFrameNumber = 0;
1881 mAecSkipDisplayFrameBound = 0;
1882 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001883 mCurrFeatureState = 0;
1884 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001885
Thierry Strudel3d639192016-09-09 11:52:26 -07001886 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1887
1888 size_t count = IS_TYPE_MAX;
1889 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1890 for (size_t i = 0; i < count; i++) {
1891 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001892 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1893 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001894 break;
1895 }
1896 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001897
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001898 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001899 maxEisWidth = MAX_EIS_WIDTH;
1900 maxEisHeight = MAX_EIS_HEIGHT;
1901 }
1902
1903 /* EIS setprop control */
1904 char eis_prop[PROPERTY_VALUE_MAX];
1905 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001906 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001907 eis_prop_set = (uint8_t)atoi(eis_prop);
1908
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001909 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001910 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1911
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001912 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1913 m_bEisEnable, eis_prop_set, m_bEisSupported);
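    // Debug sketch: "adb shell setprop persist.camera.eis.enable 0" clears eis_prop_set,
    // which forces m_bEisEnable to false on the next stream configuration regardless of
    // the EIS support advertised by the sensor capabilities.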
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001914
Thierry Strudel3d639192016-09-09 11:52:26 -07001915 /* stream configurations */
1916 for (size_t i = 0; i < streamList->num_streams; i++) {
1917 camera3_stream_t *newStream = streamList->streams[i];
1918 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1919 "height = %d, rotation = %d, usage = 0x%x",
1920 i, newStream->stream_type, newStream->format,
1921 newStream->width, newStream->height, newStream->rotation,
1922 newStream->usage);
1923 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1924 newStream->stream_type == CAMERA3_STREAM_INPUT){
1925 isZsl = true;
1926 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001927 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1928 IS_USAGE_PREVIEW(newStream->usage)) {
1929 isPreview = true;
1930 }
1931
Thierry Strudel3d639192016-09-09 11:52:26 -07001932 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1933 inputStream = newStream;
1934 }
1935
Emilian Peev7650c122017-01-19 08:24:33 -08001936 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1937 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001938 isJpeg = true;
1939 jpegSize.width = newStream->width;
1940 jpegSize.height = newStream->height;
1941 if (newStream->width > VIDEO_4K_WIDTH ||
1942 newStream->height > VIDEO_4K_HEIGHT)
1943 bJpegExceeds4K = true;
1944 }
1945
1946 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1947 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1948 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001949 // In HAL3 we can have multiple different video streams.
1950 // The variables video width and height are used below as
1951 // dimensions of the biggest of them
1952 if (videoWidth < newStream->width ||
1953 videoHeight < newStream->height) {
1954 videoWidth = newStream->width;
1955 videoHeight = newStream->height;
1956 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001957 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1958 (VIDEO_4K_HEIGHT <= newStream->height)) {
1959 m_bIs4KVideo = true;
1960 }
1961 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1962 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001963
Thierry Strudel3d639192016-09-09 11:52:26 -07001964 }
1965 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1966 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1967 switch (newStream->format) {
1968 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001969 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1970 depthPresent = true;
1971 break;
1972 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001973 stallStreamCnt++;
1974 if (isOnEncoder(maxViewfinderSize, newStream->width,
1975 newStream->height)) {
1976 numStreamsOnEncoder++;
1977 bJpegOnEncoder = true;
1978 }
1979 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1980 newStream->width);
1981 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1982                     newStream->height);
1983 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1984 "FATAL: max_downscale_factor cannot be zero and so assert");
1985 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1986 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1987 LOGH("Setting small jpeg size flag to true");
1988 bSmallJpegSize = true;
1989 }
1990 break;
1991 case HAL_PIXEL_FORMAT_RAW10:
1992 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1993 case HAL_PIXEL_FORMAT_RAW16:
1994 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001995 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1996 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1997 pdStatCount++;
1998 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001999 break;
2000 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2001 processedStreamCnt++;
2002 if (isOnEncoder(maxViewfinderSize, newStream->width,
2003 newStream->height)) {
2004 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2005 !IS_USAGE_ZSL(newStream->usage)) {
2006 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2007 }
2008 numStreamsOnEncoder++;
2009 }
2010 break;
2011 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2012 processedStreamCnt++;
2013 if (isOnEncoder(maxViewfinderSize, newStream->width,
2014 newStream->height)) {
2015 // If Yuv888 size is not greater than 4K, set feature mask
2016                 // to SUPERSET so that it supports concurrent requests on
2017 // YUV and JPEG.
2018 if (newStream->width <= VIDEO_4K_WIDTH &&
2019 newStream->height <= VIDEO_4K_HEIGHT) {
2020 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2021 }
2022 numStreamsOnEncoder++;
2023 numYuv888OnEncoder++;
2024 largeYuv888Size.width = newStream->width;
2025 largeYuv888Size.height = newStream->height;
2026 }
2027 break;
2028 default:
2029 processedStreamCnt++;
2030 if (isOnEncoder(maxViewfinderSize, newStream->width,
2031 newStream->height)) {
2032 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2033 numStreamsOnEncoder++;
2034 }
2035 break;
2036 }
2037
2038 }
2039 }
2040
2041 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2042 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2043 !m_bIsVideo) {
2044 m_bEisEnable = false;
2045 }
2046
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002047 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2048 pthread_mutex_unlock(&mMutex);
2049 return -EINVAL;
2050 }
2051
Thierry Strudel54dc9782017-02-15 12:12:10 -08002052 uint8_t forceEnableTnr = 0;
2053 char tnr_prop[PROPERTY_VALUE_MAX];
2054 memset(tnr_prop, 0, sizeof(tnr_prop));
2055 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2056 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2057
Thierry Strudel3d639192016-09-09 11:52:26 -07002058 /* Logic to enable/disable TNR based on specific config size/etc.*/
2059 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002060 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2061 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002062 else if (forceEnableTnr)
2063 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002064
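    // Debug sketch: "adb shell setprop debug.camera.tnr.forceenable 1" turns TNR on for
    // the next stream configuration even when the preview/video TNR settings above
    // would leave it disabled.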
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002065 char videoHdrProp[PROPERTY_VALUE_MAX];
2066 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2067 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2068 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2069
2070 if (hdr_mode_prop == 1 && m_bIsVideo &&
2071 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2072 m_bVideoHdrEnabled = true;
2073 else
2074 m_bVideoHdrEnabled = false;
2075
2076
Thierry Strudel3d639192016-09-09 11:52:26 -07002077 /* Check if num_streams is sane */
2078 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2079 rawStreamCnt > MAX_RAW_STREAMS ||
2080 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2081         LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2082 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2083 pthread_mutex_unlock(&mMutex);
2084 return -EINVAL;
2085 }
2086 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002087 if (isZsl && m_bIs4KVideo) {
2088 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002089 pthread_mutex_unlock(&mMutex);
2090 return -EINVAL;
2091 }
2092 /* Check if stream sizes are sane */
2093 if (numStreamsOnEncoder > 2) {
2094 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2095 pthread_mutex_unlock(&mMutex);
2096 return -EINVAL;
2097 } else if (1 < numStreamsOnEncoder){
2098 bUseCommonFeatureMask = true;
2099 LOGH("Multiple streams above max viewfinder size, common mask needed");
2100 }
2101
2102 /* Check if BLOB size is greater than 4k in 4k recording case */
2103 if (m_bIs4KVideo && bJpegExceeds4K) {
2104 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2105 pthread_mutex_unlock(&mMutex);
2106 return -EINVAL;
2107 }
2108
Emilian Peev7650c122017-01-19 08:24:33 -08002109 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2110 depthPresent) {
2111 LOGE("HAL doesn't support depth streams in HFR mode!");
2112 pthread_mutex_unlock(&mMutex);
2113 return -EINVAL;
2114 }
2115
Thierry Strudel3d639192016-09-09 11:52:26 -07002116 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2117 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2118 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2119 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2120 // configurations:
2121 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2122 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2123 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2124 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2125 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2126 __func__);
2127 pthread_mutex_unlock(&mMutex);
2128 return -EINVAL;
2129 }
2130
2131     // the YUV stream's size is greater than or equal to the JPEG size, set common
2132 // the YUV stream's size is greater or equal to the JPEG size, set common
2133 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2134 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2135 jpegSize.width, jpegSize.height) &&
2136 largeYuv888Size.width > jpegSize.width &&
2137 largeYuv888Size.height > jpegSize.height) {
2138 bYuv888OverrideJpeg = true;
2139 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2140 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2141 }
2142
2143 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2144 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2145 commonFeatureMask);
2146 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2147 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2148
2149 rc = validateStreamDimensions(streamList);
2150 if (rc == NO_ERROR) {
2151 rc = validateStreamRotations(streamList);
2152 }
2153 if (rc != NO_ERROR) {
2154 LOGE("Invalid stream configuration requested!");
2155 pthread_mutex_unlock(&mMutex);
2156 return rc;
2157 }
2158
Emilian Peev0f3c3162017-03-15 12:57:46 +00002159 if (1 < pdStatCount) {
2160 LOGE("HAL doesn't support multiple PD streams");
2161 pthread_mutex_unlock(&mMutex);
2162 return -EINVAL;
2163 }
2164
2165 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2166 (1 == pdStatCount)) {
2167 LOGE("HAL doesn't support PD streams in HFR mode!");
2168 pthread_mutex_unlock(&mMutex);
2169 return -EINVAL;
2170 }
2171
Thierry Strudel3d639192016-09-09 11:52:26 -07002172 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2173 for (size_t i = 0; i < streamList->num_streams; i++) {
2174 camera3_stream_t *newStream = streamList->streams[i];
2175 LOGH("newStream type = %d, stream format = %d "
2176 "stream size : %d x %d, stream rotation = %d",
2177 newStream->stream_type, newStream->format,
2178 newStream->width, newStream->height, newStream->rotation);
2179 //if the stream is in the mStreamList validate it
2180 bool stream_exists = false;
2181 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2182 it != mStreamInfo.end(); it++) {
2183 if ((*it)->stream == newStream) {
2184 QCamera3ProcessingChannel *channel =
2185 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2186 stream_exists = true;
2187 if (channel)
2188 delete channel;
2189 (*it)->status = VALID;
2190 (*it)->stream->priv = NULL;
2191 (*it)->channel = NULL;
2192 }
2193 }
2194 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2195 //new stream
2196 stream_info_t* stream_info;
2197 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2198 if (!stream_info) {
2199 LOGE("Could not allocate stream info");
2200 rc = -ENOMEM;
2201 pthread_mutex_unlock(&mMutex);
2202 return rc;
2203 }
2204 stream_info->stream = newStream;
2205 stream_info->status = VALID;
2206 stream_info->channel = NULL;
2207 mStreamInfo.push_back(stream_info);
2208 }
2209 /* Covers Opaque ZSL and API1 F/W ZSL */
2210 if (IS_USAGE_ZSL(newStream->usage)
2211 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2212 if (zslStream != NULL) {
2213 LOGE("Multiple input/reprocess streams requested!");
2214 pthread_mutex_unlock(&mMutex);
2215 return BAD_VALUE;
2216 }
2217 zslStream = newStream;
2218 }
2219 /* Covers YUV reprocess */
2220 if (inputStream != NULL) {
2221 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2222 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2223 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2224 && inputStream->width == newStream->width
2225 && inputStream->height == newStream->height) {
2226 if (zslStream != NULL) {
2227                 /* This scenario indicates that multiple YUV streams with the same
2228                  * size as the input stream have been requested. Since the zsl stream
2229                  * handle is solely used for the purpose of overriding the size of
2230                  * streams which share h/w streams, we will just make a guess here as
2231                  * to which of the streams is a ZSL stream; this will be refactored
2232                  * once we have generic logic for streams sharing encoder output
2233                  */
2234 LOGH("Warning, Multiple ip/reprocess streams requested!");
2235 }
2236 zslStream = newStream;
2237 }
2238 }
2239 }
2240
2241 /* If a zsl stream is set, we know that we have configured at least one input or
2242 bidirectional stream */
2243 if (NULL != zslStream) {
2244 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2245 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2246 mInputStreamInfo.format = zslStream->format;
2247 mInputStreamInfo.usage = zslStream->usage;
2248 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2249 mInputStreamInfo.dim.width,
2250 mInputStreamInfo.dim.height,
2251 mInputStreamInfo.format, mInputStreamInfo.usage);
2252 }
2253
2254 cleanAndSortStreamInfo();
2255 if (mMetadataChannel) {
2256 delete mMetadataChannel;
2257 mMetadataChannel = NULL;
2258 }
2259 if (mSupportChannel) {
2260 delete mSupportChannel;
2261 mSupportChannel = NULL;
2262 }
2263
2264 if (mAnalysisChannel) {
2265 delete mAnalysisChannel;
2266 mAnalysisChannel = NULL;
2267 }
2268
2269 if (mDummyBatchChannel) {
2270 delete mDummyBatchChannel;
2271 mDummyBatchChannel = NULL;
2272 }
2273
Emilian Peev7650c122017-01-19 08:24:33 -08002274 if (mDepthChannel) {
2275 mDepthChannel = NULL;
2276 }
Emilian Peev666f5142017-06-02 16:47:04 +01002277 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002278
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002279 mShutterDispatcher.clear();
2280 mOutputBufferDispatcher.clear();
2281
Thierry Strudel2896d122017-02-23 19:18:03 -08002282 char is_type_value[PROPERTY_VALUE_MAX];
2283 property_get("persist.camera.is_type", is_type_value, "4");
2284 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2285
Binhao Line406f062017-05-03 14:39:44 -07002286 char property_value[PROPERTY_VALUE_MAX];
2287 property_get("persist.camera.gzoom.at", property_value, "0");
2288 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002289 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2290 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2291 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2292 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002293
2294 property_get("persist.camera.gzoom.4k", property_value, "0");
2295 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2296
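    // Debug sketch: "adb shell setprop persist.camera.gzoom.at 3" requests Google zoom
    // on both video (bit 0) and preview (bit 1); it only applies to the back camera, and
    // 4K video additionally needs "persist.camera.gzoom.4k" set to a non-zero value.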
Thierry Strudel3d639192016-09-09 11:52:26 -07002297 //Create metadata channel and initialize it
2298 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2299 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2300 gCamCapability[mCameraId]->color_arrangement);
2301 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2302 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002303 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002304 if (mMetadataChannel == NULL) {
2305 LOGE("failed to allocate metadata channel");
2306 rc = -ENOMEM;
2307 pthread_mutex_unlock(&mMutex);
2308 return rc;
2309 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002310 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002311 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2312 if (rc < 0) {
2313 LOGE("metadata channel initialization failed");
2314 delete mMetadataChannel;
2315 mMetadataChannel = NULL;
2316 pthread_mutex_unlock(&mMutex);
2317 return rc;
2318 }
2319
Thierry Strudel2896d122017-02-23 19:18:03 -08002320 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002321 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002322 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002323 // Keep track of preview/video streams indices.
2324 // There could be more than one preview streams, but only one video stream.
2325 int32_t video_stream_idx = -1;
2326 int32_t preview_stream_idx[streamList->num_streams];
2327 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002328 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2329 /* Allocate channel objects for the requested streams */
2330 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002331
Thierry Strudel3d639192016-09-09 11:52:26 -07002332 camera3_stream_t *newStream = streamList->streams[i];
2333 uint32_t stream_usage = newStream->usage;
2334 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2335 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2336 struct camera_info *p_info = NULL;
2337 pthread_mutex_lock(&gCamLock);
2338 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2339 pthread_mutex_unlock(&gCamLock);
2340 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2341 || IS_USAGE_ZSL(newStream->usage)) &&
2342 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002343 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002344 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002345 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2346 if (bUseCommonFeatureMask)
2347 zsl_ppmask = commonFeatureMask;
2348 else
2349 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002350 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002351 if (numStreamsOnEncoder > 0)
2352 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2353 else
2354 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002355 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002356 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002357 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002358 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002359 LOGH("Input stream configured, reprocess config");
2360 } else {
2361 //for non zsl streams find out the format
2362 switch (newStream->format) {
2363 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2364 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002365 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002366 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2367 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2368 /* add additional features to pp feature mask */
2369 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2370 mStreamConfigInfo.num_streams);
2371
2372 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2373 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2374 CAM_STREAM_TYPE_VIDEO;
2375 if (m_bTnrEnabled && m_bTnrVideo) {
2376 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2377 CAM_QCOM_FEATURE_CPP_TNR;
2378 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2379 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2380 ~CAM_QCOM_FEATURE_CDS;
2381 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002382 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2383 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2384 CAM_QTI_FEATURE_PPEISCORE;
2385 }
Binhao Line406f062017-05-03 14:39:44 -07002386 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2387 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2388 CAM_QCOM_FEATURE_GOOG_ZOOM;
2389 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002390 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002391 } else {
2392 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2393 CAM_STREAM_TYPE_PREVIEW;
2394 if (m_bTnrEnabled && m_bTnrPreview) {
2395 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2396 CAM_QCOM_FEATURE_CPP_TNR;
2397 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2398 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2399 ~CAM_QCOM_FEATURE_CDS;
2400 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002401 if(!m_bSwTnrPreview) {
2402 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2403 ~CAM_QTI_FEATURE_SW_TNR;
2404 }
Binhao Line406f062017-05-03 14:39:44 -07002405 if (is_goog_zoom_preview_enabled) {
2406 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2407 CAM_QCOM_FEATURE_GOOG_ZOOM;
2408 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002409 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002410 padding_info.width_padding = mSurfaceStridePadding;
2411 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002412 previewSize.width = (int32_t)newStream->width;
2413 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002414 }
2415 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2416 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2417 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2418 newStream->height;
2419 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2420 newStream->width;
2421 }
2422 }
2423 break;
2424 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002425 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002426 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2427 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2428 if (bUseCommonFeatureMask)
2429 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2430 commonFeatureMask;
2431 else
2432 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2433 CAM_QCOM_FEATURE_NONE;
2434 } else {
2435 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2436 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2437 }
2438 break;
2439 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002440 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002441 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2442 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2443 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2444 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2445 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002446 /* Remove rotation if it is not supported
2447 for 4K LiveVideo snapshot case (online processing) */
2448 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2449 CAM_QCOM_FEATURE_ROTATION)) {
2450 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2451 &= ~CAM_QCOM_FEATURE_ROTATION;
2452 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002453 } else {
2454 if (bUseCommonFeatureMask &&
2455 isOnEncoder(maxViewfinderSize, newStream->width,
2456 newStream->height)) {
2457 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2458 } else {
2459 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2460 }
2461 }
2462 if (isZsl) {
2463 if (zslStream) {
2464 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2465 (int32_t)zslStream->width;
2466 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2467 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002468 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2469 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002470 } else {
2471 LOGE("Error, No ZSL stream identified");
2472 pthread_mutex_unlock(&mMutex);
2473 return -EINVAL;
2474 }
2475 } else if (m_bIs4KVideo) {
2476 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2477 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2478 } else if (bYuv888OverrideJpeg) {
2479 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2480 (int32_t)largeYuv888Size.width;
2481 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2482 (int32_t)largeYuv888Size.height;
2483 }
2484 break;
2485 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2486 case HAL_PIXEL_FORMAT_RAW16:
2487 case HAL_PIXEL_FORMAT_RAW10:
2488 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2489 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2490 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002491 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2492 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2493 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2494 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2495 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2496 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2497 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2498 gCamCapability[mCameraId]->dt[mPDIndex];
2499 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2500 gCamCapability[mCameraId]->vc[mPDIndex];
2501 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002502 break;
2503 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002504 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002505 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2506 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2507 break;
2508 }
2509 }
2510
2511 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2512 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2513 gCamCapability[mCameraId]->color_arrangement);
2514
2515 if (newStream->priv == NULL) {
2516 //New stream, construct channel
2517 switch (newStream->stream_type) {
2518 case CAMERA3_STREAM_INPUT:
2519 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2520 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2521 break;
2522 case CAMERA3_STREAM_BIDIRECTIONAL:
2523 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2524 GRALLOC_USAGE_HW_CAMERA_WRITE;
2525 break;
2526 case CAMERA3_STREAM_OUTPUT:
2527 /* For video encoding stream, set read/write rarely
2528 * flag so that they may be set to un-cached */
2529 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2530 newStream->usage |=
2531 (GRALLOC_USAGE_SW_READ_RARELY |
2532 GRALLOC_USAGE_SW_WRITE_RARELY |
2533 GRALLOC_USAGE_HW_CAMERA_WRITE);
2534 else if (IS_USAGE_ZSL(newStream->usage))
2535 {
2536 LOGD("ZSL usage flag skipping");
2537 }
2538 else if (newStream == zslStream
2539 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2540 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2541 } else
2542 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2543 break;
2544 default:
2545 LOGE("Invalid stream_type %d", newStream->stream_type);
2546 break;
2547 }
2548
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002549 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002550 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2551 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2552 QCamera3ProcessingChannel *channel = NULL;
2553 switch (newStream->format) {
2554 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2555 if ((newStream->usage &
2556 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2557 (streamList->operation_mode ==
2558 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2559 ) {
2560 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2561 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002562 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002563 this,
2564 newStream,
2565 (cam_stream_type_t)
2566 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2567 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2568 mMetadataChannel,
2569 0); //heap buffers are not required for HFR video channel
2570 if (channel == NULL) {
2571 LOGE("allocation of channel failed");
2572 pthread_mutex_unlock(&mMutex);
2573 return -ENOMEM;
2574 }
2575 //channel->getNumBuffers() will return 0 here so use
2576                     //MAX_INFLIGHT_HFR_REQUESTS
2577 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2578 newStream->priv = channel;
2579 LOGI("num video buffers in HFR mode: %d",
2580 MAX_INFLIGHT_HFR_REQUESTS);
2581 } else {
2582 /* Copy stream contents in HFR preview only case to create
2583 * dummy batch channel so that sensor streaming is in
2584 * HFR mode */
2585 if (!m_bIsVideo && (streamList->operation_mode ==
2586 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2587 mDummyBatchStream = *newStream;
2588 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002589 int bufferCount = MAX_INFLIGHT_REQUESTS;
2590 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2591 CAM_STREAM_TYPE_VIDEO) {
2592 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2593 bufferCount = MAX_VIDEO_BUFFERS;
2594 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002595 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2596 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002597 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002598 this,
2599 newStream,
2600 (cam_stream_type_t)
2601 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2602 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2603 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002604 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002605 if (channel == NULL) {
2606 LOGE("allocation of channel failed");
2607 pthread_mutex_unlock(&mMutex);
2608 return -ENOMEM;
2609 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002610 /* disable UBWC for preview, though supported,
2611 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002612 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002613 (previewSize.width == (int32_t)videoWidth)&&
2614 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002615 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002616 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002617 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002618 /* When goog_zoom is linked to the preview or video stream,
2619                      * disable UBWC on the linked stream */
2620 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2621 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2622 channel->setUBWCEnabled(false);
2623 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002624 newStream->max_buffers = channel->getNumBuffers();
2625 newStream->priv = channel;
2626 }
2627 break;
2628 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2629 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2630 mChannelHandle,
2631 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002632 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002633 this,
2634 newStream,
2635 (cam_stream_type_t)
2636 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2637 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2638 mMetadataChannel);
2639 if (channel == NULL) {
2640 LOGE("allocation of YUV channel failed");
2641 pthread_mutex_unlock(&mMutex);
2642 return -ENOMEM;
2643 }
2644 newStream->max_buffers = channel->getNumBuffers();
2645 newStream->priv = channel;
2646 break;
2647 }
2648 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2649 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002650 case HAL_PIXEL_FORMAT_RAW10: {
2651 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2652 (HAL_DATASPACE_DEPTH != newStream->data_space))
2653 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002654 mRawChannel = new QCamera3RawChannel(
2655 mCameraHandle->camera_handle, mChannelHandle,
2656 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002657 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002658 this, newStream,
2659 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002660 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002661 if (mRawChannel == NULL) {
2662 LOGE("allocation of raw channel failed");
2663 pthread_mutex_unlock(&mMutex);
2664 return -ENOMEM;
2665 }
2666 newStream->max_buffers = mRawChannel->getNumBuffers();
2667 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2668 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002669 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002670 case HAL_PIXEL_FORMAT_BLOB:
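                // A BLOB stream tagged with HAL_DATASPACE_DEPTH carries depth data and is
                // backed by a dedicated depth channel below; any other BLOB stream is
                // treated as a JPEG snapshot stream and is backed by the picture channel.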
Emilian Peev7650c122017-01-19 08:24:33 -08002671 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2672 mDepthChannel = new QCamera3DepthChannel(
2673 mCameraHandle->camera_handle, mChannelHandle,
2674 mCameraHandle->ops, NULL, NULL, &padding_info,
2675 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2676 mMetadataChannel);
2677 if (NULL == mDepthChannel) {
2678 LOGE("Allocation of depth channel failed");
2679 pthread_mutex_unlock(&mMutex);
2680 return NO_MEMORY;
2681 }
2682 newStream->priv = mDepthChannel;
2683 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2684 } else {
2685 // Max live snapshot inflight buffer is 1. This is to mitigate
2686 // frame drop issues for video snapshot. The more buffers being
2687 // allocated, the more frame drops there are.
2688 mPictureChannel = new QCamera3PicChannel(
2689 mCameraHandle->camera_handle, mChannelHandle,
2690 mCameraHandle->ops, captureResultCb,
2691 setBufferErrorStatus, &padding_info, this, newStream,
2692 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2693 m_bIs4KVideo, isZsl, mMetadataChannel,
2694 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2695 if (mPictureChannel == NULL) {
2696 LOGE("allocation of channel failed");
2697 pthread_mutex_unlock(&mMutex);
2698 return -ENOMEM;
2699 }
2700 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2701 newStream->max_buffers = mPictureChannel->getNumBuffers();
2702 mPictureChannel->overrideYuvSize(
2703 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2704 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002705 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 break;
2707
2708 default:
2709 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002710 pthread_mutex_unlock(&mMutex);
2711 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002712 }
2713 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2714 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2715 } else {
2716 LOGE("Error, Unknown stream type");
2717 pthread_mutex_unlock(&mMutex);
2718 return -EINVAL;
2719 }
2720
2721 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002722 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002723 // Here we only care whether it's EIS3 or not
2724 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2725 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2726 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2727 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002728 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002729 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002730 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002731 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2732 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2733 }
2734 }
2735
2736 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2737 it != mStreamInfo.end(); it++) {
2738 if ((*it)->stream == newStream) {
2739 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2740 break;
2741 }
2742 }
2743 } else {
2744 // Channel already exists for this stream
2745 // Do nothing for now
2746 }
2747 padding_info = gCamCapability[mCameraId]->padding_info;
2748
Emilian Peev7650c122017-01-19 08:24:33 -08002749 /* Do not add entries for input&depth stream in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002750 * since there is no real stream associated with it
2751 */
Emilian Peev7650c122017-01-19 08:24:33 -08002752 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002753 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2754 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002755 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002756 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002757 }
2758
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002759 // Let buffer dispatcher know the configured streams.
2760 mOutputBufferDispatcher.configureStreams(streamList);
2761
Binhao Lincdb362a2017-04-20 13:31:54 -07002762 // By default, preview stream TNR is disabled.
2763 // Enable TNR to the preview stream if all conditions below are satisfied:
2764 // 1. resolution <= 1080p.
2765 // 2. preview resolution == video resolution.
2766 // 3. video stream TNR is enabled.
2767 // 4. EIS2.0
2768 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2769 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2770 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2771 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2772 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2773 video_stream->width == preview_stream->width &&
2774 video_stream->height == preview_stream->height) {
2775 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2776 CAM_QCOM_FEATURE_CPP_TNR;
2777 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2778 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2779 ~CAM_QCOM_FEATURE_CDS;
2780 }
2781 }
2782
Thierry Strudel2896d122017-02-23 19:18:03 -08002783 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2784 onlyRaw = false;
2785 }
2786
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002787 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002788 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002789 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002790 cam_analysis_info_t analysisInfo;
2791 int32_t ret = NO_ERROR;
2792 ret = mCommon.getAnalysisInfo(
2793 FALSE,
2794 analysisFeatureMask,
2795 &analysisInfo);
2796 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002797 cam_color_filter_arrangement_t analysis_color_arrangement =
2798 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2799 CAM_FILTER_ARRANGEMENT_Y :
2800 gCamCapability[mCameraId]->color_arrangement);
2801 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2802 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002803 cam_dimension_t analysisDim;
2804 analysisDim = mCommon.getMatchingDimension(previewSize,
2805 analysisInfo.analysis_recommended_res);
2806
2807 mAnalysisChannel = new QCamera3SupportChannel(
2808 mCameraHandle->camera_handle,
2809 mChannelHandle,
2810 mCameraHandle->ops,
2811 &analysisInfo.analysis_padding_info,
2812 analysisFeatureMask,
2813 CAM_STREAM_TYPE_ANALYSIS,
2814 &analysisDim,
2815 (analysisInfo.analysis_format
2816 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2817 : CAM_FORMAT_YUV_420_NV21),
2818 analysisInfo.hw_analysis_supported,
2819 gCamCapability[mCameraId]->color_arrangement,
2820 this,
2821 0); // force buffer count to 0
2822 } else {
2823 LOGW("getAnalysisInfo failed, ret = %d", ret);
2824 }
2825 if (!mAnalysisChannel) {
2826 LOGW("Analysis channel cannot be created");
2827 }
2828 }
2829
Thierry Strudel3d639192016-09-09 11:52:26 -07002830 //RAW DUMP channel
2831 if (mEnableRawDump && isRawStreamRequested == false){
2832 cam_dimension_t rawDumpSize;
2833 rawDumpSize = getMaxRawSize(mCameraId);
2834 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2835 setPAAFSupport(rawDumpFeatureMask,
2836 CAM_STREAM_TYPE_RAW,
2837 gCamCapability[mCameraId]->color_arrangement);
2838 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2839 mChannelHandle,
2840 mCameraHandle->ops,
2841 rawDumpSize,
2842 &padding_info,
2843 this, rawDumpFeatureMask);
2844 if (!mRawDumpChannel) {
2845 LOGE("Raw Dump channel cannot be created");
2846 pthread_mutex_unlock(&mMutex);
2847 return -ENOMEM;
2848 }
2849 }
2850
Thierry Strudel3d639192016-09-09 11:52:26 -07002851 if (mAnalysisChannel) {
2852 cam_analysis_info_t analysisInfo;
2853 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2854 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2855 CAM_STREAM_TYPE_ANALYSIS;
2856 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2857 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002858 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002859 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2860 &analysisInfo);
2861 if (rc != NO_ERROR) {
2862 LOGE("getAnalysisInfo failed, ret = %d", rc);
2863 pthread_mutex_unlock(&mMutex);
2864 return rc;
2865 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002866 cam_color_filter_arrangement_t analysis_color_arrangement =
2867 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2868 CAM_FILTER_ARRANGEMENT_Y :
2869 gCamCapability[mCameraId]->color_arrangement);
2870 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2871 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2872 analysis_color_arrangement);
2873
Thierry Strudel3d639192016-09-09 11:52:26 -07002874 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002875 mCommon.getMatchingDimension(previewSize,
2876 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002877 mStreamConfigInfo.num_streams++;
2878 }
2879
Thierry Strudel2896d122017-02-23 19:18:03 -08002880 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002881 cam_analysis_info_t supportInfo;
2882 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2883 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2884 setPAAFSupport(callbackFeatureMask,
2885 CAM_STREAM_TYPE_CALLBACK,
2886 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002887 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002888 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002889 if (ret != NO_ERROR) {
2890 /* Ignore the error for Mono camera
2891 * because the PAAF bit mask is only set
2892 * for CAM_STREAM_TYPE_ANALYSIS stream type
2893 */
2894 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2895 LOGW("getAnalysisInfo failed, ret = %d", ret);
2896 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002897 }
2898 mSupportChannel = new QCamera3SupportChannel(
2899 mCameraHandle->camera_handle,
2900 mChannelHandle,
2901 mCameraHandle->ops,
2902 &gCamCapability[mCameraId]->padding_info,
2903 callbackFeatureMask,
2904 CAM_STREAM_TYPE_CALLBACK,
2905 &QCamera3SupportChannel::kDim,
2906 CAM_FORMAT_YUV_420_NV21,
2907 supportInfo.hw_analysis_supported,
2908 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002909 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002910 if (!mSupportChannel) {
2911 LOGE("dummy channel cannot be created");
2912 pthread_mutex_unlock(&mMutex);
2913 return -ENOMEM;
2914 }
2915 }
2916
2917 if (mSupportChannel) {
2918 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2919 QCamera3SupportChannel::kDim;
2920 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2921 CAM_STREAM_TYPE_CALLBACK;
2922 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2923 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2924 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2925 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2926 gCamCapability[mCameraId]->color_arrangement);
2927 mStreamConfigInfo.num_streams++;
2928 }
2929
2930 if (mRawDumpChannel) {
2931 cam_dimension_t rawSize;
2932 rawSize = getMaxRawSize(mCameraId);
2933 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2934 rawSize;
2935 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2936 CAM_STREAM_TYPE_RAW;
2937 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2938 CAM_QCOM_FEATURE_NONE;
2939 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2940 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2941 gCamCapability[mCameraId]->color_arrangement);
2942 mStreamConfigInfo.num_streams++;
2943 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002944
2945 if (mHdrPlusRawSrcChannel) {
2946 cam_dimension_t rawSize;
2947 rawSize = getMaxRawSize(mCameraId);
2948 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2949 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2950 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2951 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2952 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2953 gCamCapability[mCameraId]->color_arrangement);
2954 mStreamConfigInfo.num_streams++;
2955 }
2956
Thierry Strudel3d639192016-09-09 11:52:26 -07002957 /* In HFR mode, if video stream is not added, create a dummy channel so that
2958 * ISP can create a batch mode even for preview only case. This channel is
2959 * never 'start'ed (no stream-on), it is only 'initialized' */
2960 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2961 !m_bIsVideo) {
2962 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2963 setPAAFSupport(dummyFeatureMask,
2964 CAM_STREAM_TYPE_VIDEO,
2965 gCamCapability[mCameraId]->color_arrangement);
2966 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2967 mChannelHandle,
2968 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002969 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002970 this,
2971 &mDummyBatchStream,
2972 CAM_STREAM_TYPE_VIDEO,
2973 dummyFeatureMask,
2974 mMetadataChannel);
2975 if (NULL == mDummyBatchChannel) {
2976 LOGE("creation of mDummyBatchChannel failed."
2977 " Preview will use non-hfr sensor mode ");
2978 }
2979 }
2980 if (mDummyBatchChannel) {
2981 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2982 mDummyBatchStream.width;
2983 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2984 mDummyBatchStream.height;
2985 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2986 CAM_STREAM_TYPE_VIDEO;
2987 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2988 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2989 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2990 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2991 gCamCapability[mCameraId]->color_arrangement);
2992 mStreamConfigInfo.num_streams++;
2993 }
2994
2995 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2996 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002997 m_bIs4KVideo ? 0 :
2998 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002999
3000 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3001 for (pendingRequestIterator i = mPendingRequestsList.begin();
3002 i != mPendingRequestsList.end();) {
3003 i = erasePendingRequest(i);
3004 }
3005 mPendingFrameDropList.clear();
3006 // Initialize/Reset the pending buffers list
3007 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3008 req.mPendingBufferList.clear();
3009 }
3010 mPendingBuffersMap.mPendingBuffersInRequest.clear();
3011
Thierry Strudel3d639192016-09-09 11:52:26 -07003012 mCurJpegMeta.clear();
3013 //Get min frame duration for this streams configuration
3014 deriveMinFrameDuration();
3015
Chien-Yu Chenee335912017-02-09 17:53:20 -08003016 mFirstPreviewIntentSeen = false;
3017
3018 // Disable HRD+ if it's enabled;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003019 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003020 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
3021 finishHdrPlusClientOpeningLocked(l);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003022 disableHdrPlusModeLocked();
3023 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003024
Thierry Strudel3d639192016-09-09 11:52:26 -07003025 // Update state
3026 mState = CONFIGURED;
3027
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003028 mFirstMetadataCallback = true;
3029
Thierry Strudel3d639192016-09-09 11:52:26 -07003030 pthread_mutex_unlock(&mMutex);
3031
3032 return rc;
3033}
3034
3035/*===========================================================================
3036 * FUNCTION : validateCaptureRequest
3037 *
3038 * DESCRIPTION: validate a capture request from camera service
3039 *
3040 * PARAMETERS :
3041 * @request : request from framework to process
3042 *
3043 * RETURN :
3044 *
3045 *==========================================================================*/
3046int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003047 camera3_capture_request_t *request,
3048 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003049{
3050 ssize_t idx = 0;
3051 const camera3_stream_buffer_t *b;
3052 CameraMetadata meta;
3053
3054 /* Sanity check the request */
3055 if (request == NULL) {
3056 LOGE("NULL capture request");
3057 return BAD_VALUE;
3058 }
3059
3060 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3061 /*settings cannot be null for the first request*/
3062 return BAD_VALUE;
3063 }
3064
3065 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003066 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3067 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003068 LOGE("Request %d: No output buffers provided!",
3069 frameNumber);
3070 return BAD_VALUE;
3071 }
3072 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3073 LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
3074 request->num_output_buffers, MAX_NUM_STREAMS);
3075 return BAD_VALUE;
3076 }
3077 if (request->input_buffer != NULL) {
3078 b = request->input_buffer;
3079 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3080 LOGE("Request %d: Buffer %ld: Status not OK!",
3081 frameNumber, (long)idx);
3082 return BAD_VALUE;
3083 }
3084 if (b->release_fence != -1) {
3085 LOGE("Request %d: Buffer %ld: Has a release fence!",
3086 frameNumber, (long)idx);
3087 return BAD_VALUE;
3088 }
3089 if (b->buffer == NULL) {
3090 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3091 frameNumber, (long)idx);
3092 return BAD_VALUE;
3093 }
3094 }
3095
3096 // Validate all buffers
3097 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003098 if (b == NULL) {
3099 return BAD_VALUE;
3100 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003101 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003102 QCamera3ProcessingChannel *channel =
3103 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3104 if (channel == NULL) {
3105 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3106 frameNumber, (long)idx);
3107 return BAD_VALUE;
3108 }
3109 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3110 LOGE("Request %d: Buffer %ld: Status not OK!",
3111 frameNumber, (long)idx);
3112 return BAD_VALUE;
3113 }
3114 if (b->release_fence != -1) {
3115 LOGE("Request %d: Buffer %ld: Has a release fence!",
3116 frameNumber, (long)idx);
3117 return BAD_VALUE;
3118 }
3119 if (b->buffer == NULL) {
3120 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3121 frameNumber, (long)idx);
3122 return BAD_VALUE;
3123 }
3124 if (*(b->buffer) == NULL) {
3125 LOGE("Request %d: Buffer %ld: NULL private handle!",
3126 frameNumber, (long)idx);
3127 return BAD_VALUE;
3128 }
3129 idx++;
3130 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003131 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003132 return NO_ERROR;
3133}
3134
3135/*===========================================================================
3136 * FUNCTION : deriveMinFrameDuration
3137 *
3138 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3139 * on currently configured streams.
3140 *
3141 * PARAMETERS : NONE
3142 *
3143 * RETURN : NONE
3144 *
3145 *==========================================================================*/
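/* Illustrative walk-through (the stream sizes below are assumed, not taken from
 * any particular sensor): with a 1920x1080 processed stream and a 4000x3000 JPEG
 * stream, the JPEG dimension dominates maxProcessedDim, so the smallest supported
 * RAW dimension of at least 4000x3000 pixels is picked as maxRawDim, and the
 * minimum processed/JPEG/RAW frame durations are then looked up from the
 * capability tables for those matching dimensions. */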
3146void QCamera3HardwareInterface::deriveMinFrameDuration()
3147{
3148 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3149
3150 maxJpegDim = 0;
3151 maxProcessedDim = 0;
3152 maxRawDim = 0;
3153
3154 // Figure out maximum jpeg, processed, and raw dimensions
3155 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3156 it != mStreamInfo.end(); it++) {
3157
3158 // Input stream doesn't have valid stream_type
3159 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3160 continue;
3161
3162 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3163 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3164 if (dimension > maxJpegDim)
3165 maxJpegDim = dimension;
3166 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3167 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3168 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3169 if (dimension > maxRawDim)
3170 maxRawDim = dimension;
3171 } else {
3172 if (dimension > maxProcessedDim)
3173 maxProcessedDim = dimension;
3174 }
3175 }
3176
3177 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3178 MAX_SIZES_CNT);
3179
3180 //Assume all jpeg dimensions are in processed dimensions.
3181 if (maxJpegDim > maxProcessedDim)
3182 maxProcessedDim = maxJpegDim;
3183 //Find the smallest raw dimension that is greater or equal to jpeg dimension
3184 if (maxProcessedDim > maxRawDim) {
3185 maxRawDim = INT32_MAX;
3186
3187 for (size_t i = 0; i < count; i++) {
3188 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3189 gCamCapability[mCameraId]->raw_dim[i].height;
3190 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3191 maxRawDim = dimension;
3192 }
3193 }
3194
3195 //Find minimum durations for processed, jpeg, and raw
3196 for (size_t i = 0; i < count; i++) {
3197 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3198 gCamCapability[mCameraId]->raw_dim[i].height) {
3199 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3200 break;
3201 }
3202 }
3203 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3204 for (size_t i = 0; i < count; i++) {
3205 if (maxProcessedDim ==
3206 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3207 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3208 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3209 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3210 break;
3211 }
3212 }
3213}
3214
3215/*===========================================================================
3216 * FUNCTION : getMinFrameDuration
3217 *
3218 * DESCRIPTION: get minimum frame duration based on the current minimum frame durations
3219 * and current request configuration.
3220 *
3221 * PARAMETERS : @request: request sent by the frameworks
3222 *
3223 * RETURN : min frame duration for a particular request
3224 *
3225 *==========================================================================*/
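/* For example (request composition assumed): a request carrying only processed
 * and RAW buffers is bounded by MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
 * once a BLOB (JPEG) buffer is part of the request, mMinJpegFrameDuration is
 * folded into that maximum as well. */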
3226int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3227{
3228 bool hasJpegStream = false;
3229 bool hasRawStream = false;
3230 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3231 const camera3_stream_t *stream = request->output_buffers[i].stream;
3232 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3233 hasJpegStream = true;
3234 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3235 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3236 stream->format == HAL_PIXEL_FORMAT_RAW16)
3237 hasRawStream = true;
3238 }
3239
3240 if (!hasJpegStream)
3241 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3242 else
3243 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3244}
3245
3246/*===========================================================================
3247 * FUNCTION : handleBuffersDuringFlushLock
3248 *
3249 * DESCRIPTION: Account for buffers returned from back-end during flush
3250 * This function is executed while mMutex is held by the caller.
3251 *
3252 * PARAMETERS :
3253 * @buffer: image buffer for the callback
3254 *
3255 * RETURN :
3256 *==========================================================================*/
3257void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3258{
3259 bool buffer_found = false;
3260 for (List<PendingBuffersInRequest>::iterator req =
3261 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3262 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3263 for (List<PendingBufferInfo>::iterator i =
3264 req->mPendingBufferList.begin();
3265 i != req->mPendingBufferList.end(); i++) {
3266 if (i->buffer == buffer->buffer) {
3267 mPendingBuffersMap.numPendingBufsAtFlush--;
3268 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3269 buffer->buffer, req->frame_number,
3270 mPendingBuffersMap.numPendingBufsAtFlush);
3271 buffer_found = true;
3272 break;
3273 }
3274 }
3275 if (buffer_found) {
3276 break;
3277 }
3278 }
3279 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3280 //signal the flush()
3281 LOGD("All buffers returned to HAL. Continue flush");
3282 pthread_cond_signal(&mBuffersCond);
3283 }
3284}
3285
Thierry Strudel3d639192016-09-09 11:52:26 -07003286/*===========================================================================
3287 * FUNCTION : handleBatchMetadata
3288 *
3289 * DESCRIPTION: Handles metadata buffer callback in batch mode
3290 *
3291 * PARAMETERS : @metadata_buf: metadata buffer
3292 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3293 * the meta buf in this method
3294 *
3295 * RETURN :
3296 *
3297 *==========================================================================*/
3298void QCamera3HardwareInterface::handleBatchMetadata(
3299 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3300{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003301 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003302
3303 if (NULL == metadata_buf) {
3304 LOGE("metadata_buf is NULL");
3305 return;
3306 }
3307 /* In batch mode, the metdata will contain the frame number and timestamp of
3308 * the last frame in the batch. Eg: a batch containing buffers from request
3309 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3310 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3311 * multiple process_capture_results */
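    /* Worked example (batch contents assumed): if the batch covered requests
     * 5..8, the metadata carries last_frame_number = 8, first_frame_number is
     * looked up from mPendingBatchMap (5 here), frameNumDiff = 8 + 1 - 5 = 4,
     * and the loop below re-issues metadata for frames 5, 6, 7 and 8 with
     * interpolated frame numbers and timestamps. */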
3312 metadata_buffer_t *metadata =
3313 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3314 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3315 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3316 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3317 uint32_t frame_number = 0, urgent_frame_number = 0;
3318 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3319 bool invalid_metadata = false;
3320 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3321 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003322 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003323
3324 int32_t *p_frame_number_valid =
3325 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3326 uint32_t *p_frame_number =
3327 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3328 int64_t *p_capture_time =
3329 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3330 int32_t *p_urgent_frame_number_valid =
3331 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3332 uint32_t *p_urgent_frame_number =
3333 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3334
3335 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3336 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3337 (NULL == p_urgent_frame_number)) {
3338 LOGE("Invalid metadata");
3339 invalid_metadata = true;
3340 } else {
3341 frame_number_valid = *p_frame_number_valid;
3342 last_frame_number = *p_frame_number;
3343 last_frame_capture_time = *p_capture_time;
3344 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3345 last_urgent_frame_number = *p_urgent_frame_number;
3346 }
3347
3348 /* In batch mode, when no video buffers are requested, set_parms are sent
3349 * for every capture_request. The difference between consecutive urgent
3350 * frame numbers and frame numbers should be used to interpolate the
3351 * corresponding frame numbers and time stamps */
3352 pthread_mutex_lock(&mMutex);
3353 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003354 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3355 if(idx < 0) {
3356 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3357 last_urgent_frame_number);
3358 mState = ERROR;
3359 pthread_mutex_unlock(&mMutex);
3360 return;
3361 }
3362 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003363 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3364 first_urgent_frame_number;
3365
3366 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3367 urgent_frame_number_valid,
3368 first_urgent_frame_number, last_urgent_frame_number);
3369 }
3370
3371 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003372 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3373 if(idx < 0) {
3374 LOGE("Invalid frame number received: %d. Irrecoverable error",
3375 last_frame_number);
3376 mState = ERROR;
3377 pthread_mutex_unlock(&mMutex);
3378 return;
3379 }
3380 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003381 frameNumDiff = last_frame_number + 1 -
3382 first_frame_number;
3383 mPendingBatchMap.removeItem(last_frame_number);
3384
3385 LOGD("frm: valid: %d frm_num: %d - %d",
3386 frame_number_valid,
3387 first_frame_number, last_frame_number);
3388
3389 }
3390 pthread_mutex_unlock(&mMutex);
3391
3392 if (urgent_frame_number_valid || frame_number_valid) {
3393 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3394 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3395 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3396 urgentFrameNumDiff, last_urgent_frame_number);
3397 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3398 LOGE("frameNumDiff: %d frameNum: %d",
3399 frameNumDiff, last_frame_number);
3400 }
3401
3402 for (size_t i = 0; i < loopCount; i++) {
3403 /* handleMetadataWithLock is called even for invalid_metadata for
3404 * pipeline depth calculation */
3405 if (!invalid_metadata) {
3406 /* Infer frame number. Batch metadata contains frame number of the
3407 * last frame */
3408 if (urgent_frame_number_valid) {
3409 if (i < urgentFrameNumDiff) {
3410 urgent_frame_number =
3411 first_urgent_frame_number + i;
3412 LOGD("inferred urgent frame_number: %d",
3413 urgent_frame_number);
3414 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3415 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3416 } else {
3417 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3418 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3419 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3420 }
3421 }
3422
3423 /* Infer frame number. Batch metadata contains frame number of the
3424 * last frame */
3425 if (frame_number_valid) {
3426 if (i < frameNumDiff) {
3427 frame_number = first_frame_number + i;
3428 LOGD("inferred frame_number: %d", frame_number);
3429 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3430 CAM_INTF_META_FRAME_NUMBER, frame_number);
3431 } else {
3432 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3433 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3434 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3435 }
3436 }
3437
3438 if (last_frame_capture_time) {
3439 //Infer timestamp
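                // Example (values assumed): at mHFRVideoFps = 120 and loopCount = 4,
                // consecutive frames are NSEC_PER_SEC / 120 (~8.33 ms) apart, so the
                // first frame is stamped 3 * 8.33 ms (~25 ms) before
                // last_frame_capture_time and frame i gets first + i * 8.33 ms.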
3440 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003441 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003442 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003443 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003444 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3445 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3446 LOGD("batch capture_time: %lld, capture_time: %lld",
3447 last_frame_capture_time, capture_time);
3448 }
3449 }
3450 pthread_mutex_lock(&mMutex);
3451 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003452 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003453 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3454 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003455 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003456 pthread_mutex_unlock(&mMutex);
3457 }
3458
3459 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003460 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003461 mMetadataChannel->bufDone(metadata_buf);
3462 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003463 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003464 }
3465}
3466
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003467void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3468 camera3_error_msg_code_t errorCode)
3469{
3470 camera3_notify_msg_t notify_msg;
3471 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3472 notify_msg.type = CAMERA3_MSG_ERROR;
3473 notify_msg.message.error.error_code = errorCode;
3474 notify_msg.message.error.error_stream = NULL;
3475 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003476 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003477
3478 return;
3479}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003480
3481/*===========================================================================
3482 * FUNCTION : sendPartialMetadataWithLock
3483 *
3484 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3485 *
3486 * PARAMETERS : @metadata: metadata buffer
3487 * @requestIter: The iterator for the pending capture request for
3488 * which the partial result is being sent
3489 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3490 * last urgent metadata in a batch. Always true for non-batch mode
3491 *
3492 * RETURN :
3493 *
3494 *==========================================================================*/
3495
3496void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3497 metadata_buffer_t *metadata,
3498 const pendingRequestIterator requestIter,
3499 bool lastUrgentMetadataInBatch)
3500{
3501 camera3_capture_result_t result;
3502 memset(&result, 0, sizeof(camera3_capture_result_t));
3503
3504 requestIter->partial_result_cnt++;
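    // This urgent 3A result counts toward the PARTIAL_RESULT_COUNT partial
    // results advertised per request; the final metadata callback is expected
    // to consume the remaining count.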
3505
3506 // Extract 3A metadata
3507 result.result = translateCbUrgentMetadataToResultMetadata(
3508 metadata, lastUrgentMetadataInBatch);
3509 // Populate metadata result
3510 result.frame_number = requestIter->frame_number;
3511 result.num_output_buffers = 0;
3512 result.output_buffers = NULL;
3513 result.partial_result = requestIter->partial_result_cnt;
3514
3515 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003516 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003517 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3518 // Notify HDR+ client about the partial metadata.
3519 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3520 result.partial_result == PARTIAL_RESULT_COUNT);
3521 }
3522 }
3523
3524 orchestrateResult(&result);
3525 LOGD("urgent frame_number = %u", result.frame_number);
3526 free_camera_metadata((camera_metadata_t *)result.result);
3527}
3528
Thierry Strudel3d639192016-09-09 11:52:26 -07003529/*===========================================================================
3530 * FUNCTION : handleMetadataWithLock
3531 *
3532 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3533 *
3534 * PARAMETERS : @metadata_buf: metadata buffer
3535 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3536 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003537 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3538 * last urgent metadata in a batch. Always true for non-batch mode
3539 * @lastMetadataInBatch: Boolean to indicate whether this is the
3540 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003541 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3542 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003543 *
3544 * RETURN :
3545 *
3546 *==========================================================================*/
3547void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003548 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003549 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3550 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003551{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003552 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003553 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3554 //during flush do not send metadata from this thread
3555 LOGD("not sending metadata during flush or when mState is error");
3556 if (free_and_bufdone_meta_buf) {
3557 mMetadataChannel->bufDone(metadata_buf);
3558 free(metadata_buf);
3559 }
3560 return;
3561 }
3562
3563 //not in flush
3564 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3565 int32_t frame_number_valid, urgent_frame_number_valid;
3566 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003567 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003568 nsecs_t currentSysTime;
3569
3570 int32_t *p_frame_number_valid =
3571 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3572 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3573 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003574 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003575 int32_t *p_urgent_frame_number_valid =
3576 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3577 uint32_t *p_urgent_frame_number =
3578 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3579 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3580 metadata) {
3581 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3582 *p_frame_number_valid, *p_frame_number);
3583 }
3584
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003585 camera_metadata_t *resultMetadata = nullptr;
3586
Thierry Strudel3d639192016-09-09 11:52:26 -07003587 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3588 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3589 LOGE("Invalid metadata");
3590 if (free_and_bufdone_meta_buf) {
3591 mMetadataChannel->bufDone(metadata_buf);
3592 free(metadata_buf);
3593 }
3594 goto done_metadata;
3595 }
3596 frame_number_valid = *p_frame_number_valid;
3597 frame_number = *p_frame_number;
3598 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003599 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003600 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3601 urgent_frame_number = *p_urgent_frame_number;
3602 currentSysTime = systemTime(CLOCK_MONOTONIC);
3603
Jason Lee603176d2017-05-31 11:43:27 -07003604 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
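        // Estimate the BOOTTIME-to-MONOTONIC clock offset: sample MONOTONIC
        // immediately before and after BOOTTIME a few times, keep the reading
        // with the smallest sampling gap, and subtract that offset so
        // capture_time is shifted onto the MONOTONIC base when the sensor
        // timestamp has not been calibrated.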
3605 const int tries = 3;
3606 nsecs_t bestGap = 0, measured = 0;
3607 for (int i = 0; i < tries; ++i) {
3608 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3609 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3610 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3611 const nsecs_t gap = tmono2 - tmono;
3612 if (i == 0 || gap < bestGap) {
3613 bestGap = gap;
3614 measured = tbase - ((tmono + tmono2) >> 1);
3615 }
3616 }
3617 capture_time -= measured;
3618 }
3619
Thierry Strudel3d639192016-09-09 11:52:26 -07003620 // Detect if buffers from any requests are overdue
3621 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003622 int64_t timeout;
3623 {
3624 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3625 // If there is a pending HDR+ request, the following requests may be blocked until the
3626 // HDR+ request is done. So allow a longer timeout.
3627 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3628 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3629 }
3630
3631 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003632 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003633 assert(missed.stream->priv);
3634 if (missed.stream->priv) {
3635 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3636 assert(ch->mStreams[0]);
3637 if (ch->mStreams[0]) {
3638 LOGE("Cancel missing frame = %d, buffer = %p,"
3639 "stream type = %d, stream format = %d",
3640 req.frame_number, missed.buffer,
3641 ch->mStreams[0]->getMyType(), missed.stream->format);
3642 ch->timeoutFrame(req.frame_number);
3643 }
3644 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003645 }
3646 }
3647 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003648 //For the very first metadata callback, regardless whether it contains valid
3649 //frame number, send the partial metadata for the jumpstarting requests.
3650 //Note that this has to be done even if the metadata doesn't contain valid
3651 //urgent frame number, because in the case only 1 request is ever submitted
3652 //to HAL, there won't be subsequent valid urgent frame number.
3653 if (mFirstMetadataCallback) {
3654 for (pendingRequestIterator i =
3655 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3656 if (i->bUseFirstPartial) {
3657 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3658 }
3659 }
3660 mFirstMetadataCallback = false;
3661 }
3662
Thierry Strudel3d639192016-09-09 11:52:26 -07003663 //Partial result on process_capture_result for timestamp
3664 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003665 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003666
3667 //Received an urgent Frame Number, handle it
3668 //using partial results
3669 for (pendingRequestIterator i =
3670 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3671 LOGD("Iterator Frame = %d urgent frame = %d",
3672 i->frame_number, urgent_frame_number);
3673
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003674 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003675 (i->partial_result_cnt == 0)) {
3676 LOGE("Error: HAL missed urgent metadata for frame number %d",
3677 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003678 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003679 }
3680
3681 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003682 i->partial_result_cnt == 0) {
3683 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003684 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3685 // Instant AEC settled for this frame.
3686 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3687 mInstantAECSettledFrameNumber = urgent_frame_number;
3688 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003689 break;
3690 }
3691 }
3692 }
3693
3694 if (!frame_number_valid) {
3695 LOGD("Not a valid normal frame number, used as SOF only");
3696 if (free_and_bufdone_meta_buf) {
3697 mMetadataChannel->bufDone(metadata_buf);
3698 free(metadata_buf);
3699 }
3700 goto done_metadata;
3701 }
3702 LOGH("valid frame_number = %u, capture_time = %lld",
3703 frame_number, capture_time);
3704
Emilian Peev7650c122017-01-19 08:24:33 -08003705 if (metadata->is_depth_data_valid) {
3706 handleDepthDataLocked(metadata->depth_data, frame_number);
3707 }
3708
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003709 // Check whether any stream buffer corresponding to this is dropped or not
3710 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3711 // OR check if instant AEC is enabled, then need to drop frames until AEC is settled.
3712 for (auto & pendingRequest : mPendingRequestsList) {
3713 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3714 mInstantAECSettledFrameNumber)) {
3715 camera3_notify_msg_t notify_msg = {};
3716 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003717 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003718 QCamera3ProcessingChannel *channel =
3719 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003720 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003721 if (p_cam_frame_drop) {
3722 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003723 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003724 // Got the stream ID for drop frame.
3725 dropFrame = true;
3726 break;
3727 }
3728 }
3729 } else {
3730 // This is instant AEC case.
3731 // For instant AEC, drop the stream until AEC is settled.
3732 dropFrame = true;
3733 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003734
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003735 if (dropFrame) {
3736 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3737 if (p_cam_frame_drop) {
3738 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003739 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003740 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003741 } else {
3742 // For instant AEC, inform frame drop and frame number
3743 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3744 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003745 pendingRequest.frame_number, streamID,
3746 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003747 }
3748 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003749 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003750 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003751 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003752 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003753 if (p_cam_frame_drop) {
3754 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003755 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003756 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003757 } else {
3758 // For instant AEC, inform frame drop and frame number
3759 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3760 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003761 pendingRequest.frame_number, streamID,
3762 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003763 }
3764 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003765 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003766 PendingFrameDrop.stream_ID = streamID;
3767 // Add the Frame drop info to mPendingFrameDropList
3768 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003769 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003770 }
3771 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003772 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003773
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003774 for (auto & pendingRequest : mPendingRequestsList) {
3775 // Find the pending request with the frame number.
3776 if (pendingRequest.frame_number == frame_number) {
3777 // Update the sensor timestamp.
3778 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003779
Thierry Strudel3d639192016-09-09 11:52:26 -07003780
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003781 /* Set the timestamp in display metadata so that clients aware of
3782 private_handle such as VT can use this un-modified timestamps.
3783 Camera framework is unaware of this timestamp and cannot change this */
Jason Lee603176d2017-05-31 11:43:27 -07003784 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003785
Thierry Strudel3d639192016-09-09 11:52:26 -07003786 // Find channel requiring metadata, meaning internal offline postprocess
3787 // is needed.
3788 //TODO: for now, we don't support two streams requiring metadata at the same time.
3789 // (because we are not making copies, and the metadata buffer is not reference counted.)
3790 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003791 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3792 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003793 if (iter->need_metadata) {
3794 internalPproc = true;
3795 QCamera3ProcessingChannel *channel =
3796 (QCamera3ProcessingChannel *)iter->stream->priv;
3797 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003798 if(p_is_metabuf_queued != NULL) {
3799 *p_is_metabuf_queued = true;
3800 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003801 break;
3802 }
3803 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003804 for (auto itr = pendingRequest.internalRequestList.begin();
3805 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003806 if (itr->need_metadata) {
3807 internalPproc = true;
3808 QCamera3ProcessingChannel *channel =
3809 (QCamera3ProcessingChannel *)itr->stream->priv;
3810 channel->queueReprocMetadata(metadata_buf);
3811 break;
3812 }
3813 }
3814
Thierry Strudel54dc9782017-02-15 12:12:10 -08003815 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003816
3817 bool *enableZsl = nullptr;
3818 if (gExposeEnableZslKey) {
3819 enableZsl = &pendingRequest.enableZsl;
3820 }
3821
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003822 resultMetadata = translateFromHalMetadata(metadata,
3823 pendingRequest.timestamp, pendingRequest.request_id,
3824 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3825 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003826 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003827 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003828 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003829 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003830 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003831 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003832
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003833 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003834
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003835 if (pendingRequest.blob_request) {
3836 //Dump tuning metadata if enabled and available
3837 char prop[PROPERTY_VALUE_MAX];
3838 memset(prop, 0, sizeof(prop));
3839 property_get("persist.camera.dumpmetadata", prop, "0");
3840 int32_t enabled = atoi(prop);
3841 if (enabled && metadata->is_tuning_params_valid) {
3842 dumpMetadataToFile(metadata->tuning_params,
3843 mMetaFrameCount,
3844 enabled,
3845 "Snapshot",
3846 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003847 }
3848 }
3849
3850 if (!internalPproc) {
3851 LOGD("couldn't find need_metadata for this metadata");
3852 // Return metadata buffer
3853 if (free_and_bufdone_meta_buf) {
3854 mMetadataChannel->bufDone(metadata_buf);
3855 free(metadata_buf);
3856 }
3857 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003858
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003859 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003860 }
3861 }
3862
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003863 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3864
3865 // Try to send out capture result metadata.
3866 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003867 return;
3868
Thierry Strudel3d639192016-09-09 11:52:26 -07003869done_metadata:
3870 for (pendingRequestIterator i = mPendingRequestsList.begin();
3871 i != mPendingRequestsList.end() ;i++) {
3872 i->pipeline_depth++;
3873 }
3874 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3875 unblockRequestIfNecessary();
3876}
3877
3878/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003879 * FUNCTION : handleDepthDataLocked
3880 *
3881 * DESCRIPTION: Handles incoming depth data
3882 *
3883 * PARAMETERS : @depthData : Depth data
3884 * @frameNumber: Frame number of the incoming depth data
3885 *
3886 * RETURN :
3887 *
3888 *==========================================================================*/
3889void QCamera3HardwareInterface::handleDepthDataLocked(
3890 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3891 uint32_t currentFrameNumber;
3892 buffer_handle_t *depthBuffer;
3893
3894 if (nullptr == mDepthChannel) {
3895 LOGE("Depth channel not present!");
3896 return;
3897 }
3898
3899 camera3_stream_buffer_t resultBuffer =
3900 {.acquire_fence = -1,
3901 .release_fence = -1,
3902 .status = CAMERA3_BUFFER_STATUS_OK,
3903 .buffer = nullptr,
3904 .stream = mDepthChannel->getStream()};
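    // Drain depth buffers in frame-number order: populate depth data for the
    // matching frame, and return any older buffers with an error status since
    // their depth data will never arrive.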
Emilian Peev7650c122017-01-19 08:24:33 -08003905 do {
3906 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3907 if (nullptr == depthBuffer) {
3908 break;
3909 }
3910
Emilian Peev7650c122017-01-19 08:24:33 -08003911 resultBuffer.buffer = depthBuffer;
3912 if (currentFrameNumber == frameNumber) {
3913 int32_t rc = mDepthChannel->populateDepthData(depthData,
3914 frameNumber);
3915 if (NO_ERROR != rc) {
3916 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3917 } else {
3918 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3919 }
3920 } else if (currentFrameNumber > frameNumber) {
3921 break;
3922 } else {
3923 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3924 {{currentFrameNumber, mDepthChannel->getStream(),
3925 CAMERA3_MSG_ERROR_BUFFER}}};
3926 orchestrateNotify(&notify_msg);
3927
3928            LOGE("Depth data for frame number: %d is missing, "
3929                    "returning buffer error!", currentFrameNumber);
3930 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3931 }
3932 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003933 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003934 } while (currentFrameNumber < frameNumber);
3935}
3936
3937/*===========================================================================
3938 * FUNCTION : notifyErrorFoPendingDepthData
3939 *
3940 * DESCRIPTION: Returns error for any pending depth buffers
3941 *
3942 * PARAMETERS : @depthCh : depth channel that needs to get flushed
3943 *
3944 * RETURN :
3945 *
3946 *==========================================================================*/
3947void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3948 QCamera3DepthChannel *depthCh) {
3949 uint32_t currentFrameNumber;
3950 buffer_handle_t *depthBuffer;
3951
3952 if (nullptr == depthCh) {
3953 return;
3954 }
3955
3956 camera3_notify_msg_t notify_msg =
3957 {.type = CAMERA3_MSG_ERROR,
3958 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3959 camera3_stream_buffer_t resultBuffer =
3960 {.acquire_fence = -1,
3961 .release_fence = -1,
3962 .buffer = nullptr,
3963 .stream = depthCh->getStream(),
3964 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08003965
3966 while (nullptr !=
3967 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3968 depthCh->unmapBuffer(currentFrameNumber);
3969
3970 notify_msg.message.error.frame_number = currentFrameNumber;
3971 orchestrateNotify(&notify_msg);
3972
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003973 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003974 };
3975}
3976
3977/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003978 * FUNCTION : hdrPlusPerfLock
3979 *
3980 * DESCRIPTION: perf lock for HDR+ using custom intent
3981 *
3982 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3983 *
3984 * RETURN : None
3985 *
3986 *==========================================================================*/
3987void QCamera3HardwareInterface::hdrPlusPerfLock(
3988 mm_camera_super_buf_t *metadata_buf)
3989{
3990 if (NULL == metadata_buf) {
3991 LOGE("metadata_buf is NULL");
3992 return;
3993 }
3994 metadata_buffer_t *metadata =
3995 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3996 int32_t *p_frame_number_valid =
3997 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3998 uint32_t *p_frame_number =
3999 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4000
4001 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4002 LOGE("%s: Invalid metadata", __func__);
4003 return;
4004 }
4005
4006    //acquire perf lock for 5 sec after the last HDR frame is captured
4007    //(both pointers were already validated above)
4008    if (*p_frame_number_valid &&
4009            (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004010        mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004011    }
Thierry Strudel3d639192016-09-09 11:52:26 -07004013}
4014
4015/*===========================================================================
4016 * FUNCTION : handleInputBufferWithLock
4017 *
4018 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4019 *
4020 * PARAMETERS : @frame_number: frame number of the input buffer
4021 *
4022 * RETURN :
4023 *
4024 *==========================================================================*/
4025void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4026{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004027 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004028 pendingRequestIterator i = mPendingRequestsList.begin();
4029 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4030 i++;
4031 }
4032 if (i != mPendingRequestsList.end() && i->input_buffer) {
4033 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004034 CameraMetadata settings;
4035 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4036 if(i->settings) {
4037 settings = i->settings;
4038 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4039 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004040 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004041 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004042 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004043 } else {
4044 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004045 }
4046
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004047 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4048 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4049 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004050
4051 camera3_capture_result result;
4052 memset(&result, 0, sizeof(camera3_capture_result));
4053 result.frame_number = frame_number;
4054 result.result = i->settings;
4055 result.input_buffer = i->input_buffer;
4056 result.partial_result = PARTIAL_RESULT_COUNT;
4057
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004058 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004059 LOGD("Input request metadata and input buffer frame_number = %u",
4060 i->frame_number);
4061 i = erasePendingRequest(i);
4062 } else {
4063 LOGE("Could not find input request for frame number %d", frame_number);
4064 }
4065}
4066
4067/*===========================================================================
4068 * FUNCTION : handleBufferWithLock
4069 *
4070 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4071 *
4072 * PARAMETERS : @buffer: image buffer for the callback
4073 * @frame_number: frame number of the image buffer
4074 *
4075 * RETURN :
4076 *
4077 *==========================================================================*/
4078void QCamera3HardwareInterface::handleBufferWithLock(
4079 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4080{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004081 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004082
4083 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4084 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4085 }
4086
Thierry Strudel3d639192016-09-09 11:52:26 -07004087 /* Nothing to be done during error state */
4088 if ((ERROR == mState) || (DEINIT == mState)) {
4089 return;
4090 }
4091 if (mFlushPerf) {
4092 handleBuffersDuringFlushLock(buffer);
4093 return;
4094 }
4095 //not in flush
4096 // If the frame number doesn't exist in the pending request list,
4097 // directly send the buffer to the frameworks, and update pending buffers map
4098 // Otherwise, book-keep the buffer.
4099 pendingRequestIterator i = mPendingRequestsList.begin();
4100 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4101 i++;
4102 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004103
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004104 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004105 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004106 // For a reprocessing request, try to send out result metadata.
4107 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004108 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004109 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004110
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004111 // Check if this frame was dropped.
4112 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4113 m != mPendingFrameDropList.end(); m++) {
4114 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4115 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4116 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4117 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4118 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4119 frame_number, streamID);
4120 m = mPendingFrameDropList.erase(m);
4121 break;
4122 }
4123 }
4124
4125 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4126 LOGH("result frame_number = %d, buffer = %p",
4127 frame_number, buffer->buffer);
4128
4129 mPendingBuffersMap.removeBuf(buffer->buffer);
4130 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4131
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004132 if (mPreviewStarted == false) {
4133 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4134 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004135 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4136
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004137 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4138 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4139 mPreviewStarted = true;
4140
4141 // Set power hint for preview
4142 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4143 }
4144 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004145}
4146
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004147void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004148 const camera_metadata_t *resultMetadata)
4149{
4150 // Find the pending request for this result metadata.
4151 auto requestIter = mPendingRequestsList.begin();
4152 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4153 requestIter++;
4154 }
4155
4156 if (requestIter == mPendingRequestsList.end()) {
4157 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4158 return;
4159 }
4160
4161 // Update the result metadata
4162 requestIter->resultMetadata = resultMetadata;
4163
4164 // Check what type of request this is.
4165 bool liveRequest = false;
4166 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004167 // HDR+ request doesn't have partial results.
4168 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004169 } else if (requestIter->input_buffer != nullptr) {
4170 // Reprocessing request result is the same as settings.
4171 requestIter->resultMetadata = requestIter->settings;
4172 // Reprocessing request doesn't have partial results.
4173 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4174 } else {
4175 liveRequest = true;
4176 requestIter->partial_result_cnt++;
4177 mPendingLiveRequest--;
4178
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004179 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004180 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004181 // For a live request, send the metadata to HDR+ client.
4182 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4183 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4184 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4185 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004186 }
4187 }
4188
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004189 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4190 // to be sent if all previous pending requests are ready to be sent.
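    // Illustrative example (not from a real trace): if the pending requests are
    // {10, 11, 12} and metadata for 11 arrives first, nothing is sent; once
    // metadata for 10 arrives, results for 10 and 11 are dispatched in order,
    // and 12 is held until its own metadata is ready.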
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004191 bool readyToSend = true;
4192
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004193 // Iterate through the pending requests to send out result metadata that are ready. Also if
4194 // this result metadata belongs to a live request, notify errors for previous live requests
4195 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004196 auto iter = mPendingRequestsList.begin();
4197 while (iter != mPendingRequestsList.end()) {
4198 // Check if current pending request is ready. If it's not ready, the following pending
4199 // requests are also not ready.
4200 if (readyToSend && iter->resultMetadata == nullptr) {
4201 readyToSend = false;
4202 }
4203
4204 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4205
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004206 camera3_capture_result_t result = {};
4207 result.frame_number = iter->frame_number;
4208 result.result = iter->resultMetadata;
4209 result.partial_result = iter->partial_result_cnt;
4210
4211 // If this pending buffer has result metadata, we may be able to send out shutter callback
4212 // and result metadata.
4213 if (iter->resultMetadata != nullptr) {
4214 if (!readyToSend) {
4215 // If any of the previous pending request is not ready, this pending request is
4216 // also not ready to send in order to keep shutter callbacks and result metadata
4217 // in order.
4218 iter++;
4219 continue;
4220 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004221 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4222 // If the result metadata belongs to a live request, notify errors for previous pending
4223 // live requests.
4224 mPendingLiveRequest--;
4225
4226 CameraMetadata dummyMetadata;
4227 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4228 result.result = dummyMetadata.release();
4229
4230 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004231
4232 // partial_result should be PARTIAL_RESULT_CNT in case of
4233 // ERROR_RESULT.
4234 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4235 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004236 } else {
4237 iter++;
4238 continue;
4239 }
4240
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004241 result.output_buffers = nullptr;
4242 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004243 orchestrateResult(&result);
4244
4245 // For reprocessing, result metadata is the same as settings so do not free it here to
4246 // avoid double free.
4247 if (result.result != iter->settings) {
4248 free_camera_metadata((camera_metadata_t *)result.result);
4249 }
4250 iter->resultMetadata = nullptr;
4251 iter = erasePendingRequest(iter);
4252 }
4253
4254 if (liveRequest) {
4255 for (auto &iter : mPendingRequestsList) {
4256 // Increment pipeline depth for the following pending requests.
4257 if (iter.frame_number > frameNumber) {
4258 iter.pipeline_depth++;
4259 }
4260 }
4261 }
4262
4263 unblockRequestIfNecessary();
4264}
4265
Thierry Strudel3d639192016-09-09 11:52:26 -07004266/*===========================================================================
4267 * FUNCTION : unblockRequestIfNecessary
4268 *
4269 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4270 * that mMutex is held when this function is called.
4271 *
4272 * PARAMETERS :
4273 *
4274 * RETURN :
4275 *
4276 *==========================================================================*/
4277void QCamera3HardwareInterface::unblockRequestIfNecessary()
4278{
4279 // Unblock process_capture_request
4280 pthread_cond_signal(&mRequestCond);
4281}
4282
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004283/*===========================================================================
4284 * FUNCTION : isHdrSnapshotRequest
4285 *
4286 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4287 *
4288 * PARAMETERS : camera3 request structure
4289 *
4290 * RETURN : true if the request targets an HDR snapshot, false otherwise
4291 *
4292 *==========================================================================*/
4293bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4294{
4295 if (request == NULL) {
4296 LOGE("Invalid request handle");
4297 assert(0);
4298 return false;
4299 }
4300
4301 if (!mForceHdrSnapshot) {
4302 CameraMetadata frame_settings;
4303 frame_settings = request->settings;
4304
4305 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4306 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4307 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4308 return false;
4309 }
4310 } else {
4311 return false;
4312 }
4313
4314 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4315 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4316 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4317 return false;
4318 }
4319 } else {
4320 return false;
4321 }
4322 }
4323
4324 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4325 if (request->output_buffers[i].stream->format
4326 == HAL_PIXEL_FORMAT_BLOB) {
4327 return true;
4328 }
4329 }
4330
4331 return false;
4332}
4333/*===========================================================================
4334 * FUNCTION : orchestrateRequest
4335 *
4336 * DESCRIPTION: Orchestrates a capture request from camera service
4337 *
4338 * PARAMETERS :
4339 * @request : request from framework to process
4340 *
4341 * RETURN : Error status codes
4342 *
4343 *==========================================================================*/
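// Note on the HDR path below: when isHdrSnapshotRequest() returns true and no
// input buffer is attached, this single framework request is expanded into a
// sequence of internal requests (metering/settling frames plus captures at
// lowered, nominal and raised AE compensation), each tracked through its own
// internal frame number in _orchestrationDb.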
4344int32_t QCamera3HardwareInterface::orchestrateRequest(
4345 camera3_capture_request_t *request)
4346{
4347
4348 uint32_t originalFrameNumber = request->frame_number;
4349 uint32_t originalOutputCount = request->num_output_buffers;
4350 const camera_metadata_t *original_settings = request->settings;
4351 List<InternalRequest> internallyRequestedStreams;
4352 List<InternalRequest> emptyInternalList;
4353
4354 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4355 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4356 uint32_t internalFrameNumber;
4357 CameraMetadata modified_meta;
4358
4359
4360 /* Add Blob channel to list of internally requested streams */
4361 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4362 if (request->output_buffers[i].stream->format
4363 == HAL_PIXEL_FORMAT_BLOB) {
4364 InternalRequest streamRequested;
4365 streamRequested.meteringOnly = 1;
4366 streamRequested.need_metadata = 0;
4367 streamRequested.stream = request->output_buffers[i].stream;
4368 internallyRequestedStreams.push_back(streamRequested);
4369 }
4370 }
4371 request->num_output_buffers = 0;
4372 auto itr = internallyRequestedStreams.begin();
4373
4374 /* Modify setting to set compensation */
4375 modified_meta = request->settings;
4376 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4377 uint8_t aeLock = 1;
4378 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4379 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4380 camera_metadata_t *modified_settings = modified_meta.release();
4381 request->settings = modified_settings;
4382
4383 /* Capture Settling & -2x frame */
4384 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4385 request->frame_number = internalFrameNumber;
4386 processCaptureRequest(request, internallyRequestedStreams);
4387
4388 request->num_output_buffers = originalOutputCount;
4389 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4390 request->frame_number = internalFrameNumber;
4391 processCaptureRequest(request, emptyInternalList);
4392 request->num_output_buffers = 0;
4393
4394 modified_meta = modified_settings;
4395 expCompensation = 0;
4396 aeLock = 1;
4397 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4398 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4399 modified_settings = modified_meta.release();
4400 request->settings = modified_settings;
4401
4402 /* Capture Settling & 0X frame */
4403
4404 itr = internallyRequestedStreams.begin();
4405 if (itr == internallyRequestedStreams.end()) {
4406 LOGE("Error Internally Requested Stream list is empty");
4407 assert(0);
4408 } else {
4409 itr->need_metadata = 0;
4410 itr->meteringOnly = 1;
4411 }
4412
4413 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4414 request->frame_number = internalFrameNumber;
4415 processCaptureRequest(request, internallyRequestedStreams);
4416
4417 itr = internallyRequestedStreams.begin();
4418 if (itr == internallyRequestedStreams.end()) {
4419 ALOGE("Error Internally Requested Stream list is empty");
4420 assert(0);
4421 } else {
4422 itr->need_metadata = 1;
4423 itr->meteringOnly = 0;
4424 }
4425
4426 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4427 request->frame_number = internalFrameNumber;
4428 processCaptureRequest(request, internallyRequestedStreams);
4429
4430 /* Capture 2X frame*/
4431 modified_meta = modified_settings;
4432 expCompensation = GB_HDR_2X_STEP_EV;
4433 aeLock = 1;
4434 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4435 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4436 modified_settings = modified_meta.release();
4437 request->settings = modified_settings;
4438
4439 itr = internallyRequestedStreams.begin();
4440 if (itr == internallyRequestedStreams.end()) {
4441 ALOGE("Error Internally Requested Stream list is empty");
4442 assert(0);
4443 } else {
4444 itr->need_metadata = 0;
4445 itr->meteringOnly = 1;
4446 }
4447 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4448 request->frame_number = internalFrameNumber;
4449 processCaptureRequest(request, internallyRequestedStreams);
4450
4451 itr = internallyRequestedStreams.begin();
4452 if (itr == internallyRequestedStreams.end()) {
4453 ALOGE("Error Internally Requested Stream list is empty");
4454 assert(0);
4455 } else {
4456 itr->need_metadata = 1;
4457 itr->meteringOnly = 0;
4458 }
4459
4460 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4461 request->frame_number = internalFrameNumber;
4462 processCaptureRequest(request, internallyRequestedStreams);
4463
4464
4465 /* Capture 2X on original streaming config*/
4466 internallyRequestedStreams.clear();
4467
4468 /* Restore original settings pointer */
4469 request->settings = original_settings;
4470 } else {
4471 uint32_t internalFrameNumber;
4472 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4473 request->frame_number = internalFrameNumber;
4474 return processCaptureRequest(request, internallyRequestedStreams);
4475 }
4476
4477 return NO_ERROR;
4478}
4479
4480/*===========================================================================
4481 * FUNCTION : orchestrateResult
4482 *
4483 * DESCRIPTION: Orchestrates a capture result to camera service
4484 *
4485 * PARAMETERS :
4486 * @result : capture result to be sent to the framework
4487 *
4488 * RETURN :
4489 *
4490 *==========================================================================*/
4491void QCamera3HardwareInterface::orchestrateResult(
4492 camera3_capture_result_t *result)
4493{
4494 uint32_t frameworkFrameNumber;
4495 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4496 frameworkFrameNumber);
4497 if (rc != NO_ERROR) {
4498 LOGE("Cannot find translated frameworkFrameNumber");
4499 assert(0);
4500 } else {
4501 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004502            LOGD("Internal request, dropping the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004503 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004504 if (result->result != NULL) {
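                // If the result metadata carries ANDROID_SYNC_FRAME_NUMBER,
                // rewrite it with the framework-visible frame number before
                // forwarding the result.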
Binhao Lin299ffc92017-04-27 11:22:47 -07004505 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4506 camera_metadata_entry_t entry;
4507 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4508 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004509 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004510 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4511 if (ret != OK)
4512 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004513 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004514 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004515 result->frame_number = frameworkFrameNumber;
4516 mCallbackOps->process_capture_result(mCallbackOps, result);
4517 }
4518 }
4519}
4520
4521/*===========================================================================
4522 * FUNCTION : orchestrateNotify
4523 *
4524 * DESCRIPTION: Orchestrates a notify to camera service
4525 *
4526 * PARAMETERS :
4527 * @notify_msg : notify message to be sent to the framework
4528 *
4529 * RETURN :
4530 *
4531 *==========================================================================*/
4532void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4533{
4534 uint32_t frameworkFrameNumber;
4535 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004536 int32_t rc = NO_ERROR;
4537
4538 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004539 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004540
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004541 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004542 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4543 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4544 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004545 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004546 LOGE("Cannot find translated frameworkFrameNumber");
4547 assert(0);
4548 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004549 }
4550 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004551
4552 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4553        LOGD("Internal request, dropping the notify callback");
4554 } else {
4555 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4556 mCallbackOps->notify(mCallbackOps, notify_msg);
4557 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004558}
4559
4560/*===========================================================================
4561 * FUNCTION : FrameNumberRegistry
4562 *
4563 * DESCRIPTION: Constructor
4564 *
4565 * PARAMETERS :
4566 *
4567 * RETURN :
4568 *
4569 *==========================================================================*/
4570FrameNumberRegistry::FrameNumberRegistry()
4571{
4572 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4573}
4574
4575/*===========================================================================
4576 * FUNCTION : ~FrameNumberRegistry
4577 *
4578 * DESCRIPTION: Destructor
4579 *
4580 * PARAMETERS :
4581 *
4582 * RETURN :
4583 *
4584 *==========================================================================*/
4585FrameNumberRegistry::~FrameNumberRegistry()
4586{
4587}
4588
4589/*===========================================================================
4590 * FUNCTION : purgeOldEntriesLocked
4591 *
4592 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4593 *
4594 * PARAMETERS :
4595 *
4596 * RETURN : NONE
4597 *
4598 *==========================================================================*/
4599void FrameNumberRegistry::purgeOldEntriesLocked()
4600{
4601 while (_register.begin() != _register.end()) {
4602 auto itr = _register.begin();
4603 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4604 _register.erase(itr);
4605 } else {
4606 return;
4607 }
4608 }
4609}
4610
4611/*===========================================================================
4612 * FUNCTION : allocStoreInternalFrameNumber
4613 *
4614 * DESCRIPTION: Method to note down a framework request and associate a new
4615 * internal request number against it
4616 *
4617 * PARAMETERS :
4618 * @frameworkFrameNumber: Identifier given by the framework
4619 * @internalFrameNumber : Output parameter which will hold the newly
4620 *                        generated internal frame number
4621 *
4622 * RETURN : Error code
4623 *
4624 *==========================================================================*/
4625int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4626 uint32_t &internalFrameNumber)
4627{
4628 Mutex::Autolock lock(mRegistryLock);
4629 internalFrameNumber = _nextFreeInternalNumber++;
4630 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4631 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4632 purgeOldEntriesLocked();
4633 return NO_ERROR;
4634}
4635
4636/*===========================================================================
4637 * FUNCTION : generateStoreInternalFrameNumber
4638 *
4639 * DESCRIPTION: Method to generate a new internal request number that is not
4640 *              associated with any framework request
4641 *
4642 * PARAMETERS :
4643 * @internalFrameNumber: Output parameter which will hold the newly
4644 *                       generated internal frame number
4645 *
4646 * RETURN : Error code
4647 *
4648 *==========================================================================*/
4649int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4650{
4651 Mutex::Autolock lock(mRegistryLock);
4652 internalFrameNumber = _nextFreeInternalNumber++;
4653 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4654 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4655 purgeOldEntriesLocked();
4656 return NO_ERROR;
4657}
4658
4659/*===========================================================================
4660 * FUNCTION : getFrameworkFrameNumber
4661 *
4662 * DESCRIPTION: Method to query the framework framenumber given an internal #
4663 *
4664 * PARAMETERS :
4665 * @internalFrameNumber : Internal frame number to look up
4666 * @frameworkFrameNumber: Output parameter holding the framework frame number
4667 *
4668 * RETURN : Error code
4669 *
4670 *==========================================================================*/
4671int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4672 uint32_t &frameworkFrameNumber)
4673{
4674 Mutex::Autolock lock(mRegistryLock);
4675 auto itr = _register.find(internalFrameNumber);
4676 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004677 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004678 return -ENOENT;
4679 }
4680
4681 frameworkFrameNumber = itr->second;
4682 purgeOldEntriesLocked();
4683 return NO_ERROR;
4684}
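// Typical registry flow (summary of the callers seen in this file):
// orchestrateRequest() calls allocStoreInternalFrameNumber() for
// framework-visible requests and generateStoreInternalFrameNumber() for purely
// internal ones; orchestrateResult()/orchestrateNotify() later call
// getFrameworkFrameNumber() and drop anything whose mapping is
// EMPTY_FRAMEWORK_FRAME_NUMBER.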
Thierry Strudel3d639192016-09-09 11:52:26 -07004685
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004686status_t QCamera3HardwareInterface::fillPbStreamConfig(
4687 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4688 QCamera3Channel *channel, uint32_t streamIndex) {
4689 if (config == nullptr) {
4690 LOGE("%s: config is null", __FUNCTION__);
4691 return BAD_VALUE;
4692 }
4693
4694 if (channel == nullptr) {
4695 LOGE("%s: channel is null", __FUNCTION__);
4696 return BAD_VALUE;
4697 }
4698
4699 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4700 if (stream == nullptr) {
4701 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4702 return NAME_NOT_FOUND;
4703 }
4704
4705 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4706 if (streamInfo == nullptr) {
4707 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4708 return NAME_NOT_FOUND;
4709 }
4710
4711 config->id = pbStreamId;
4712 config->image.width = streamInfo->dim.width;
4713 config->image.height = streamInfo->dim.height;
4714 config->image.padding = 0;
4715 config->image.format = pbStreamFormat;
4716
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004717 uint32_t totalPlaneSize = 0;
4718
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004719 // Fill plane information.
4720 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4721 pbcamera::PlaneConfiguration plane;
4722 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4723 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4724 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004725
4726 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004727 }
4728
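    // Padding accounts for any bytes in the frame beyond the sum of the
    // per-plane (stride * scanline) sizes reported by the backend.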
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004729 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004730 return OK;
4731}
4732
Thierry Strudel3d639192016-09-09 11:52:26 -07004733/*===========================================================================
4734 * FUNCTION : processCaptureRequest
4735 *
4736 * DESCRIPTION: process a capture request from camera service
4737 *
4738 * PARAMETERS :
4739 * @request : request from framework to process
4740 *
4741 * RETURN :
4742 *
4743 *==========================================================================*/
4744int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004745 camera3_capture_request_t *request,
4746 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004747{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004748 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004749 int rc = NO_ERROR;
4750 int32_t request_id;
4751 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004752 bool isVidBufRequested = false;
4753 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004754 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004755
4756 pthread_mutex_lock(&mMutex);
4757
4758 // Validate current state
4759 switch (mState) {
4760 case CONFIGURED:
4761 case STARTED:
4762 /* valid state */
4763 break;
4764
4765 case ERROR:
4766 pthread_mutex_unlock(&mMutex);
4767 handleCameraDeviceError();
4768 return -ENODEV;
4769
4770 default:
4771 LOGE("Invalid state %d", mState);
4772 pthread_mutex_unlock(&mMutex);
4773 return -ENODEV;
4774 }
4775
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004776 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004777 if (rc != NO_ERROR) {
4778 LOGE("incoming request is not valid");
4779 pthread_mutex_unlock(&mMutex);
4780 return rc;
4781 }
4782
4783 meta = request->settings;
4784
4785 // For first capture request, send capture intent, and
4786 // stream on all streams
4787 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004788 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004789 // send an unconfigure to the backend so that the isp
4790 // resources are deallocated
4791 if (!mFirstConfiguration) {
4792 cam_stream_size_info_t stream_config_info;
4793 int32_t hal_version = CAM_HAL_V3;
4794 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4795 stream_config_info.buffer_info.min_buffers =
4796 MIN_INFLIGHT_REQUESTS;
4797 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004798 m_bIs4KVideo ? 0 :
4799 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004800 clear_metadata_buffer(mParameters);
4801 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4802 CAM_INTF_PARM_HAL_VERSION, hal_version);
4803 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4804 CAM_INTF_META_STREAM_INFO, stream_config_info);
4805 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4806 mParameters);
4807 if (rc < 0) {
4808 LOGE("set_parms for unconfigure failed");
4809 pthread_mutex_unlock(&mMutex);
4810 return rc;
4811 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004812
Thierry Strudel3d639192016-09-09 11:52:26 -07004813 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004814 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004815 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004816 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004817 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004818 property_get("persist.camera.is_type", is_type_value, "4");
4819 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4820 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4821 property_get("persist.camera.is_type_preview", is_type_value, "4");
4822 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4823 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004824
4825 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4826 int32_t hal_version = CAM_HAL_V3;
4827 uint8_t captureIntent =
4828 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4829 mCaptureIntent = captureIntent;
4830 clear_metadata_buffer(mParameters);
4831 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4832 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4833 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004834 if (mFirstConfiguration) {
4835 // configure instant AEC
4836 // Instant AEC is a session based parameter and it is needed only
4837 // once per complete session after open camera.
4838 // i.e. This is set only once for the first capture request, after open camera.
4839 setInstantAEC(meta);
4840 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004841 uint8_t fwkVideoStabMode=0;
4842 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4843 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4844 }
4845
Xue Tuecac74e2017-04-17 13:58:15 -07004846 // If EIS setprop is enabled then only turn it on for video/preview
4847 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004848 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004849 int32_t vsMode;
4850 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4851 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4852 rc = BAD_VALUE;
4853 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004854 LOGD("setEis %d", setEis);
4855 bool eis3Supported = false;
4856 size_t count = IS_TYPE_MAX;
4857 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4858 for (size_t i = 0; i < count; i++) {
4859 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4860 eis3Supported = true;
4861 break;
4862 }
4863 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004864
4865 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004866 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004867 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4868 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004869 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4870 is_type = isTypePreview;
4871 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4872 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4873 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004874 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004875 } else {
4876 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004877 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004878 } else {
4879 is_type = IS_TYPE_NONE;
4880 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004881 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004882 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004883 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4884 }
4885 }
4886
4887 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4888 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4889
Thierry Strudel54dc9782017-02-15 12:12:10 -08004890 //Disable tintless only if the property is set to 0
4891 memset(prop, 0, sizeof(prop));
4892 property_get("persist.camera.tintless.enable", prop, "1");
4893 int32_t tintless_value = atoi(prop);
4894
Thierry Strudel3d639192016-09-09 11:52:26 -07004895 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4896 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004897
Thierry Strudel3d639192016-09-09 11:52:26 -07004898 //Disable CDS for HFR mode or if DIS/EIS is on.
4899 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4900 //after every configure_stream
4901 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4902 (m_bIsVideo)) {
4903 int32_t cds = CAM_CDS_MODE_OFF;
4904 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4905 CAM_INTF_PARM_CDS_MODE, cds))
4906 LOGE("Failed to disable CDS for HFR mode");
4907
4908 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004909
4910 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4911 uint8_t* use_av_timer = NULL;
4912
4913 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004914 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004915 use_av_timer = &m_debug_avtimer;
4916 }
4917 else{
4918 use_av_timer =
4919 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004920 if (use_av_timer) {
4921 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4922 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004923 }
4924
4925 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4926 rc = BAD_VALUE;
4927 }
4928 }
4929
Thierry Strudel3d639192016-09-09 11:52:26 -07004930 setMobicat();
4931
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004932 uint8_t nrMode = 0;
4933 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4934 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4935 }
4936
Thierry Strudel3d639192016-09-09 11:52:26 -07004937 /* Set fps and hfr mode while sending meta stream info so that sensor
4938 * can configure appropriate streaming mode */
4939 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004940 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4941 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004942 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4943 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004944 if (rc == NO_ERROR) {
4945 int32_t max_fps =
4946 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004947 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004948 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4949 }
4950 /* For HFR, more buffers are dequeued upfront to improve the performance */
4951 if (mBatchSize) {
4952 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4953 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4954 }
4955 }
4956 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004957 LOGE("setHalFpsRange failed");
4958 }
4959 }
4960 if (meta.exists(ANDROID_CONTROL_MODE)) {
4961 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4962 rc = extractSceneMode(meta, metaMode, mParameters);
4963 if (rc != NO_ERROR) {
4964 LOGE("extractSceneMode failed");
4965 }
4966 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004967 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004968
Thierry Strudel04e026f2016-10-10 11:27:36 -07004969 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4970 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4971 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4972 rc = setVideoHdrMode(mParameters, vhdr);
4973 if (rc != NO_ERROR) {
4974                LOGE("setVideoHdrMode failed");
4975 }
4976 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004977
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004978 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004979 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004980 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004981 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4982 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
4983 sensorModeFullFov)) {
4984 rc = BAD_VALUE;
4985 }
4986 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004987 //TODO: validate the arguments, HSV scenemode should have only the
4988 //advertised fps ranges
4989
4990 /*set the capture intent, hal version, tintless, stream info,
4991     *and DIS enable parameters to the backend*/
4992 LOGD("set_parms META_STREAM_INFO " );
4993 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004994 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4995 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004996 mStreamConfigInfo.type[i],
4997 mStreamConfigInfo.stream_sizes[i].width,
4998 mStreamConfigInfo.stream_sizes[i].height,
4999 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005000 mStreamConfigInfo.format[i],
5001 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005002 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005003
Thierry Strudel3d639192016-09-09 11:52:26 -07005004 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5005 mParameters);
5006 if (rc < 0) {
5007 LOGE("set_parms failed for hal version, stream info");
5008 }
5009
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005010 cam_sensor_mode_info_t sensorModeInfo = {};
5011 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005012 if (rc != NO_ERROR) {
5013            LOGE("Failed to get sensor mode info");
5014 pthread_mutex_unlock(&mMutex);
5015 goto error_exit;
5016 }
5017
5018 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5019 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005020 sensorModeInfo.active_array_size.width,
5021 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005022
5023 /* Set batchmode before initializing channel. Since registerBuffer
5024 * internally initializes some of the channels, better set batchmode
5025 * even before first register buffer */
5026 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5027 it != mStreamInfo.end(); it++) {
5028 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5029 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5030 && mBatchSize) {
5031 rc = channel->setBatchSize(mBatchSize);
5032 //Disable per frame map unmap for HFR/batchmode case
5033 rc |= channel->setPerFrameMapUnmap(false);
5034 if (NO_ERROR != rc) {
5035 LOGE("Channel init failed %d", rc);
5036 pthread_mutex_unlock(&mMutex);
5037 goto error_exit;
5038 }
5039 }
5040 }
5041
5042 //First initialize all streams
5043 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5044 it != mStreamInfo.end(); it++) {
5045 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005046
5047 /* Initial value of NR mode is needed before stream on */
5048 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005049 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5050 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005051 setEis) {
5052 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5053 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5054 is_type = mStreamConfigInfo.is_type[i];
5055 break;
5056 }
5057 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005058 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005059 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005060 rc = channel->initialize(IS_TYPE_NONE);
5061 }
5062 if (NO_ERROR != rc) {
5063 LOGE("Channel initialization failed %d", rc);
5064 pthread_mutex_unlock(&mMutex);
5065 goto error_exit;
5066 }
5067 }
5068
5069 if (mRawDumpChannel) {
5070 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5071 if (rc != NO_ERROR) {
5072 LOGE("Error: Raw Dump Channel init failed");
5073 pthread_mutex_unlock(&mMutex);
5074 goto error_exit;
5075 }
5076 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005077 if (mHdrPlusRawSrcChannel) {
5078 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5079 if (rc != NO_ERROR) {
5080 LOGE("Error: HDR+ RAW Source Channel init failed");
5081 pthread_mutex_unlock(&mMutex);
5082 goto error_exit;
5083 }
5084 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005085 if (mSupportChannel) {
5086 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5087 if (rc < 0) {
5088 LOGE("Support channel initialization failed");
5089 pthread_mutex_unlock(&mMutex);
5090 goto error_exit;
5091 }
5092 }
5093 if (mAnalysisChannel) {
5094 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5095 if (rc < 0) {
5096 LOGE("Analysis channel initialization failed");
5097 pthread_mutex_unlock(&mMutex);
5098 goto error_exit;
5099 }
5100 }
5101 if (mDummyBatchChannel) {
5102 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5103 if (rc < 0) {
5104 LOGE("mDummyBatchChannel setBatchSize failed");
5105 pthread_mutex_unlock(&mMutex);
5106 goto error_exit;
5107 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005108 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005109 if (rc < 0) {
5110 LOGE("mDummyBatchChannel initialization failed");
5111 pthread_mutex_unlock(&mMutex);
5112 goto error_exit;
5113 }
5114 }
5115
5116 // Set bundle info
5117 rc = setBundleInfo();
5118 if (rc < 0) {
5119 LOGE("setBundleInfo failed %d", rc);
5120 pthread_mutex_unlock(&mMutex);
5121 goto error_exit;
5122 }
5123
5124 //update settings from app here
5125 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5126 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5127 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5128 }
5129 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5130 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5131 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5132 }
5133 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5134 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5135 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5136
5137 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5138 (mLinkedCameraId != mCameraId) ) {
5139 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5140 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005141 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005142 goto error_exit;
5143 }
5144 }
5145
5146 // add bundle related cameras
5147 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5148 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005149 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5150 &m_pDualCamCmdPtr->bundle_info;
5151 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005152 if (mIsDeviceLinked)
5153 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5154 else
5155 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5156
5157 pthread_mutex_lock(&gCamLock);
5158
5159 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5160 LOGE("Dualcam: Invalid Session Id ");
5161 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005162 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005163 goto error_exit;
5164 }
5165
5166 if (mIsMainCamera == 1) {
5167 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5168 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005169 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005170 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005171 // related session id should be session id of linked session
5172 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5173 } else {
5174 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5175 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005176 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005177 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005178 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5179 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005180 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005181 pthread_mutex_unlock(&gCamLock);
5182
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005183 rc = mCameraHandle->ops->set_dual_cam_cmd(
5184 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005185 if (rc < 0) {
5186 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005187 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005188 goto error_exit;
5189 }
5190 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005191 goto no_error;
5192error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005193 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005194 return rc;
5195no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005196 mWokenUpByDaemon = false;
5197 mPendingLiveRequest = 0;
5198 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005199 }
5200
5201 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005202 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005203
5204 if (mFlushPerf) {
5205 //we cannot accept any requests during flush
5206 LOGE("process_capture_request cannot proceed during flush");
5207 pthread_mutex_unlock(&mMutex);
5208 return NO_ERROR; //should return an error
5209 }
5210
5211 if (meta.exists(ANDROID_REQUEST_ID)) {
5212 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5213 mCurrentRequestId = request_id;
5214 LOGD("Received request with id: %d", request_id);
5215 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5216 LOGE("Unable to find request id field, \
5217 & no previous id available");
5218 pthread_mutex_unlock(&mMutex);
5219 return NAME_NOT_FOUND;
5220 } else {
5221 LOGD("Re-using old request id");
5222 request_id = mCurrentRequestId;
5223 }
5224
5225 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5226 request->num_output_buffers,
5227 request->input_buffer,
5228 frameNumber);
5229 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005230 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005231 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005232 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005233 uint32_t snapshotStreamId = 0;
5234 for (size_t i = 0; i < request->num_output_buffers; i++) {
5235 const camera3_stream_buffer_t& output = request->output_buffers[i];
5236 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5237
Emilian Peev7650c122017-01-19 08:24:33 -08005238 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5239 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005240 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005241 blob_request = 1;
5242 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5243 }
5244
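        // Per the camera3 contract the HAL must wait on (and close) each output
        // buffer's acquire fence before the buffer is written; TIMEOUT_NEVER
        // blocks until the producer signals the fence.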
5245 if (output.acquire_fence != -1) {
5246 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5247 close(output.acquire_fence);
5248 if (rc != OK) {
5249 LOGE("sync wait failed %d", rc);
5250 pthread_mutex_unlock(&mMutex);
5251 return rc;
5252 }
5253 }
5254
Emilian Peev0f3c3162017-03-15 12:57:46 +00005255 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5256 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005257 depthRequestPresent = true;
5258 continue;
5259 }
5260
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005261 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005262 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005263
5264 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5265 isVidBufRequested = true;
5266 }
5267 }
5268
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005269 //FIXME: Add checks to ensure no dups in validateCaptureRequest
5270 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5271 itr++) {
5272 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5273 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5274 channel->getStreamID(channel->getStreamTypeMask());
5275
5276 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5277 isVidBufRequested = true;
5278 }
5279 }
5280
Thierry Strudel3d639192016-09-09 11:52:26 -07005281 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005282 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005283 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005284 }
5285 if (blob_request && mRawDumpChannel) {
5286 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005287 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005288 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005289 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005290 }
5291
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005292 {
5293 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5294 // Request a RAW buffer if
5295 // 1. mHdrPlusRawSrcChannel is valid.
 5296 // 2. frameNumber is a multiple of kHdrPlusRawPeriod (to limit the RAW capture rate).
5297 // 3. There is no pending HDR+ request.
5298 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5299 mHdrPlusPendingRequests.size() == 0) {
5300 streamsArray.stream_request[streamsArray.num_streams].streamID =
5301 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5302 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5303 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005304 }
5305
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005306 //extract capture intent
5307 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5308 mCaptureIntent =
5309 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5310 }
5311
5312 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5313 mCacMode =
5314 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5315 }
5316
5317 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005318 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005319
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005320 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005321 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005322 // If this request has a still capture intent, try to submit an HDR+ request.
5323 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5324 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5325 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5326 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005327 }
5328
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005329 if (hdrPlusRequest) {
5330 // For a HDR+ request, just set the frame parameters.
5331 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5332 if (rc < 0) {
5333 LOGE("fail to set frame parameters");
5334 pthread_mutex_unlock(&mMutex);
5335 return rc;
5336 }
5337 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005338 /* Parse the settings:
5339 * - For every request in NORMAL MODE
5340 * - For every request in HFR mode during preview only case
5341 * - For first request of every batch in HFR mode during video
 5342 * recording. In batch mode the same settings, except the frame number,
 5343 * are repeated in each request of the batch.
5344 */
5345 if (!mBatchSize ||
5346 (mBatchSize && !isVidBufRequested) ||
5347 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005348 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005349 if (rc < 0) {
5350 LOGE("fail to set frame parameters");
5351 pthread_mutex_unlock(&mMutex);
5352 return rc;
5353 }
5354 }
5355 /* For batchMode HFR, setFrameParameters is not called for every
5356 * request. But only frame number of the latest request is parsed.
5357 * Keep track of first and last frame numbers in a batch so that
5358 * metadata for the frame numbers of batch can be duplicated in
 5359 * handleBatchMetadata */
5360 if (mBatchSize) {
5361 if (!mToBeQueuedVidBufs) {
5362 //start of the batch
5363 mFirstFrameNumberInBatch = request->frame_number;
5364 }
5365 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5366 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5367 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005368 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005369 return BAD_VALUE;
5370 }
5371 }
5372 if (mNeedSensorRestart) {
5373 /* Unlock the mutex as restartSensor waits on the channels to be
5374 * stopped, which in turn calls stream callback functions -
5375 * handleBufferWithLock and handleMetadataWithLock */
5376 pthread_mutex_unlock(&mMutex);
5377 rc = dynamicUpdateMetaStreamInfo();
5378 if (rc != NO_ERROR) {
5379 LOGE("Restarting the sensor failed");
5380 return BAD_VALUE;
5381 }
5382 mNeedSensorRestart = false;
5383 pthread_mutex_lock(&mMutex);
5384 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005385 if(mResetInstantAEC) {
5386 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5387 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5388 mResetInstantAEC = false;
5389 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005390 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005391 if (request->input_buffer->acquire_fence != -1) {
5392 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5393 close(request->input_buffer->acquire_fence);
5394 if (rc != OK) {
5395 LOGE("input buffer sync wait failed %d", rc);
5396 pthread_mutex_unlock(&mMutex);
5397 return rc;
5398 }
5399 }
5400 }
5401
5402 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5403 mLastCustIntentFrmNum = frameNumber;
5404 }
5405 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005406 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005407 pendingRequestIterator latestRequest;
5408 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005409 pendingRequest.num_buffers = depthRequestPresent ?
5410 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005411 pendingRequest.request_id = request_id;
5412 pendingRequest.blob_request = blob_request;
5413 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005414 if (request->input_buffer) {
5415 pendingRequest.input_buffer =
5416 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5417 *(pendingRequest.input_buffer) = *(request->input_buffer);
5418 pInputBuffer = pendingRequest.input_buffer;
5419 } else {
5420 pendingRequest.input_buffer = NULL;
5421 pInputBuffer = NULL;
5422 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005423 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005424
5425 pendingRequest.pipeline_depth = 0;
5426 pendingRequest.partial_result_cnt = 0;
5427 extractJpegMetadata(mCurJpegMeta, request);
5428 pendingRequest.jpegMetadata = mCurJpegMeta;
5429 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005430 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005431 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5432 mHybridAeEnable =
5433 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5434 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005435
5436 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5437 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005438 /* DevCamDebug metadata processCaptureRequest */
5439 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5440 mDevCamDebugMetaEnable =
5441 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5442 }
5443 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5444 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005445
5446 //extract CAC info
5447 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5448 mCacMode =
5449 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5450 }
5451 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005452 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005453
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005454 // extract enableZsl info
5455 if (gExposeEnableZslKey) {
5456 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5457 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5458 mZslEnabled = pendingRequest.enableZsl;
5459 } else {
5460 pendingRequest.enableZsl = mZslEnabled;
5461 }
5462 }
5463
Thierry Strudel3d639192016-09-09 11:52:26 -07005464 PendingBuffersInRequest bufsForCurRequest;
5465 bufsForCurRequest.frame_number = frameNumber;
5466 // Mark current timestamp for the new request
5467 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005468 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005469
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005470 if (hdrPlusRequest) {
5471 // Save settings for this request.
5472 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5473 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5474
5475 // Add to pending HDR+ request queue.
5476 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5477 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5478
5479 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5480 }
5481
Thierry Strudel3d639192016-09-09 11:52:26 -07005482 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005483 if ((request->output_buffers[i].stream->data_space ==
5484 HAL_DATASPACE_DEPTH) &&
5485 (HAL_PIXEL_FORMAT_BLOB ==
5486 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005487 continue;
5488 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005489 RequestedBufferInfo requestedBuf;
5490 memset(&requestedBuf, 0, sizeof(requestedBuf));
5491 requestedBuf.stream = request->output_buffers[i].stream;
5492 requestedBuf.buffer = NULL;
5493 pendingRequest.buffers.push_back(requestedBuf);
5494
 5495 // Add the buffer handle to the pending buffers list
5496 PendingBufferInfo bufferInfo;
5497 bufferInfo.buffer = request->output_buffers[i].buffer;
5498 bufferInfo.stream = request->output_buffers[i].stream;
5499 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5500 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5501 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5502 frameNumber, bufferInfo.buffer,
5503 channel->getStreamTypeMask(), bufferInfo.stream->format);
5504 }
5505 // Add this request packet into mPendingBuffersMap
5506 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5507 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5508 mPendingBuffersMap.get_num_overall_buffers());
5509
5510 latestRequest = mPendingRequestsList.insert(
5511 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005512
5513 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5514 // for the frame number.
5515 mShutterDispatcher.expectShutter(frameNumber);
5516 for (size_t i = 0; i < request->num_output_buffers; i++) {
5517 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5518 }
5519
Thierry Strudel3d639192016-09-09 11:52:26 -07005520 if(mFlush) {
5521 LOGI("mFlush is true");
5522 pthread_mutex_unlock(&mMutex);
5523 return NO_ERROR;
5524 }
5525
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005526 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5527 // channel.
5528 if (!hdrPlusRequest) {
5529 int indexUsed;
5530 // Notify metadata channel we receive a request
5531 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005532
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005533 if(request->input_buffer != NULL){
5534 LOGD("Input request, frame_number %d", frameNumber);
5535 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5536 if (NO_ERROR != rc) {
5537 LOGE("fail to set reproc parameters");
5538 pthread_mutex_unlock(&mMutex);
5539 return rc;
5540 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005541 }
5542
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005543 // Call request on other streams
5544 uint32_t streams_need_metadata = 0;
5545 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5546 for (size_t i = 0; i < request->num_output_buffers; i++) {
5547 const camera3_stream_buffer_t& output = request->output_buffers[i];
5548 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5549
5550 if (channel == NULL) {
5551 LOGW("invalid channel pointer for stream");
5552 continue;
5553 }
5554
5555 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5556 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5557 output.buffer, request->input_buffer, frameNumber);
5558 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005559 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005560 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5561 if (rc < 0) {
5562 LOGE("Fail to request on picture channel");
5563 pthread_mutex_unlock(&mMutex);
5564 return rc;
5565 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005566 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005567 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5568 assert(NULL != mDepthChannel);
5569 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005570
Emilian Peev7650c122017-01-19 08:24:33 -08005571 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5572 if (rc < 0) {
5573 LOGE("Fail to map on depth buffer");
5574 pthread_mutex_unlock(&mMutex);
5575 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005576 }
Emilian Peev7650c122017-01-19 08:24:33 -08005577 } else {
5578 LOGD("snapshot request with buffer %p, frame_number %d",
5579 output.buffer, frameNumber);
5580 if (!request->settings) {
5581 rc = channel->request(output.buffer, frameNumber,
5582 NULL, mPrevParameters, indexUsed);
5583 } else {
5584 rc = channel->request(output.buffer, frameNumber,
5585 NULL, mParameters, indexUsed);
5586 }
5587 if (rc < 0) {
5588 LOGE("Fail to request on picture channel");
5589 pthread_mutex_unlock(&mMutex);
5590 return rc;
5591 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005592
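                        // Record which backend buffer index this stream will use so the
                        // per-frame stream list sent via set_parms references the exact
                        // buffer; in constrained high-speed mode CAM_FREERUN_IDX lets the
                        // backend free-run instead of pinning a specific index.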
Emilian Peev7650c122017-01-19 08:24:33 -08005593 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5594 uint32_t j = 0;
5595 for (j = 0; j < streamsArray.num_streams; j++) {
5596 if (streamsArray.stream_request[j].streamID == streamId) {
5597 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5598 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5599 else
5600 streamsArray.stream_request[j].buf_index = indexUsed;
5601 break;
5602 }
5603 }
5604 if (j == streamsArray.num_streams) {
5605 LOGE("Did not find matching stream to update index");
5606 assert(0);
5607 }
5608
5609 pendingBufferIter->need_metadata = true;
5610 streams_need_metadata++;
5611 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005612 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005613 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5614 bool needMetadata = false;
5615 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5616 rc = yuvChannel->request(output.buffer, frameNumber,
5617 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5618 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005619 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005620 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005621 pthread_mutex_unlock(&mMutex);
5622 return rc;
5623 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005624
5625 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5626 uint32_t j = 0;
5627 for (j = 0; j < streamsArray.num_streams; j++) {
5628 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005629 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5630 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5631 else
5632 streamsArray.stream_request[j].buf_index = indexUsed;
5633 break;
5634 }
5635 }
5636 if (j == streamsArray.num_streams) {
5637 LOGE("Did not find matching stream to update index");
5638 assert(0);
5639 }
5640
5641 pendingBufferIter->need_metadata = needMetadata;
5642 if (needMetadata)
5643 streams_need_metadata += 1;
5644 LOGD("calling YUV channel request, need_metadata is %d",
5645 needMetadata);
5646 } else {
5647 LOGD("request with buffer %p, frame_number %d",
5648 output.buffer, frameNumber);
5649
5650 rc = channel->request(output.buffer, frameNumber, indexUsed);
5651
5652 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5653 uint32_t j = 0;
5654 for (j = 0; j < streamsArray.num_streams; j++) {
5655 if (streamsArray.stream_request[j].streamID == streamId) {
5656 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5657 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5658 else
5659 streamsArray.stream_request[j].buf_index = indexUsed;
5660 break;
5661 }
5662 }
5663 if (j == streamsArray.num_streams) {
5664 LOGE("Did not find matching stream to update index");
5665 assert(0);
5666 }
5667
5668 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5669 && mBatchSize) {
5670 mToBeQueuedVidBufs++;
5671 if (mToBeQueuedVidBufs == mBatchSize) {
5672 channel->queueBatchBuf();
5673 }
5674 }
5675 if (rc < 0) {
5676 LOGE("request failed");
5677 pthread_mutex_unlock(&mMutex);
5678 return rc;
5679 }
5680 }
5681 pendingBufferIter++;
5682 }
5683
5684 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5685 itr++) {
5686 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5687
5688 if (channel == NULL) {
5689 LOGE("invalid channel pointer for stream");
5690 assert(0);
5691 return BAD_VALUE;
5692 }
5693
5694 InternalRequest requestedStream;
5695 requestedStream = (*itr);
5696
5697
5698 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5699 LOGD("snapshot request internally input buffer %p, frame_number %d",
5700 request->input_buffer, frameNumber);
5701 if(request->input_buffer != NULL){
5702 rc = channel->request(NULL, frameNumber,
5703 pInputBuffer, &mReprocMeta, indexUsed, true,
5704 requestedStream.meteringOnly);
5705 if (rc < 0) {
5706 LOGE("Fail to request on picture channel");
5707 pthread_mutex_unlock(&mMutex);
5708 return rc;
5709 }
5710 } else {
5711 LOGD("snapshot request with frame_number %d", frameNumber);
5712 if (!request->settings) {
5713 rc = channel->request(NULL, frameNumber,
5714 NULL, mPrevParameters, indexUsed, true,
5715 requestedStream.meteringOnly);
5716 } else {
5717 rc = channel->request(NULL, frameNumber,
5718 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5719 }
5720 if (rc < 0) {
5721 LOGE("Fail to request on picture channel");
5722 pthread_mutex_unlock(&mMutex);
5723 return rc;
5724 }
5725
5726 if ((*itr).meteringOnly != 1) {
5727 requestedStream.need_metadata = 1;
5728 streams_need_metadata++;
5729 }
5730 }
5731
5732 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5733 uint32_t j = 0;
5734 for (j = 0; j < streamsArray.num_streams; j++) {
5735 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005736 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5737 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5738 else
5739 streamsArray.stream_request[j].buf_index = indexUsed;
5740 break;
5741 }
5742 }
5743 if (j == streamsArray.num_streams) {
5744 LOGE("Did not find matching stream to update index");
5745 assert(0);
5746 }
5747
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005748 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005749 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005750 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005751 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005752 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005753 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005754 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005755
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005756 // If two streams have need_metadata set to true, fail the request, unless
 5757 // we copy/reference-count the metadata buffer
 5758 if (streams_need_metadata > 1) {
 5759 LOGE("not supporting request in which two streams require"
 5760 " two HAL metadata buffers for reprocessing");
5761 pthread_mutex_unlock(&mMutex);
5762 return -EINVAL;
5763 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005764
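    // PD (phase-detect) data is skipped, rather than fully disabled, whenever a
    // depth channel is configured; it is enabled only when the request explicitly
    // asks for it via NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, otherwise the last
    // requested depth-cloud mode is reused.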
Emilian Peev666f5142017-06-02 16:47:04 +01005765 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5766 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5767 if (depthRequestPresent && mDepthChannel) {
5768 if (request->settings) {
5769 camera_metadata_ro_entry entry;
5770 if (find_camera_metadata_ro_entry(request->settings,
5771 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5772 if (entry.data.u8[0]) {
5773 pdafEnable = CAM_PD_DATA_ENABLED;
5774 } else {
5775 pdafEnable = CAM_PD_DATA_SKIP;
5776 }
5777 mDepthCloudMode = pdafEnable;
5778 } else {
5779 pdafEnable = mDepthCloudMode;
5780 }
5781 } else {
5782 pdafEnable = mDepthCloudMode;
5783 }
5784 }
5785
Emilian Peev7650c122017-01-19 08:24:33 -08005786 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5787 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5788 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5789 pthread_mutex_unlock(&mMutex);
5790 return BAD_VALUE;
5791 }
Emilian Peev666f5142017-06-02 16:47:04 +01005792
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005793 if (request->input_buffer == NULL) {
5794 /* Set the parameters to backend:
5795 * - For every request in NORMAL MODE
5796 * - For every request in HFR mode during preview only case
5797 * - Once every batch in HFR mode during video recording
5798 */
5799 if (!mBatchSize ||
5800 (mBatchSize && !isVidBufRequested) ||
5801 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5802 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5803 mBatchSize, isVidBufRequested,
5804 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005805
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005806 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
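                // Merge this request's stream IDs into mBatchedStreamsArray
                // (skipping duplicates) so that the single set_parms call below
                // covers every stream touched anywhere in the HFR batch.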
5807 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5808 uint32_t m = 0;
5809 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5810 if (streamsArray.stream_request[k].streamID ==
5811 mBatchedStreamsArray.stream_request[m].streamID)
5812 break;
5813 }
5814 if (m == mBatchedStreamsArray.num_streams) {
5815 mBatchedStreamsArray.stream_request\
5816 [mBatchedStreamsArray.num_streams].streamID =
5817 streamsArray.stream_request[k].streamID;
5818 mBatchedStreamsArray.stream_request\
5819 [mBatchedStreamsArray.num_streams].buf_index =
5820 streamsArray.stream_request[k].buf_index;
5821 mBatchedStreamsArray.num_streams =
5822 mBatchedStreamsArray.num_streams + 1;
5823 }
5824 }
5825 streamsArray = mBatchedStreamsArray;
5826 }
5827 /* Update stream id of all the requested buffers */
5828 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5829 streamsArray)) {
5830 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005831 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005832 return BAD_VALUE;
5833 }
5834
5835 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5836 mParameters);
5837 if (rc < 0) {
5838 LOGE("set_parms failed");
5839 }
 5840 /* reset to zero because the batch is queued */
5841 mToBeQueuedVidBufs = 0;
5842 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5843 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5844 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005845 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5846 uint32_t m = 0;
5847 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5848 if (streamsArray.stream_request[k].streamID ==
5849 mBatchedStreamsArray.stream_request[m].streamID)
5850 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005851 }
5852 if (m == mBatchedStreamsArray.num_streams) {
5853 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5854 streamID = streamsArray.stream_request[k].streamID;
5855 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5856 buf_index = streamsArray.stream_request[k].buf_index;
5857 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5858 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005859 }
5860 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005861 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005862
5863 // Start all streams after the first setting is sent, so that the
5864 // setting can be applied sooner: (0 + apply_delay)th frame.
5865 if (mState == CONFIGURED && mChannelHandle) {
5866 //Then start them.
5867 LOGH("Start META Channel");
5868 rc = mMetadataChannel->start();
5869 if (rc < 0) {
5870 LOGE("META channel start failed");
5871 pthread_mutex_unlock(&mMutex);
5872 return rc;
5873 }
5874
5875 if (mAnalysisChannel) {
5876 rc = mAnalysisChannel->start();
5877 if (rc < 0) {
5878 LOGE("Analysis channel start failed");
5879 mMetadataChannel->stop();
5880 pthread_mutex_unlock(&mMutex);
5881 return rc;
5882 }
5883 }
5884
5885 if (mSupportChannel) {
5886 rc = mSupportChannel->start();
5887 if (rc < 0) {
5888 LOGE("Support channel start failed");
5889 mMetadataChannel->stop();
5890 /* Although support and analysis are mutually exclusive today
 5891 adding it in any case for future-proofing */
5892 if (mAnalysisChannel) {
5893 mAnalysisChannel->stop();
5894 }
5895 pthread_mutex_unlock(&mMutex);
5896 return rc;
5897 }
5898 }
5899 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5900 it != mStreamInfo.end(); it++) {
5901 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5902 LOGH("Start Processing Channel mask=%d",
5903 channel->getStreamTypeMask());
5904 rc = channel->start();
5905 if (rc < 0) {
5906 LOGE("channel start failed");
5907 pthread_mutex_unlock(&mMutex);
5908 return rc;
5909 }
5910 }
5911
5912 if (mRawDumpChannel) {
5913 LOGD("Starting raw dump stream");
5914 rc = mRawDumpChannel->start();
5915 if (rc != NO_ERROR) {
5916 LOGE("Error Starting Raw Dump Channel");
5917 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5918 it != mStreamInfo.end(); it++) {
5919 QCamera3Channel *channel =
5920 (QCamera3Channel *)(*it)->stream->priv;
5921 LOGH("Stopping Processing Channel mask=%d",
5922 channel->getStreamTypeMask());
5923 channel->stop();
5924 }
5925 if (mSupportChannel)
5926 mSupportChannel->stop();
5927 if (mAnalysisChannel) {
5928 mAnalysisChannel->stop();
5929 }
5930 mMetadataChannel->stop();
5931 pthread_mutex_unlock(&mMutex);
5932 return rc;
5933 }
5934 }
5935
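        // Note the start-up ordering below: the channels are started with sensor
        // streaming deferred, the Easel/MIPI path is brought up at the selected
        // sensor clock, and only then is sensor streaming enabled.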
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005936 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005937 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005938 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005939 if (rc != NO_ERROR) {
5940 LOGE("start_channel failed %d", rc);
5941 pthread_mutex_unlock(&mMutex);
5942 return rc;
5943 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005944
5945 {
5946 // Configure Easel for stream on.
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005947 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005948
5949 // Now that sensor mode should have been selected, get the selected sensor mode
5950 // info.
5951 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5952 getCurrentSensorModeInfo(mSensorModeInfo);
5953
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005954 if (EaselManagerClientOpened) {
5955 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chena6c99062017-05-23 13:45:06 -07005956 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
5957 /*enableIpu*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005958 if (rc != OK) {
5959 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5960 mCameraId, mSensorModeInfo.op_pixel_clk);
5961 pthread_mutex_unlock(&mMutex);
5962 return rc;
5963 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005964 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005965 }
5966 }
5967
5968 // Start sensor streaming.
5969 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5970 mChannelHandle);
5971 if (rc != NO_ERROR) {
5972 LOGE("start_sensor_stream_on failed %d", rc);
5973 pthread_mutex_unlock(&mMutex);
5974 return rc;
5975 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005976 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005977 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005978 }
5979
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005980 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chenjie Luo4a761802017-06-13 17:35:54 +00005981 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005982 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005983 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5984 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5985 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5986 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5987 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5988 rc = enableHdrPlusModeLocked();
5989 if (rc != OK) {
5990 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
5991 pthread_mutex_unlock(&mMutex);
5992 return rc;
5993 }
5994
5995 mFirstPreviewIntentSeen = true;
5996 }
5997 }
5998
Thierry Strudel3d639192016-09-09 11:52:26 -07005999 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6000
6001 mState = STARTED;
6002 // Added a timed condition wait
6003 struct timespec ts;
6004 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006005 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006006 if (rc < 0) {
6007 isValidTimeout = 0;
6008 LOGE("Error reading the real time clock!!");
6009 }
6010 else {
 6011 // Use a 5 sec timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006012 int64_t timeout = 5;
6013 {
6014 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6015 // If there is a pending HDR+ request, the following requests may be blocked until the
6016 // HDR+ request is done. So allow a longer timeout.
6017 if (mHdrPlusPendingRequests.size() > 0) {
6018 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6019 }
6020 }
6021 ts.tv_sec += timeout;
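            // pthread_cond_timedwait() below takes an absolute deadline; the base
            // time comes from CLOCK_MONOTONIC above, which assumes mRequestCond is
            // initialized with a monotonic-clock condattr (see cam_cond.h).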
Thierry Strudel3d639192016-09-09 11:52:26 -07006022 }
6023 //Block on conditional variable
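    // Simple flow control: block the caller until the number of in-flight
    // requests drops below mMinInFlightRequests; input-buffer (reprocess)
    // requests are never throttled here.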
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006024 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006025 (mState != ERROR) && (mState != DEINIT)) {
6026 if (!isValidTimeout) {
6027 LOGD("Blocking on conditional wait");
6028 pthread_cond_wait(&mRequestCond, &mMutex);
6029 }
6030 else {
6031 LOGD("Blocking on timed conditional wait");
6032 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6033 if (rc == ETIMEDOUT) {
6034 rc = -ENODEV;
6035 LOGE("Unblocked on timeout!!!!");
6036 break;
6037 }
6038 }
6039 LOGD("Unblocked");
6040 if (mWokenUpByDaemon) {
6041 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006042 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006043 break;
6044 }
6045 }
6046 pthread_mutex_unlock(&mMutex);
6047
6048 return rc;
6049}
6050
6051/*===========================================================================
6052 * FUNCTION : dump
6053 *
 6054 * DESCRIPTION: Dump the HAL's pending request, pending buffer and pending
 *              frame drop state to the given file descriptor (dumpsys trigger)
 6055 *
 6056 * PARAMETERS :
 6057 *   @fd : file descriptor to dump into
 6058 *
 6059 * RETURN : None
6060 *==========================================================================*/
6061void QCamera3HardwareInterface::dump(int fd)
6062{
6063 pthread_mutex_lock(&mMutex);
6064 dprintf(fd, "\n Camera HAL3 information Begin \n");
6065
6066 dprintf(fd, "\nNumber of pending requests: %zu \n",
6067 mPendingRequestsList.size());
6068 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6069 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6070 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6071 for(pendingRequestIterator i = mPendingRequestsList.begin();
6072 i != mPendingRequestsList.end(); i++) {
6073 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6074 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6075 i->input_buffer);
6076 }
6077 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6078 mPendingBuffersMap.get_num_overall_buffers());
6079 dprintf(fd, "-------+------------------\n");
6080 dprintf(fd, " Frame | Stream type mask \n");
6081 dprintf(fd, "-------+------------------\n");
6082 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6083 for(auto &j : req.mPendingBufferList) {
6084 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6085 dprintf(fd, " %5d | %11d \n",
6086 req.frame_number, channel->getStreamTypeMask());
6087 }
6088 }
6089 dprintf(fd, "-------+------------------\n");
6090
6091 dprintf(fd, "\nPending frame drop list: %zu\n",
6092 mPendingFrameDropList.size());
6093 dprintf(fd, "-------+-----------\n");
6094 dprintf(fd, " Frame | Stream ID \n");
6095 dprintf(fd, "-------+-----------\n");
6096 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6097 i != mPendingFrameDropList.end(); i++) {
6098 dprintf(fd, " %5d | %9d \n",
6099 i->frame_number, i->stream_ID);
6100 }
6101 dprintf(fd, "-------+-----------\n");
6102
6103 dprintf(fd, "\n Camera HAL3 information End \n");
6104
6105 /* use dumpsys media.camera as trigger to send update debug level event */
6106 mUpdateDebugLevel = true;
6107 pthread_mutex_unlock(&mMutex);
6108 return;
6109}
6110
6111/*===========================================================================
6112 * FUNCTION : flush
6113 *
6114 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6115 * conditionally restarts channels
6116 *
6117 * PARAMETERS :
6118 * @ restartChannels: re-start all channels
6119 *
6120 *
6121 * RETURN :
6122 * 0 on success
6123 * Error code on failure
6124 *==========================================================================*/
6125int QCamera3HardwareInterface::flush(bool restartChannels)
6126{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006127 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006128 int32_t rc = NO_ERROR;
6129
6130 LOGD("Unblocking Process Capture Request");
6131 pthread_mutex_lock(&mMutex);
6132 mFlush = true;
6133 pthread_mutex_unlock(&mMutex);
6134
6135 rc = stopAllChannels();
6136 // unlink of dualcam
6137 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006138 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6139 &m_pDualCamCmdPtr->bundle_info;
6140 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006141 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6142 pthread_mutex_lock(&gCamLock);
6143
6144 if (mIsMainCamera == 1) {
6145 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6146 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006147 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006148 // related session id should be session id of linked session
6149 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6150 } else {
6151 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6152 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006153 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006154 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6155 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006156 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006157 pthread_mutex_unlock(&gCamLock);
6158
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006159 rc = mCameraHandle->ops->set_dual_cam_cmd(
6160 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006161 if (rc < 0) {
6162 LOGE("Dualcam: Unlink failed, but still proceed to close");
6163 }
6164 }
6165
6166 if (rc < 0) {
6167 LOGE("stopAllChannels failed");
6168 return rc;
6169 }
6170 if (mChannelHandle) {
6171 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6172 mChannelHandle);
6173 }
6174
6175 // Reset bundle info
6176 rc = setBundleInfo();
6177 if (rc < 0) {
6178 LOGE("setBundleInfo failed %d", rc);
6179 return rc;
6180 }
6181
6182 // Mutex Lock
6183 pthread_mutex_lock(&mMutex);
6184
6185 // Unblock process_capture_request
6186 mPendingLiveRequest = 0;
6187 pthread_cond_signal(&mRequestCond);
6188
6189 rc = notifyErrorForPendingRequests();
6190 if (rc < 0) {
6191 LOGE("notifyErrorForPendingRequests failed");
6192 pthread_mutex_unlock(&mMutex);
6193 return rc;
6194 }
6195
6196 mFlush = false;
6197
6198 // Start the Streams/Channels
6199 if (restartChannels) {
6200 rc = startAllChannels();
6201 if (rc < 0) {
6202 LOGE("startAllChannels failed");
6203 pthread_mutex_unlock(&mMutex);
6204 return rc;
6205 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006206 if (mChannelHandle) {
6207 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006208 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006209 if (rc < 0) {
6210 LOGE("start_channel failed");
6211 pthread_mutex_unlock(&mMutex);
6212 return rc;
6213 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006214 }
6215 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006216 pthread_mutex_unlock(&mMutex);
6217
6218 return 0;
6219}
6220
6221/*===========================================================================
6222 * FUNCTION : flushPerf
6223 *
6224 * DESCRIPTION: This is the performance optimization version of flush that does
6225 * not use stream off, rather flushes the system
6226 *
6227 * PARAMETERS :
6228 *
6229 *
6230 * RETURN : 0 : success
6231 * -EINVAL: input is malformed (device is not valid)
6232 * -ENODEV: if the device has encountered a serious error
6233 *==========================================================================*/
6234int QCamera3HardwareInterface::flushPerf()
6235{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006236 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006237 int32_t rc = 0;
6238 struct timespec timeout;
6239 bool timed_wait = false;
6240
6241 pthread_mutex_lock(&mMutex);
6242 mFlushPerf = true;
6243 mPendingBuffersMap.numPendingBufsAtFlush =
6244 mPendingBuffersMap.get_num_overall_buffers();
6245 LOGD("Calling flush. Wait for %d buffers to return",
6246 mPendingBuffersMap.numPendingBufsAtFlush);
6247
6248 /* send the flush event to the backend */
6249 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6250 if (rc < 0) {
6251 LOGE("Error in flush: IOCTL failure");
6252 mFlushPerf = false;
6253 pthread_mutex_unlock(&mMutex);
6254 return -ENODEV;
6255 }
6256
6257 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6258 LOGD("No pending buffers in HAL, return flush");
6259 mFlushPerf = false;
6260 pthread_mutex_unlock(&mMutex);
6261 return rc;
6262 }
6263
6264 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006265 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006266 if (rc < 0) {
6267 LOGE("Error reading the real time clock, cannot use timed wait");
6268 } else {
6269 timeout.tv_sec += FLUSH_TIMEOUT;
6270 timed_wait = true;
6271 }
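    // FLUSH_TIMEOUT (in seconds) bounds how long flushPerf() waits for the
    // outstanding buffers to return before giving up and reporting -ENODEV.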
6272
6273 //Block on conditional variable
6274 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6275 LOGD("Waiting on mBuffersCond");
6276 if (!timed_wait) {
6277 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6278 if (rc != 0) {
6279 LOGE("pthread_cond_wait failed due to rc = %s",
6280 strerror(rc));
6281 break;
6282 }
6283 } else {
6284 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6285 if (rc != 0) {
6286 LOGE("pthread_cond_timedwait failed due to rc = %s",
6287 strerror(rc));
6288 break;
6289 }
6290 }
6291 }
6292 if (rc != 0) {
6293 mFlushPerf = false;
6294 pthread_mutex_unlock(&mMutex);
6295 return -ENODEV;
6296 }
6297
6298 LOGD("Received buffers, now safe to return them");
6299
6300 //make sure the channels handle flush
6301 //currently only required for the picture channel to release snapshot resources
6302 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6303 it != mStreamInfo.end(); it++) {
6304 QCamera3Channel *channel = (*it)->channel;
6305 if (channel) {
6306 rc = channel->flush();
6307 if (rc) {
6308 LOGE("Flushing the channels failed with error %d", rc);
 6309 // even though the channel flush failed, we need to continue and
 6310 // return the buffers we have to the framework; however, the return
 6311 // value will be an error
6312 rc = -ENODEV;
6313 }
6314 }
6315 }
6316
6317 /* notify the frameworks and send errored results */
6318 rc = notifyErrorForPendingRequests();
6319 if (rc < 0) {
6320 LOGE("notifyErrorForPendingRequests failed");
6321 pthread_mutex_unlock(&mMutex);
6322 return rc;
6323 }
6324
6325 //unblock process_capture_request
6326 mPendingLiveRequest = 0;
6327 unblockRequestIfNecessary();
6328
6329 mFlushPerf = false;
6330 pthread_mutex_unlock(&mMutex);
6331 LOGD ("Flush Operation complete. rc = %d", rc);
6332 return rc;
6333}
6334
6335/*===========================================================================
6336 * FUNCTION : handleCameraDeviceError
6337 *
6338 * DESCRIPTION: This function calls internal flush and notifies the error to
6339 * framework and updates the state variable.
6340 *
6341 * PARAMETERS : None
6342 *
6343 * RETURN : NO_ERROR on Success
6344 * Error code on failure
6345 *==========================================================================*/
6346int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6347{
6348 int32_t rc = NO_ERROR;
6349
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006350 {
6351 Mutex::Autolock lock(mFlushLock);
6352 pthread_mutex_lock(&mMutex);
6353 if (mState != ERROR) {
6354 //if mState != ERROR, nothing to be done
6355 pthread_mutex_unlock(&mMutex);
6356 return NO_ERROR;
6357 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006358 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006359
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006360 rc = flush(false /* restart channels */);
6361 if (NO_ERROR != rc) {
6362 LOGE("internal flush to handle mState = ERROR failed");
6363 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006364
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006365 pthread_mutex_lock(&mMutex);
6366 mState = DEINIT;
6367 pthread_mutex_unlock(&mMutex);
6368 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006369
6370 camera3_notify_msg_t notify_msg;
6371 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6372 notify_msg.type = CAMERA3_MSG_ERROR;
6373 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6374 notify_msg.message.error.error_stream = NULL;
6375 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006376 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006377
6378 return rc;
6379}
6380
6381/*===========================================================================
6382 * FUNCTION : captureResultCb
6383 *
6384 * DESCRIPTION: Callback handler for all capture result
6385 * (streams, as well as metadata)
6386 *
6387 * PARAMETERS :
6388 * @metadata : metadata information
6389 * @buffer : actual gralloc buffer to be returned to frameworks.
 6390 * NULL if metadata.
 *   @frame_number : frame number of the request the buffer belongs to
 *   @isInputBuffer : true if the callback is for the request's input buffer
6391 *
6392 * RETURN : NONE
6393 *==========================================================================*/
6394void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6395 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6396{
6397 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006398 pthread_mutex_lock(&mMutex);
6399 uint8_t batchSize = mBatchSize;
6400 pthread_mutex_unlock(&mMutex);
6401 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006402 handleBatchMetadata(metadata_buf,
6403 true /* free_and_bufdone_meta_buf */);
6404 } else { /* mBatchSize = 0 */
6405 hdrPlusPerfLock(metadata_buf);
6406 pthread_mutex_lock(&mMutex);
6407 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006408 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006409 true /* last urgent frame of batch metadata */,
6410 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006411 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006412 pthread_mutex_unlock(&mMutex);
6413 }
6414 } else if (isInputBuffer) {
6415 pthread_mutex_lock(&mMutex);
6416 handleInputBufferWithLock(frame_number);
6417 pthread_mutex_unlock(&mMutex);
6418 } else {
6419 pthread_mutex_lock(&mMutex);
6420 handleBufferWithLock(buffer, frame_number);
6421 pthread_mutex_unlock(&mMutex);
6422 }
6423 return;
6424}
6425
6426/*===========================================================================
6427 * FUNCTION : getReprocessibleOutputStreamId
6428 *
6429 * DESCRIPTION: Get source output stream id for the input reprocess stream
6430 * based on size and format, which would be the largest
6431 * output stream if an input stream exists.
6432 *
6433 * PARAMETERS :
6434 * @id : return the stream id if found
6435 *
6436 * RETURN : int32_t type of status
6437 * NO_ERROR -- success
 6438 * non-zero failure code
6439 *==========================================================================*/
6440int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6441{
 6442 /* check if there is any output or bidirectional stream with the same size
 6443 and format, and return that stream */
6444 if ((mInputStreamInfo.dim.width > 0) &&
6445 (mInputStreamInfo.dim.height > 0)) {
6446 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6447 it != mStreamInfo.end(); it++) {
6448
6449 camera3_stream_t *stream = (*it)->stream;
6450 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6451 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6452 (stream->format == mInputStreamInfo.format)) {
6453 // Usage flag for an input stream and the source output stream
6454 // may be different.
6455 LOGD("Found reprocessible output stream! %p", *it);
6456 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6457 stream->usage, mInputStreamInfo.usage);
6458
6459 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6460 if (channel != NULL && channel->mStreams[0]) {
6461 id = channel->mStreams[0]->getMyServerID();
6462 return NO_ERROR;
6463 }
6464 }
6465 }
6466 } else {
6467 LOGD("No input stream, so no reprocessible output stream");
6468 }
6469 return NAME_NOT_FOUND;
6470}
6471
6472/*===========================================================================
6473 * FUNCTION : lookupFwkName
6474 *
 6475 * DESCRIPTION: In case the enum is not the same in the fwk and backend,
 6476 * make sure the parameter is correctly propagated
6477 *
6478 * PARAMETERS :
6479 * @arr : map between the two enums
6480 * @len : len of the map
6481 * @hal_name : name of the hal_parm to map
6482 *
6483 * RETURN : int type of status
6484 * fwk_name -- success
 6485 * non-zero failure code
6486 *==========================================================================*/
6487template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6488 size_t len, halType hal_name)
6489{
6490
6491 for (size_t i = 0; i < len; i++) {
6492 if (arr[i].hal_name == hal_name) {
6493 return arr[i].fwk_name;
6494 }
6495 }
6496
 6497 /* Not finding a matching framework type is not necessarily
 6498 * an error case. This happens when mm-camera supports more attributes
 6499 * than the framework does */
6500 LOGH("Cannot find matching framework type");
6501 return NAME_NOT_FOUND;
6502}
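
// Illustrative use only (a sketch, not taken from this file): EFFECT_MODES_MAP
// and METADATA_MAP_SIZE are assumed from their use elsewhere in this HAL as a
// {fwk_name, hal_name} table and its size macro, and camMetadata stands for any
// CameraMetadata instance being populated:
//   int fwk_effect = lookupFwkName(EFFECT_MODES_MAP,
//           METADATA_MAP_SIZE(EFFECT_MODES_MAP), CAM_EFFECT_MODE_MONO);
//   if (fwk_effect != NAME_NOT_FOUND) {
//       uint8_t effectMode = (uint8_t)fwk_effect;
//       camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
//   }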
6503
6504/*===========================================================================
6505 * FUNCTION : lookupHalName
6506 *
 6507 * DESCRIPTION: In case the enum is not the same in the fwk and backend,
 6508 * make sure the parameter is correctly propagated
6509 *
6510 * PARAMETERS :
6511 * @arr : map between the two enums
6512 * @len : len of the map
 6513 * @fwk_name : name of the fwk_parm to map
6514 *
6515 * RETURN : int32_t type of status
6516 * hal_name -- success
 6517 * non-zero failure code
6518 *==========================================================================*/
6519template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6520 size_t len, fwkType fwk_name)
6521{
6522 for (size_t i = 0; i < len; i++) {
6523 if (arr[i].fwk_name == fwk_name) {
6524 return arr[i].hal_name;
6525 }
6526 }
6527
6528 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6529 return NAME_NOT_FOUND;
6530}
6531
6532/*===========================================================================
6533 * FUNCTION : lookupProp
6534 *
6535 * DESCRIPTION: lookup a value by its name
6536 *
6537 * PARAMETERS :
6538 * @arr : map between the two enums
6539 * @len : size of the map
6540 * @name : name to be looked up
6541 *
6542 * RETURN : Value if found
6543 * CAM_CDS_MODE_MAX if not found
6544 *==========================================================================*/
6545template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6546 size_t len, const char *name)
6547{
6548 if (name) {
6549 for (size_t i = 0; i < len; i++) {
6550 if (!strcmp(arr[i].desc, name)) {
6551 return arr[i].val;
6552 }
6553 }
6554 }
6555 return CAM_CDS_MODE_MAX;
6556}
6557
6558/*===========================================================================
 6559 * FUNCTION   : translateFromHalMetadata
 *
 6560 * DESCRIPTION: Translate the metadata buffer received from the backend into
 *              the framework camera_metadata_t result format
6561 *
6562 * PARAMETERS :
6563 * @metadata : metadata information from callback
6564 * @timestamp: metadata buffer timestamp
6565 * @request_id: request id
6566 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006567 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006568 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6569 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006570 * @pprocDone: whether internal offline postprocsesing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006571 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6572 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006573 *
6574 * RETURN : camera_metadata_t*
6575 * metadata in a format specified by fwk
6576 *==========================================================================*/
6577camera_metadata_t*
6578QCamera3HardwareInterface::translateFromHalMetadata(
6579 metadata_buffer_t *metadata,
6580 nsecs_t timestamp,
6581 int32_t request_id,
6582 const CameraMetadata& jpegMetadata,
6583 uint8_t pipeline_depth,
6584 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006585 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006586 /* DevCamDebug metadata translateFromHalMetadata argument */
6587 uint8_t DevCamDebug_meta_enable,
6588 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006589 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006590 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006591 bool lastMetadataInBatch,
6592 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006593{
6594 CameraMetadata camMetadata;
6595 camera_metadata_t *resultMetadata;
6596
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006597 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006598 /* In batch mode, populate only SENSOR_TIMESTAMP for metadata that is not the
6599 * last one in the batch. The timestamp is still needed because it is used for
6600 * the shutter notification calculation. */
6601 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6602 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006603 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006604 }
6605
Thierry Strudel3d639192016-09-09 11:52:26 -07006606 if (jpegMetadata.entryCount())
6607 camMetadata.append(jpegMetadata);
6608
6609 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6610 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6611 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6612 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006613 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006614 if (mBatchSize == 0) {
6615 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6616 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6617 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006618
Samuel Ha68ba5172016-12-15 18:41:12 -08006619 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6620 // Only update DevCamDebug metadata conditionally: non-HFR mode and only when it is enabled.
6621 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6622 // DevCamDebug metadata translateFromHalMetadata AF
6623 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6624 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6625 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6626 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6627 }
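    /*
     * Note on the IF_META_AVAILABLE(type, name, tag, table) pattern used
     * throughout this function: the macro (from the mm-camera interface
     * headers) declares a typed pointer into the metadata buffer and runs the
     * block that follows only when that tag is actually populated.
     * Conceptually it behaves roughly like the sketch below; isValid() and
     * pointerOf() are hypothetical names used for illustration, the real
     * expansion lives in the camera interface headers.
     *
     *     int32_t *DevCamDebug_af_luma =
     *             isValid(metadata, CAM_INTF_META_DEV_CAM_AF_LUMA) ?
     *             (int32_t *)pointerOf(metadata, CAM_INTF_META_DEV_CAM_AF_LUMA) :
     *             NULL;
     *     if (DevCamDebug_af_luma != NULL) {
     *         // body of the IF_META_AVAILABLE block
     *     }
     */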
6628 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6629 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6630 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6631 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6632 }
6633 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6634 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6635 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6636 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6637 }
6638 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6639 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6640 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6641 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6642 }
6643 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6644 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6645 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6646 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6647 }
6648 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6649 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6650 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6651 *DevCamDebug_af_monitor_pdaf_target_pos;
6652 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6653 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6654 }
6655 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6656 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6657 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6658 *DevCamDebug_af_monitor_pdaf_confidence;
6659 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6660 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6661 }
6662 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6663 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6664 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6665 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6666 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6667 }
6668 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6669 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6670 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6671 *DevCamDebug_af_monitor_tof_target_pos;
6672 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6673 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6674 }
6675 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6676 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6677 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6678 *DevCamDebug_af_monitor_tof_confidence;
6679 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6680 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6681 }
6682 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6683 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6684 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6685 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6686 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6687 }
6688 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6689 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6690 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6691 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6692 &fwk_DevCamDebug_af_monitor_type_select, 1);
6693 }
6694 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6695 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6696 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6697 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6698 &fwk_DevCamDebug_af_monitor_refocus, 1);
6699 }
6700 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6701 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6702 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6703 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6704 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6705 }
6706 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6707 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6708 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6709 *DevCamDebug_af_search_pdaf_target_pos;
6710 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6711 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6712 }
6713 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6714 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6715 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6716 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6717 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6718 }
6719 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6720 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6721 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6722 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6723 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6724 }
6725 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6726 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6727 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6728 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6729 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6730 }
6731 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6732 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6733 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6734 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6735 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6736 }
6737 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6738 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6739 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6740 *DevCamDebug_af_search_tof_target_pos;
6741 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6742 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6743 }
6744 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6745 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6746 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6747 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6748 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6749 }
6750 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6751 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6752 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6753 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6754 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6755 }
6756 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6757 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6758 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6759 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6760 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6761 }
6762 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6763 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6764 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6765 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6766 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6767 }
6768 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6769 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6770 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6771 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6772 &fwk_DevCamDebug_af_search_type_select, 1);
6773 }
6774 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6775 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6776 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6777 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6778 &fwk_DevCamDebug_af_search_next_pos, 1);
6779 }
6780 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6781 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6782 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6783 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6784 &fwk_DevCamDebug_af_search_target_pos, 1);
6785 }
6786 // DevCamDebug metadata translateFromHalMetadata AEC
6787 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6788 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6789 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6790 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6791 }
6792 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6793 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6794 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6795 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6796 }
6797 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6798 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6799 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6800 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6801 }
6802 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6803 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6804 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6805 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6806 }
6807 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6808 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6809 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6810 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6811 }
6812 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6813 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6814 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6815 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6816 }
6817 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6818 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6819 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6820 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6821 }
6822 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6823 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6824 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6825 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6826 }
Samuel Ha34229982017-02-17 13:51:11 -08006827 // DevCamDebug metadata translateFromHalMetadata zzHDR
6828 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6829 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6830 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6831 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6832 }
6833 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6834 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006835 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006836 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6837 }
6838 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6839 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6840 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6841 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6842 }
6843 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6844 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006845 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006846 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6847 }
6848 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6849 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6850 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6851 *DevCamDebug_aec_hdr_sensitivity_ratio;
6852 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6853 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6854 }
6855 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6856 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6857 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6858 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6859 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6860 }
6861 // DevCamDebug metadata translateFromHalMetadata ADRC
6862 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6863 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6864 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6865 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6866 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6867 }
6868 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6869 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6870 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6871 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6872 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6873 }
6874 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6875 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6876 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6877 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6878 }
6879 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6880 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6881 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6882 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6883 }
6884 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6885 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6886 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6887 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6888 }
6889 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6890 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6891 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6892 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6893 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006894 // DevCamDebug metadata translateFromHalMetadata AWB
6895 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6896 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6897 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6898 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6899 }
6900 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6901 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6902 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6903 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6904 }
6905 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6906 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6907 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6908 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6909 }
6910 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6911 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6912 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6913 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6914 }
6915 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6916 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6917 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6918 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6919 }
6920 }
6921 // atrace_end(ATRACE_TAG_ALWAYS);
6922
Thierry Strudel3d639192016-09-09 11:52:26 -07006923 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6924 int64_t fwk_frame_number = *frame_number;
6925 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6926 }
6927
6928 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6929 int32_t fps_range[2];
6930 fps_range[0] = (int32_t)float_range->min_fps;
6931 fps_range[1] = (int32_t)float_range->max_fps;
6932 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6933 fps_range, 2);
6934 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6935 fps_range[0], fps_range[1]);
6936 }
6937
6938 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6939 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6940 }
6941
6942 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6943 int val = lookupFwkName(SCENE_MODES_MAP,
6944 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6945 *sceneMode);
6946 if (NAME_NOT_FOUND != val) {
6947 uint8_t fwkSceneMode = (uint8_t)val;
6948 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6949 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6950 fwkSceneMode);
6951 }
6952 }
6953
6954 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6955 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6956 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6957 }
6958
6959 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6960 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6961 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6962 }
6963
6964 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6965 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6966 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6967 }
6968
6969 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6970 CAM_INTF_META_EDGE_MODE, metadata) {
6971 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6972 }
6973
6974 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6975 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6976 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6977 }
6978
6979 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6980 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6981 }
6982
6983 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6984 if (0 <= *flashState) {
6985 uint8_t fwk_flashState = (uint8_t) *flashState;
6986 if (!gCamCapability[mCameraId]->flash_available) {
6987 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6988 }
6989 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6990 }
6991 }
6992
6993 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6994 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6995 if (NAME_NOT_FOUND != val) {
6996 uint8_t fwk_flashMode = (uint8_t)val;
6997 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6998 }
6999 }
7000
7001 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7002 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7003 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7004 }
7005
7006 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7007 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7008 }
7009
7010 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7011 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7012 }
7013
7014 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7015 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7016 }
7017
7018 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7019 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7020 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7021 }
7022
7023 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7024 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7025 LOGD("fwk_videoStab = %d", fwk_videoStab);
7026 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7027 } else {
7028 // Regardless of whether video stabilization is supported, CTS expects the EIS
7029 // result to be non-NULL, so default the video stabilization result to OFF mode.
7030 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7031 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007032 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007033 }
7034
7035 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7036 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7037 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7038 }
7039
7040 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7041 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7042 }
7043
Thierry Strudel3d639192016-09-09 11:52:26 -07007044 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7045 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007046 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007047
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007048 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7049 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007050
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007051 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007052 blackLevelAppliedPattern->cam_black_level[0],
7053 blackLevelAppliedPattern->cam_black_level[1],
7054 blackLevelAppliedPattern->cam_black_level[2],
7055 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007056 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7057 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007058
7059#ifndef USE_HAL_3_3
7060 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307061 // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007062 // depth space.
Jason Lee4f3d96e2017-02-28 19:24:14 +05307063 fwk_blackLevelInd[0] /= 16.0;
7064 fwk_blackLevelInd[1] /= 16.0;
7065 fwk_blackLevelInd[2] /= 16.0;
7066 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007067 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7068 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007069#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007070 }
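    /*
     * Worked example for the conversion above (illustrative numbers only):
     * the pipeline tracks black levels in a 14-bit range, while
     * ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL is reported in the sensor's 10-bit
     * raw range. Dropping 14 - 10 = 4 bits of precision means dividing by
     * 2^4 = 16, so an internal black level of 1024 (14-bit) is reported as
     * 1024 / 16.0 = 64.0 in 10-bit units.
     */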
7071
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007072#ifndef USE_HAL_3_3
7073 // Fixed whitelevel is used by ISP/Sensor
7074 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7075 &gCamCapability[mCameraId]->white_level, 1);
7076#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007077
7078 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7079 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7080 int32_t scalerCropRegion[4];
7081 scalerCropRegion[0] = hScalerCropRegion->left;
7082 scalerCropRegion[1] = hScalerCropRegion->top;
7083 scalerCropRegion[2] = hScalerCropRegion->width;
7084 scalerCropRegion[3] = hScalerCropRegion->height;
7085
7086 // Adjust crop region from sensor output coordinate system to active
7087 // array coordinate system.
7088 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7089 scalerCropRegion[2], scalerCropRegion[3]);
7090
7091 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7092 }
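    /*
     * Sketch of the remapping above (numbers are made up for illustration):
     * mCropRegionMapper.toActiveArray() rescales a rectangle from the sensor
     * output coordinate system into the active pixel array coordinate system
     * in place. Assuming a simple linear scale, a 2000x1500 sensor output on
     * a 4000x3000 active array would map a crop of (left=100, top=100,
     * width=1000, height=750) to roughly (200, 200, 2000, 1500) before it is
     * reported as ANDROID_SCALER_CROP_REGION.
     */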
7093
7094 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7095 LOGD("sensorExpTime = %lld", *sensorExpTime);
7096 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7097 }
7098
7099 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7100 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7101 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7102 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7103 }
7104
7105 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7106 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7107 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7108 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7109 sensorRollingShutterSkew, 1);
7110 }
7111
7112 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7113 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7114 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7115
7116 //calculate the noise profile based on sensitivity
7117 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7118 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7119 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7120 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7121 noise_profile[i] = noise_profile_S;
7122 noise_profile[i+1] = noise_profile_O;
7123 }
7124 LOGD("noise model entry (S, O) is (%f, %f)",
7125 noise_profile_S, noise_profile_O);
7126 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7127 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7128 }
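    /*
     * Layout note for ANDROID_SENSOR_NOISE_PROFILE as published above,
     * derived from the loop itself: the entry is a flat array of (S, O)
     * pairs, one pair per color channel, so a 4-channel Bayer sensor gets
     * [S, O, S, O, S, O, S, O]. The same S and O are repeated for every
     * channel because this HAL computes a single sensitivity-based model
     * for all channels.
     */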
7129
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007130#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007131 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007132 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007133 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007134 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007135 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7136 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7137 }
7138 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007139#endif
7140
Thierry Strudel3d639192016-09-09 11:52:26 -07007141 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7142 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7143 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7144 }
7145
7146 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7147 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7148 *faceDetectMode);
7149 if (NAME_NOT_FOUND != val) {
7150 uint8_t fwk_faceDetectMode = (uint8_t)val;
7151 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7152
7153 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7154 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7155 CAM_INTF_META_FACE_DETECTION, metadata) {
7156 uint8_t numFaces = MIN(
7157 faceDetectionInfo->num_faces_detected, MAX_ROI);
7158 int32_t faceIds[MAX_ROI];
7159 uint8_t faceScores[MAX_ROI];
7160 int32_t faceRectangles[MAX_ROI * 4];
7161 int32_t faceLandmarks[MAX_ROI * 6];
7162 size_t j = 0, k = 0;
7163
7164 for (size_t i = 0; i < numFaces; i++) {
7165 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7166 // Adjust crop region from sensor output coordinate system to active
7167 // array coordinate system.
7168 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7169 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7170 rect.width, rect.height);
7171
7172 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7173 faceRectangles+j, -1);
7174
Jason Lee8ce36fa2017-04-19 19:40:37 -07007175 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7176 "bottom-right (%d, %d)",
7177 faceDetectionInfo->frame_id, i,
7178 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7179 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7180
Thierry Strudel3d639192016-09-09 11:52:26 -07007181 j+= 4;
7182 }
7183 if (numFaces <= 0) {
7184 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7185 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7186 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7187 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7188 }
7189
7190 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7191 numFaces);
7192 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7193 faceRectangles, numFaces * 4U);
7194 if (fwk_faceDetectMode ==
7195 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7196 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7197 CAM_INTF_META_FACE_LANDMARK, metadata) {
7198
7199 for (size_t i = 0; i < numFaces; i++) {
7200 // Map the co-ordinate sensor output coordinate system to active
7201 // array coordinate system.
7202 mCropRegionMapper.toActiveArray(
7203 landmarks->face_landmarks[i].left_eye_center.x,
7204 landmarks->face_landmarks[i].left_eye_center.y);
7205 mCropRegionMapper.toActiveArray(
7206 landmarks->face_landmarks[i].right_eye_center.x,
7207 landmarks->face_landmarks[i].right_eye_center.y);
7208 mCropRegionMapper.toActiveArray(
7209 landmarks->face_landmarks[i].mouth_center.x,
7210 landmarks->face_landmarks[i].mouth_center.y);
7211
7212 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007213
7214 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7215 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7216 faceDetectionInfo->frame_id, i,
7217 faceLandmarks[k + LEFT_EYE_X],
7218 faceLandmarks[k + LEFT_EYE_Y],
7219 faceLandmarks[k + RIGHT_EYE_X],
7220 faceLandmarks[k + RIGHT_EYE_Y],
7221 faceLandmarks[k + MOUTH_X],
7222 faceLandmarks[k + MOUTH_Y]);
7223
Thierry Strudel04e026f2016-10-10 11:27:36 -07007224 k+= TOTAL_LANDMARK_INDICES;
7225 }
7226 } else {
7227 for (size_t i = 0; i < numFaces; i++) {
7228 setInvalidLandmarks(faceLandmarks+k);
7229 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007230 }
7231 }
7232
Jason Lee49619db2017-04-13 12:07:22 -07007233 for (size_t i = 0; i < numFaces; i++) {
7234 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7235
7236 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7237 faceDetectionInfo->frame_id, i, faceIds[i]);
7238 }
7239
Thierry Strudel3d639192016-09-09 11:52:26 -07007240 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7241 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7242 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007243 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007244 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7245 CAM_INTF_META_FACE_BLINK, metadata) {
7246 uint8_t detected[MAX_ROI];
7247 uint8_t degree[MAX_ROI * 2];
7248 for (size_t i = 0; i < numFaces; i++) {
7249 detected[i] = blinks->blink[i].blink_detected;
7250 degree[2 * i] = blinks->blink[i].left_blink;
7251 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007252
Jason Lee49619db2017-04-13 12:07:22 -07007253 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7254 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7255 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7256 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007257 }
7258 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7259 detected, numFaces);
7260 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7261 degree, numFaces * 2);
7262 }
7263 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7264 CAM_INTF_META_FACE_SMILE, metadata) {
7265 uint8_t degree[MAX_ROI];
7266 uint8_t confidence[MAX_ROI];
7267 for (size_t i = 0; i < numFaces; i++) {
7268 degree[i] = smiles->smile[i].smile_degree;
7269 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007270
Jason Lee49619db2017-04-13 12:07:22 -07007271 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7272 "smile_degree=%d, smile_score=%d",
7273 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007274 }
7275 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7276 degree, numFaces);
7277 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7278 confidence, numFaces);
7279 }
7280 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7281 CAM_INTF_META_FACE_GAZE, metadata) {
7282 int8_t angle[MAX_ROI];
7283 int32_t direction[MAX_ROI * 3];
7284 int8_t degree[MAX_ROI * 2];
7285 for (size_t i = 0; i < numFaces; i++) {
7286 angle[i] = gazes->gaze[i].gaze_angle;
7287 direction[3 * i] = gazes->gaze[i].updown_dir;
7288 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7289 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7290 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7291 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007292
7293 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7294 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7295 "left_right_gaze=%d, top_bottom_gaze=%d",
7296 faceDetectionInfo->frame_id, i, angle[i],
7297 direction[3 * i], direction[3 * i + 1],
7298 direction[3 * i + 2],
7299 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007300 }
7301 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7302 (uint8_t *)angle, numFaces);
7303 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7304 direction, numFaces * 3);
7305 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7306 (uint8_t *)degree, numFaces * 2);
7307 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007308 }
7309 }
7310 }
7311 }
7312
7313 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7314 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007315 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007316 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007317 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007318
Shuzhen Wang14415f52016-11-16 18:26:18 -08007319 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7320 histogramBins = *histBins;
7321 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7322 }
7323
7324 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007325 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7326 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007327 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007328
7329 switch (stats_data->type) {
7330 case CAM_HISTOGRAM_TYPE_BAYER:
7331 switch (stats_data->bayer_stats.data_type) {
7332 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007333 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7334 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007335 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007336 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7337 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007338 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007339 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7340 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007341 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007342 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007343 case CAM_STATS_CHANNEL_R:
7344 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007345 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7346 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007347 }
7348 break;
7349 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007350 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007351 break;
7352 }
7353
Shuzhen Wang14415f52016-11-16 18:26:18 -08007354 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007355 }
7356 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007357 }
7358
7359 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7360 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7361 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7362 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7363 }
7364
7365 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7366 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7367 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7368 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7369 }
7370
7371 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7372 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7373 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7374 CAM_MAX_SHADING_MAP_HEIGHT);
7375 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7376 CAM_MAX_SHADING_MAP_WIDTH);
7377 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7378 lensShadingMap->lens_shading, 4U * map_width * map_height);
7379 }
7380
7381 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7382 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7383 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7384 }
7385
7386 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7387 //Populate CAM_INTF_META_TONEMAP_CURVES
7388 /* ch0 = G, ch 1 = B, ch 2 = R*/
7389 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7390 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7391 tonemap->tonemap_points_cnt,
7392 CAM_MAX_TONEMAP_CURVE_SIZE);
7393 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7394 }
7395
7396 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7397 &tonemap->curves[0].tonemap_points[0][0],
7398 tonemap->tonemap_points_cnt * 2);
7399
7400 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7401 &tonemap->curves[1].tonemap_points[0][0],
7402 tonemap->tonemap_points_cnt * 2);
7403
7404 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7405 &tonemap->curves[2].tonemap_points[0][0],
7406 tonemap->tonemap_points_cnt * 2);
7407 }
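    /*
     * Layout note for the tonemap curves above: each curve is a list of
     * (Pin, Pout) control points, which is why every update() call passes
     * tonemap_points_cnt * 2 values. The HAL channel order is ch0 = G,
     * ch1 = B, ch2 = R, mapped to ANDROID_TONEMAP_CURVE_GREEN/_BLUE/_RED
     * respectively.
     */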
7408
7409 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7410 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7411 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7412 CC_GAIN_MAX);
7413 }
7414
7415 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7416 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7417 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7418 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7419 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7420 }
7421
7422 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7423 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7424 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7425 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7426 toneCurve->tonemap_points_cnt,
7427 CAM_MAX_TONEMAP_CURVE_SIZE);
7428 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7429 }
7430 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7431 (float*)toneCurve->curve.tonemap_points,
7432 toneCurve->tonemap_points_cnt * 2);
7433 }
7434
7435 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7436 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7437 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7438 predColorCorrectionGains->gains, 4);
7439 }
7440
7441 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7442 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7443 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7444 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7445 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7446 }
7447
7448 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7449 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7450 }
7451
7452 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7453 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7454 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7455 }
7456
7457 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7458 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7459 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7460 }
7461
7462 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7463 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7464 *effectMode);
7465 if (NAME_NOT_FOUND != val) {
7466 uint8_t fwk_effectMode = (uint8_t)val;
7467 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7468 }
7469 }
7470
7471 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7472 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7473 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7474 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7475 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7476 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7477 }
7478 int32_t fwk_testPatternData[4];
7479 fwk_testPatternData[0] = testPatternData->r;
7480 fwk_testPatternData[3] = testPatternData->b;
7481 switch (gCamCapability[mCameraId]->color_arrangement) {
7482 case CAM_FILTER_ARRANGEMENT_RGGB:
7483 case CAM_FILTER_ARRANGEMENT_GRBG:
7484 fwk_testPatternData[1] = testPatternData->gr;
7485 fwk_testPatternData[2] = testPatternData->gb;
7486 break;
7487 case CAM_FILTER_ARRANGEMENT_GBRG:
7488 case CAM_FILTER_ARRANGEMENT_BGGR:
7489 fwk_testPatternData[2] = testPatternData->gr;
7490 fwk_testPatternData[1] = testPatternData->gb;
7491 break;
7492 default:
7493 LOGE("color arrangement %d is not supported",
7494 gCamCapability[mCameraId]->color_arrangement);
7495 break;
7496 }
7497 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7498 }
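    /*
     * Ordering note for ANDROID_SENSOR_TEST_PATTERN_DATA above: the framework
     * expects the four values as [R, Geven, Godd, B], while the HAL reports
     * green per CFA channel (Gr/Gb). The switch above only decides which of
     * Gr/Gb lands in the even-green and odd-green slots, based on the
     * sensor's color filter arrangement; R and B are unaffected.
     */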
7499
7500 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7501 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7502 }
7503
7504 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7505 String8 str((const char *)gps_methods);
7506 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7507 }
7508
7509 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7510 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7511 }
7512
7513 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7514 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7515 }
7516
7517 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7518 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7519 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7520 }
7521
7522 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7523 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7524 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7525 }
7526
7527 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7528 int32_t fwk_thumb_size[2];
7529 fwk_thumb_size[0] = thumb_size->width;
7530 fwk_thumb_size[1] = thumb_size->height;
7531 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7532 }
7533
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007534 // Skip reprocess metadata if there is no input stream.
7535 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7536 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7537 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7538 privateData,
7539 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7540 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007541 }
7542
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007543 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007544 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007545 meteringMode, 1);
7546 }
7547
Thierry Strudel54dc9782017-02-15 12:12:10 -08007548 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7549 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7550 LOGD("hdr_scene_data: %d %f\n",
7551 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7552 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7553 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7554 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7555 &isHdr, 1);
7556 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7557 &isHdrConfidence, 1);
7558 }
7559
7560
7561
Thierry Strudel3d639192016-09-09 11:52:26 -07007562 if (metadata->is_tuning_params_valid) {
7563 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7564 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7565 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7566
7567
7568 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7569 sizeof(uint32_t));
7570 data += sizeof(uint32_t);
7571
7572 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7573 sizeof(uint32_t));
7574 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7575 data += sizeof(uint32_t);
7576
7577 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7578 sizeof(uint32_t));
7579 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7580 data += sizeof(uint32_t);
7581
7582 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7583 sizeof(uint32_t));
7584 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7585 data += sizeof(uint32_t);
7586
7587 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7588 sizeof(uint32_t));
7589 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7590 data += sizeof(uint32_t);
7591
7592 metadata->tuning_params.tuning_mod3_data_size = 0;
7593 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7594 sizeof(uint32_t));
7595 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7596 data += sizeof(uint32_t);
7597
7598 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7599 TUNING_SENSOR_DATA_MAX);
7600 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7601 count);
7602 data += count;
7603
7604 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7605 TUNING_VFE_DATA_MAX);
7606 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7607 count);
7608 data += count;
7609
7610 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7611 TUNING_CPP_DATA_MAX);
7612 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7613 count);
7614 data += count;
7615
7616 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7617 TUNING_CAC_DATA_MAX);
7618 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7619 count);
7620 data += count;
7621
7622 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7623 (int32_t *)(void *)tuning_meta_data_blob,
7624 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7625 }
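    /*
     * Resulting layout of QCAMERA3_TUNING_META_DATA_BLOB as packed above
     * (byte offsets follow directly from the memcpy sequence):
     *
     *     [0]   uint32_t tuning_data_version
     *     [4]   uint32_t tuning_sensor_data_size
     *     [8]   uint32_t tuning_vfe_data_size
     *     [12]  uint32_t tuning_cpp_data_size
     *     [16]  uint32_t tuning_cac_data_size
     *     [20]  uint32_t tuning_mod3_data_size (forced to 0)
     *     [24]  sensor data, then vfe, cpp and cac data back to back, each
     *           capped at its TUNING_*_DATA_MAX size
     *
     * The blob is published as an int32_t array, so the total length passed
     * to update() is the packed byte count divided by sizeof(uint32_t).
     */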
7626
7627 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7628 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7629 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7630 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7631 NEUTRAL_COL_POINTS);
7632 }
7633
7634 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7635 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7636 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7637 }
7638
7639 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7640 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7641 // Adjust crop region from sensor output coordinate system to active
7642 // array coordinate system.
7643 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7644 hAeRegions->rect.width, hAeRegions->rect.height);
7645
7646 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7647 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7648 REGIONS_TUPLE_COUNT);
7649 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7650 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7651 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7652 hAeRegions->rect.height);
7653 }
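    /*
     * Layout note for ANDROID_CONTROL_AE_REGIONS above: convertToRegions()
     * packs the rectangle as the framework's 5-tuple metering region
     * (xmin, ymin, xmax, ymax, weight) -- REGIONS_TUPLE_COUNT entries --
     * which is why the log line prints the FWK values as a 4-corner box
     * while the HAL values are left/top/width/height.
     */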
7654
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007655 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7656 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7657 if (NAME_NOT_FOUND != val) {
7658 uint8_t fwkAfMode = (uint8_t)val;
7659 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7660 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7661 } else {
7662 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7663 val);
7664 }
7665 }
7666
Thierry Strudel3d639192016-09-09 11:52:26 -07007667 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7668 uint8_t fwk_afState = (uint8_t) *afState;
7669 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007670 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007671 }
7672
7673 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7674 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7675 }
7676
7677 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7678 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7679 }
7680
7681 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7682 uint8_t fwk_lensState = *lensState;
7683 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7684 }
7685
Thierry Strudel3d639192016-09-09 11:52:26 -07007686
7687 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007688 uint32_t ab_mode = *hal_ab_mode;
7689 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7690 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7691 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7692 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007693 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007694 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007695 if (NAME_NOT_FOUND != val) {
7696 uint8_t fwk_ab_mode = (uint8_t)val;
7697 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7698 }
7699 }
7700
7701 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7702 int val = lookupFwkName(SCENE_MODES_MAP,
7703 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7704 if (NAME_NOT_FOUND != val) {
7705 uint8_t fwkBestshotMode = (uint8_t)val;
7706 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7707 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7708 } else {
7709 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7710 }
7711 }
7712
7713 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7714 uint8_t fwk_mode = (uint8_t) *mode;
7715 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7716 }
7717
7718 /* Constant metadata values to be updated */
7719 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7720 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7721
7722 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7723 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7724
7725 int32_t hotPixelMap[2];
7726 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7727
7728 // CDS
7729 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7730 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7731 }
7732
Thierry Strudel04e026f2016-10-10 11:27:36 -07007733 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7734 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007735 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007736 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7737 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7738 } else {
7739 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7740 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007741
7742 if(fwk_hdr != curr_hdr_state) {
7743 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7744 if(fwk_hdr)
7745 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7746 else
7747 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7748 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007749 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7750 }
7751
Thierry Strudel54dc9782017-02-15 12:12:10 -08007752 //binning correction
7753 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7754 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7755 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7756 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7757 }
7758
Thierry Strudel04e026f2016-10-10 11:27:36 -07007759 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007760 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007761 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7762 int8_t is_ir_on = 0;
7763
7764 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7765 if(is_ir_on != curr_ir_state) {
7766 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7767 if(is_ir_on)
7768 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7769 else
7770 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7771 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007772 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007773 }
7774
Thierry Strudel269c81a2016-10-12 12:13:59 -07007775 // AEC SPEED
7776 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7777 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7778 }
7779
7780 // AWB SPEED
7781 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7782 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7783 }
7784
Thierry Strudel3d639192016-09-09 11:52:26 -07007785 // TNR
7786 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7787 uint8_t tnr_enable = tnr->denoise_enable;
7788 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007789 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7790 int8_t is_tnr_on = 0;
7791
7792 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7793 if(is_tnr_on != curr_tnr_state) {
7794 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7795 if(is_tnr_on)
7796 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7797 else
7798 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7799 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007800
7801 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7802 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7803 }
7804
7805 // Reprocess crop data
7806 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7807 uint8_t cnt = crop_data->num_of_streams;
7808 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7809            // mm-qcamera-daemon only posts crop_data for streams
7810            // not linked to pproc, so the absence of valid crop
7811            // metadata is not necessarily an error case.
7812 LOGD("No valid crop metadata entries");
7813 } else {
7814 uint32_t reproc_stream_id;
7815 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7816 LOGD("No reprocessible stream found, ignore crop data");
7817 } else {
7818 int rc = NO_ERROR;
7819 Vector<int32_t> roi_map;
7820 int32_t *crop = new int32_t[cnt*4];
7821 if (NULL == crop) {
7822 rc = NO_MEMORY;
7823 }
7824 if (NO_ERROR == rc) {
7825 int32_t streams_found = 0;
7826 for (size_t i = 0; i < cnt; i++) {
7827 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7828 if (pprocDone) {
7829 // HAL already does internal reprocessing,
7830 // either via reprocessing before JPEG encoding,
7831 // or offline postprocessing for pproc bypass case.
7832 crop[0] = 0;
7833 crop[1] = 0;
7834 crop[2] = mInputStreamInfo.dim.width;
7835 crop[3] = mInputStreamInfo.dim.height;
7836 } else {
7837 crop[0] = crop_data->crop_info[i].crop.left;
7838 crop[1] = crop_data->crop_info[i].crop.top;
7839 crop[2] = crop_data->crop_info[i].crop.width;
7840 crop[3] = crop_data->crop_info[i].crop.height;
7841 }
7842 roi_map.add(crop_data->crop_info[i].roi_map.left);
7843 roi_map.add(crop_data->crop_info[i].roi_map.top);
7844 roi_map.add(crop_data->crop_info[i].roi_map.width);
7845 roi_map.add(crop_data->crop_info[i].roi_map.height);
7846 streams_found++;
7847 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7848 crop[0], crop[1], crop[2], crop[3]);
7849 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7850 crop_data->crop_info[i].roi_map.left,
7851 crop_data->crop_info[i].roi_map.top,
7852 crop_data->crop_info[i].roi_map.width,
7853 crop_data->crop_info[i].roi_map.height);
7854 break;
7855
7856 }
7857 }
7858 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7859 &streams_found, 1);
7860 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7861 crop, (size_t)(streams_found * 4));
7862 if (roi_map.array()) {
7863 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7864 roi_map.array(), roi_map.size());
7865 }
7866 }
7867 if (crop) {
7868 delete [] crop;
7869 }
7870 }
7871 }
7872 }
7873
7874 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7875        // Regardless of whether CAC is supported, CTS expects the CAC result to be
7876        // non-NULL, so hardcode the CAC result to OFF mode.
7877 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7878 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7879 } else {
7880 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7881 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7882 *cacMode);
7883 if (NAME_NOT_FOUND != val) {
7884 uint8_t resultCacMode = (uint8_t)val;
7885                // Check whether the CAC result from the callback matches the framework-set
7886                // CAC mode; if they differ, report the CAC mode from the corresponding request.
7887 if (fwk_cacMode != resultCacMode) {
7888 resultCacMode = fwk_cacMode;
7889 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007890 //Check if CAC is disabled by property
7891 if (m_cacModeDisabled) {
7892 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7893 }
7894
Thierry Strudel3d639192016-09-09 11:52:26 -07007895 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7896 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7897 } else {
7898 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7899 }
7900 }
7901 }
7902
7903 // Post blob of cam_cds_data through vendor tag.
7904 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7905 uint8_t cnt = cdsInfo->num_of_streams;
7906 cam_cds_data_t cdsDataOverride;
7907 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7908 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7909 cdsDataOverride.num_of_streams = 1;
7910 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7911 uint32_t reproc_stream_id;
7912 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7913 LOGD("No reprocessible stream found, ignore cds data");
7914 } else {
7915 for (size_t i = 0; i < cnt; i++) {
7916 if (cdsInfo->cds_info[i].stream_id ==
7917 reproc_stream_id) {
7918 cdsDataOverride.cds_info[0].cds_enable =
7919 cdsInfo->cds_info[i].cds_enable;
7920 break;
7921 }
7922 }
7923 }
7924 } else {
7925 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7926 }
7927 camMetadata.update(QCAMERA3_CDS_INFO,
7928 (uint8_t *)&cdsDataOverride,
7929 sizeof(cam_cds_data_t));
7930 }
7931
7932 // Ldaf calibration data
7933 if (!mLdafCalibExist) {
7934 IF_META_AVAILABLE(uint32_t, ldafCalib,
7935 CAM_INTF_META_LDAF_EXIF, metadata) {
7936 mLdafCalibExist = true;
7937 mLdafCalib[0] = ldafCalib[0];
7938 mLdafCalib[1] = ldafCalib[1];
7939 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7940 ldafCalib[0], ldafCalib[1]);
7941 }
7942 }
7943
Thierry Strudel54dc9782017-02-15 12:12:10 -08007944 // EXIF debug data through vendor tag
7945 /*
7946 * Mobicat Mask can assume 3 values:
7947 * 1 refers to Mobicat data,
7948 * 2 refers to Stats Debug and Exif Debug Data
7949 * 3 refers to Mobicat and Stats Debug Data
7950 * We want to make sure that we are sending Exif debug data
7951 * only when Mobicat Mask is 2.
7952 */
7953 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7954 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7955 (uint8_t *)(void *)mExifParams.debug_params,
7956 sizeof(mm_jpeg_debug_exif_params_t));
7957 }
7958
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007959 // Reprocess and DDM debug data through vendor tag
7960 cam_reprocess_info_t repro_info;
7961 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007962 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7963 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007964 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007965 }
7966 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7967 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007968 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007969 }
7970 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7971 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007972 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007973 }
7974 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7975 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007976 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007977 }
7978 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7979 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007980 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007981 }
7982 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007983 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007984 }
7985 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7986 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007987 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007988 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007989 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7990 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7991 }
7992 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7993 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7994 }
7995 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7996 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007997
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007998 // INSTANT AEC MODE
7999 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8000 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8001 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8002 }
8003
Shuzhen Wange763e802016-03-31 10:24:29 -07008004 // AF scene change
8005 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8006 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8007 }
8008
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008009 // Enable ZSL
8010 if (enableZsl != nullptr) {
8011 uint8_t value = *enableZsl ?
8012 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8013 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8014 }
8015
Xu Han821ea9c2017-05-23 09:00:40 -07008016 // OIS Data
8017 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8018 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8019 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8020 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8021 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8022 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8023 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8024 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8025 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8026 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8027 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
8028 }
8029
Thierry Strudel3d639192016-09-09 11:52:26 -07008030 resultMetadata = camMetadata.release();
8031 return resultMetadata;
8032}
8033
8034/*===========================================================================
8035 * FUNCTION : saveExifParams
8036 *
8037 * DESCRIPTION: save 3A/EXIF debug parameters from the metadata callback into
8037 *              mExifParams.debug_params
8038 *
8039 * PARAMETERS :
8040 * @metadata : metadata information from callback
8041 *
8042 * RETURN : none
8043 *
8044 *==========================================================================*/
8045void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8046{
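    // Each block below copies the corresponding 3A debug blob into
    // mExifParams.debug_params (when that buffer has been allocated) and marks it
    // as valid.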
8047 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8048 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8049 if (mExifParams.debug_params) {
8050 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8051 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8052 }
8053 }
8054 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8055 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8056 if (mExifParams.debug_params) {
8057 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8058 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8059 }
8060 }
8061 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8062 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8063 if (mExifParams.debug_params) {
8064 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8065 mExifParams.debug_params->af_debug_params_valid = TRUE;
8066 }
8067 }
8068 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8069 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8070 if (mExifParams.debug_params) {
8071 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8072 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8073 }
8074 }
8075 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8076 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8077 if (mExifParams.debug_params) {
8078 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8079 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8080 }
8081 }
8082 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8083 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8084 if (mExifParams.debug_params) {
8085 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8086 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8087 }
8088 }
8089 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8090 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8091 if (mExifParams.debug_params) {
8092 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8093 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8094 }
8095 }
8096 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8097 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8098 if (mExifParams.debug_params) {
8099 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8100 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8101 }
8102 }
8103}
8104
8105/*===========================================================================
8106 * FUNCTION : get3AExifParams
8107 *
8108 * DESCRIPTION: return the cached EXIF parameters collected from 3A metadata
8109 *
8110 * PARAMETERS : none
8111 *
8112 *
8113 * RETURN : mm_jpeg_exif_params_t
8114 *
8115 *==========================================================================*/
8116mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8117{
8118 return mExifParams;
8119}
8120
8121/*===========================================================================
8122 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8123 *
8124 * DESCRIPTION:
8125 * DESCRIPTION: translate urgent (partial result) metadata from the camera
8125 *              backend into framework result metadata
8126 * PARAMETERS :
8127 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008128 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8129 * urgent metadata in a batch. Always true for
8130 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008131 *
8132 * RETURN : camera_metadata_t*
8133 * metadata in a format specified by fwk
8134 *==========================================================================*/
8135camera_metadata_t*
8136QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008137 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008138{
8139 CameraMetadata camMetadata;
8140 camera_metadata_t *resultMetadata;
8141
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008142 if (!lastUrgentMetadataInBatch) {
8143 /* In batch mode, use empty metadata if this is not the last in batch
8144 */
8145 resultMetadata = allocate_camera_metadata(0, 0);
8146 return resultMetadata;
8147 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008148
8149 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8150 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8151 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8152 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8153 }
8154
8155 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8156 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8157 &aecTrigger->trigger, 1);
8158 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8159 &aecTrigger->trigger_id, 1);
8160 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8161 aecTrigger->trigger);
8162 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8163 aecTrigger->trigger_id);
8164 }
8165
8166 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8167 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8168 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8169 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8170 }
8171
Thierry Strudel3d639192016-09-09 11:52:26 -07008172 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8173 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8174 &af_trigger->trigger, 1);
8175 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8176 af_trigger->trigger);
8177 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8178 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8179 af_trigger->trigger_id);
8180 }
8181
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008182 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8183 /*af regions*/
8184 int32_t afRegions[REGIONS_TUPLE_COUNT];
8185        // Adjust the AF region from the sensor output coordinate system to the
8186        // active array coordinate system.
8187 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8188 hAfRegions->rect.width, hAfRegions->rect.height);
8189
8190 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8191 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8192 REGIONS_TUPLE_COUNT);
8193 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8194 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8195 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8196 hAfRegions->rect.height);
8197 }
8198
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008199 // AF region confidence
8200 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8201 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8202 }
8203
Thierry Strudel3d639192016-09-09 11:52:26 -07008204 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8205 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8206 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8207 if (NAME_NOT_FOUND != val) {
8208 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8209 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8210 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8211 } else {
8212 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8213 }
8214 }
8215
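    // Deduce ANDROID_CONTROL_AE_MODE from the HAL's redeye, LED flash and AE mode
    // values: redeye reduction takes priority, then auto/on flash modes, then plain
    // AE on/off or the experimental external-flash mode.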
8216 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8217 uint32_t aeMode = CAM_AE_MODE_MAX;
8218 int32_t flashMode = CAM_FLASH_MODE_MAX;
8219 int32_t redeye = -1;
8220 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8221 aeMode = *pAeMode;
8222 }
8223 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8224 flashMode = *pFlashMode;
8225 }
8226 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8227 redeye = *pRedeye;
8228 }
8229
8230 if (1 == redeye) {
8231 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8232 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8233 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8234 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8235 flashMode);
8236 if (NAME_NOT_FOUND != val) {
8237 fwk_aeMode = (uint8_t)val;
8238 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8239 } else {
8240 LOGE("Unsupported flash mode %d", flashMode);
8241 }
8242 } else if (aeMode == CAM_AE_MODE_ON) {
8243 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8244 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8245 } else if (aeMode == CAM_AE_MODE_OFF) {
8246 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8247 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008248 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8249 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8250 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008251 } else {
8252 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8253 "flashMode:%d, aeMode:%u!!!",
8254 redeye, flashMode, aeMode);
8255 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008256 if (mInstantAEC) {
8257        // Increment the frame index count until a bound is reached for instant AEC.
8258 mInstantAecFrameIdxCount++;
8259 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8260 CAM_INTF_META_AEC_INFO, metadata) {
8261 LOGH("ae_params->settled = %d",ae_params->settled);
8262            // If AEC has settled, or the number of frames has reached the bound value,
8263            // reset instant AEC.
8264 if (ae_params->settled ||
8265 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8266 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8267 mInstantAEC = false;
8268 mResetInstantAEC = true;
8269 mInstantAecFrameIdxCount = 0;
8270 }
8271 }
8272 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008273 resultMetadata = camMetadata.release();
8274 return resultMetadata;
8275}
8276
8277/*===========================================================================
8278 * FUNCTION : dumpMetadataToFile
8279 *
8280 * DESCRIPTION: Dumps tuning metadata to file system
8281 *
8282 * PARAMETERS :
8283 * @meta : tuning metadata
8284 * @dumpFrameCount : current dump frame count
8285 * @enabled : Enable mask
8286 *
8287 *==========================================================================*/
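/* Dump file layout (as written below): a uint32_t tuning data version, five uint32_t
 * size fields (sensor, VFE, CPP, CAC, mod3 - mod3 is always written as 0), then the
 * sensor, VFE, CPP and CAC payloads taken from their fixed offsets within meta.data. */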
8288void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8289 uint32_t &dumpFrameCount,
8290 bool enabled,
8291 const char *type,
8292 uint32_t frameNumber)
8293{
8294 //Some sanity checks
8295 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8296 LOGE("Tuning sensor data size bigger than expected %d: %d",
8297 meta.tuning_sensor_data_size,
8298 TUNING_SENSOR_DATA_MAX);
8299 return;
8300 }
8301
8302 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8303 LOGE("Tuning VFE data size bigger than expected %d: %d",
8304 meta.tuning_vfe_data_size,
8305 TUNING_VFE_DATA_MAX);
8306 return;
8307 }
8308
8309 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8310 LOGE("Tuning CPP data size bigger than expected %d: %d",
8311 meta.tuning_cpp_data_size,
8312 TUNING_CPP_DATA_MAX);
8313 return;
8314 }
8315
8316 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8317 LOGE("Tuning CAC data size bigger than expected %d: %d",
8318 meta.tuning_cac_data_size,
8319 TUNING_CAC_DATA_MAX);
8320 return;
8321 }
8322 //
8323
8324 if(enabled){
8325 char timeBuf[FILENAME_MAX];
8326 char buf[FILENAME_MAX];
8327 memset(buf, 0, sizeof(buf));
8328 memset(timeBuf, 0, sizeof(timeBuf));
8329 time_t current_time;
8330 struct tm * timeinfo;
8331 time (&current_time);
8332 timeinfo = localtime (&current_time);
8333 if (timeinfo != NULL) {
8334 strftime (timeBuf, sizeof(timeBuf),
8335 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8336 }
8337 String8 filePath(timeBuf);
8338 snprintf(buf,
8339 sizeof(buf),
8340 "%dm_%s_%d.bin",
8341 dumpFrameCount,
8342 type,
8343 frameNumber);
8344 filePath.append(buf);
8345 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8346 if (file_fd >= 0) {
8347 ssize_t written_len = 0;
8348 meta.tuning_data_version = TUNING_DATA_VERSION;
8349 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8350 written_len += write(file_fd, data, sizeof(uint32_t));
8351 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8352 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8353 written_len += write(file_fd, data, sizeof(uint32_t));
8354 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8355 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8356 written_len += write(file_fd, data, sizeof(uint32_t));
8357 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8358 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8359 written_len += write(file_fd, data, sizeof(uint32_t));
8360 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8361 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8362 written_len += write(file_fd, data, sizeof(uint32_t));
8363 meta.tuning_mod3_data_size = 0;
8364 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8365 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8366 written_len += write(file_fd, data, sizeof(uint32_t));
8367 size_t total_size = meta.tuning_sensor_data_size;
8368 data = (void *)((uint8_t *)&meta.data);
8369 written_len += write(file_fd, data, total_size);
8370 total_size = meta.tuning_vfe_data_size;
8371 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8372 written_len += write(file_fd, data, total_size);
8373 total_size = meta.tuning_cpp_data_size;
8374 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8375 written_len += write(file_fd, data, total_size);
8376 total_size = meta.tuning_cac_data_size;
8377 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8378 written_len += write(file_fd, data, total_size);
8379 close(file_fd);
8380        } else {
8381            LOGE("failed to open file for metadata dumping");
8382 }
8383 }
8384}
8385
8386/*===========================================================================
8387 * FUNCTION : cleanAndSortStreamInfo
8388 *
8389 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8390 *              and sort them such that raw streams are at the end of the list.
8391 *              This is a workaround for a camera daemon constraint.
8392 *
8393 * PARAMETERS : None
8394 *
8395 *==========================================================================*/
8396void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8397{
8398 List<stream_info_t *> newStreamInfo;
8399
8400 /*clean up invalid streams*/
8401 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8402 it != mStreamInfo.end();) {
8403 if(((*it)->status) == INVALID){
8404 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8405 delete channel;
8406 free(*it);
8407 it = mStreamInfo.erase(it);
8408 } else {
8409 it++;
8410 }
8411 }
8412
8413 // Move preview/video/callback/snapshot streams into newList
8414 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8415 it != mStreamInfo.end();) {
8416 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8417 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8418 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8419 newStreamInfo.push_back(*it);
8420 it = mStreamInfo.erase(it);
8421 } else
8422 it++;
8423 }
8424 // Move raw streams into newList
8425 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8426 it != mStreamInfo.end();) {
8427 newStreamInfo.push_back(*it);
8428 it = mStreamInfo.erase(it);
8429 }
8430
8431 mStreamInfo = newStreamInfo;
8432}
8433
8434/*===========================================================================
8435 * FUNCTION : extractJpegMetadata
8436 *
8437 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8438 * JPEG metadata is cached in HAL, and return as part of capture
8439 * result when metadata is returned from camera daemon.
8440 *
8441 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8442 * @request: capture request
8443 *
8444 *==========================================================================*/
8445void QCamera3HardwareInterface::extractJpegMetadata(
8446 CameraMetadata& jpegMetadata,
8447 const camera3_capture_request_t *request)
8448{
8449 CameraMetadata frame_settings;
8450 frame_settings = request->settings;
8451
8452 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8453 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8454 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8455 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8456
8457 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8458 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8459 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8460 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8461
8462 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8463 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8464 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8465 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8466
8467 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8468 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8469 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8470 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8471
8472 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8473 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8474 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8475 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8476
8477 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8478 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8479 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8480 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8481
8482 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8483 int32_t thumbnail_size[2];
8484 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8485 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8486 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8487 int32_t orientation =
8488 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008489 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008490 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8491 int32_t temp;
8492 temp = thumbnail_size[0];
8493 thumbnail_size[0] = thumbnail_size[1];
8494 thumbnail_size[1] = temp;
8495 }
8496 }
8497 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8498 thumbnail_size,
8499 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8500 }
8501
8502}
8503
8504/*===========================================================================
8505 * FUNCTION : convertToRegions
8506 *
8507 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8508 *
8509 * PARAMETERS :
8510 * @rect : cam_rect_t struct to convert
8511 * @region : int32_t destination array
8512 * @weight : if we are converting from cam_area_t, weight is valid
8513 * else weight = -1
8514 *
8515 *==========================================================================*/
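/* Illustrative example (assuming the FACE_* indices follow the framework's
 * [left, top, right, bottom, weight] region order): for rect {left=100, top=200,
 * width=50, height=60} and weight 1, the output array becomes {100, 200, 150, 260, 1}. */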
8516void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8517 int32_t *region, int weight)
8518{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008519 region[FACE_LEFT] = rect.left;
8520 region[FACE_TOP] = rect.top;
8521 region[FACE_RIGHT] = rect.left + rect.width;
8522 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008523 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008524 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008525 }
8526}
8527
8528/*===========================================================================
8529 * FUNCTION : convertFromRegions
8530 *
8531 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8532 *
8533 * PARAMETERS :
8534 *   @roi            : cam_area_t destination struct
8535 *   @frame_settings : capture request settings containing the region tag
8536 *   @tag            : metadata tag whose [xmin, ymin, xmax, ymax, weight]
8537 *                     data is converted
8538 *
8539 *==========================================================================*/
8540void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008541 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008542{
Thierry Strudel3d639192016-09-09 11:52:26 -07008543 int32_t x_min = frame_settings.find(tag).data.i32[0];
8544 int32_t y_min = frame_settings.find(tag).data.i32[1];
8545 int32_t x_max = frame_settings.find(tag).data.i32[2];
8546 int32_t y_max = frame_settings.find(tag).data.i32[3];
8547 roi.weight = frame_settings.find(tag).data.i32[4];
8548 roi.rect.left = x_min;
8549 roi.rect.top = y_min;
8550 roi.rect.width = x_max - x_min;
8551 roi.rect.height = y_max - y_min;
8552}
8553
8554/*===========================================================================
8555 * FUNCTION : resetIfNeededROI
8556 *
8557 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8558 * crop region
8559 *
8560 * PARAMETERS :
8561 * @roi : cam_area_t struct to resize
8562 * @scalerCropRegion : cam_crop_region_t region to compare against
8563 *
8564 *
8565 *==========================================================================*/
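/* Illustrative example (not part of the HAL flow): with a scaler crop of
 * {left=0, top=0, width=2000, height=1500} and an ROI of {left=1900, top=100,
 * width=300, height=200}, the ROI overlaps the crop, so it is clamped to
 * {left=1900, top=100, width=100, height=200} and true is returned. */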
8566bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8567 const cam_crop_region_t* scalerCropRegion)
8568{
8569 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8570 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8571 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8572 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8573
8574    /* According to the spec, weight = 0 indicates that the ROI should be disabled.
8575     * Without this check, the validation below (whether the ROI lies inside the
8576     * scaler crop region) would fail, leaving the ROI unreset and causing the
8577     * algorithm to keep using a stale ROI window.
8578     */
8579 if (roi->weight == 0) {
8580 return true;
8581 }
8582
8583 if ((roi_x_max < scalerCropRegion->left) ||
8584        // right edge of roi window is left of scaler crop's left edge
8585        (roi_y_max < scalerCropRegion->top) ||
8586        // bottom edge of roi window is above scaler crop's top edge
8587        (roi->rect.left > crop_x_max) ||
8588        // left edge of roi window is right of scaler crop's right edge
8589        (roi->rect.top > crop_y_max)){
8590        // top edge of roi window is below scaler crop's bottom edge
8591 return false;
8592 }
8593 if (roi->rect.left < scalerCropRegion->left) {
8594 roi->rect.left = scalerCropRegion->left;
8595 }
8596 if (roi->rect.top < scalerCropRegion->top) {
8597 roi->rect.top = scalerCropRegion->top;
8598 }
8599 if (roi_x_max > crop_x_max) {
8600 roi_x_max = crop_x_max;
8601 }
8602 if (roi_y_max > crop_y_max) {
8603 roi_y_max = crop_y_max;
8604 }
8605 roi->rect.width = roi_x_max - roi->rect.left;
8606 roi->rect.height = roi_y_max - roi->rect.top;
8607 return true;
8608}
8609
8610/*===========================================================================
8611 * FUNCTION : convertLandmarks
8612 *
8613 * DESCRIPTION: helper method to extract the landmarks from face detection info
8614 *
8615 * PARAMETERS :
8616 * @landmark_data : input landmark data to be converted
8617 * @landmarks : int32_t destination array
8618 *
8619 *
8620 *==========================================================================*/
8621void QCamera3HardwareInterface::convertLandmarks(
8622 cam_face_landmarks_info_t landmark_data,
8623 int32_t *landmarks)
8624{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008625 if (landmark_data.is_left_eye_valid) {
8626 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8627 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8628 } else {
8629 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8630 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8631 }
8632
8633 if (landmark_data.is_right_eye_valid) {
8634 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8635 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8636 } else {
8637 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8638 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8639 }
8640
8641 if (landmark_data.is_mouth_valid) {
8642 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8643 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8644 } else {
8645 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8646 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8647 }
8648}
8649
8650/*===========================================================================
8651 * FUNCTION : setInvalidLandmarks
8652 *
8653 * DESCRIPTION: helper method to set invalid landmarks
8654 *
8655 * PARAMETERS :
8656 * @landmarks : int32_t destination array
8657 *
8658 *
8659 *==========================================================================*/
8660void QCamera3HardwareInterface::setInvalidLandmarks(
8661 int32_t *landmarks)
8662{
8663 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8664 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8665 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8666 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8667 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8668 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008669}
8670
8671#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008672
8673/*===========================================================================
8674 * FUNCTION : getCapabilities
8675 *
8676 * DESCRIPTION: query camera capability from back-end
8677 *
8678 * PARAMETERS :
8679 * @ops : mm-interface ops structure
8680 * @cam_handle : camera handle for which we need capability
8681 *
8682 * RETURN : ptr type of capability structure
8683 * capability for success
8684 * NULL for failure
8685 *==========================================================================*/
8686cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8687 uint32_t cam_handle)
8688{
8689 int rc = NO_ERROR;
8690 QCamera3HeapMemory *capabilityHeap = NULL;
8691 cam_capability_t *cap_ptr = NULL;
8692
8693 if (ops == NULL) {
8694 LOGE("Invalid arguments");
8695 return NULL;
8696 }
8697
8698 capabilityHeap = new QCamera3HeapMemory(1);
8699 if (capabilityHeap == NULL) {
8700 LOGE("creation of capabilityHeap failed");
8701 return NULL;
8702 }
8703
8704 /* Allocate memory for capability buffer */
8705 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8706 if(rc != OK) {
8707        LOGE("No memory for capability");
8708 goto allocate_failed;
8709 }
8710
8711 /* Map memory for capability buffer */
8712 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8713
8714 rc = ops->map_buf(cam_handle,
8715 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8716 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8717 if(rc < 0) {
8718 LOGE("failed to map capability buffer");
8719 rc = FAILED_TRANSACTION;
8720 goto map_failed;
8721 }
8722
8723 /* Query Capability */
8724 rc = ops->query_capability(cam_handle);
8725 if(rc < 0) {
8726 LOGE("failed to query capability");
8727 rc = FAILED_TRANSACTION;
8728 goto query_failed;
8729 }
8730
8731 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8732 if (cap_ptr == NULL) {
8733 LOGE("out of memory");
8734 rc = NO_MEMORY;
8735 goto query_failed;
8736 }
8737
8738 memset(cap_ptr, 0, sizeof(cam_capability_t));
8739 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8740
8741 int index;
8742 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8743 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8744 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8745 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8746 }
8747
8748query_failed:
8749 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8750map_failed:
8751 capabilityHeap->deallocate();
8752allocate_failed:
8753 delete capabilityHeap;
8754
8755 if (rc != NO_ERROR) {
8756 return NULL;
8757 } else {
8758 return cap_ptr;
8759 }
8760}
8761
Thierry Strudel3d639192016-09-09 11:52:26 -07008762/*===========================================================================
8763 * FUNCTION : initCapabilities
8764 *
8765 * DESCRIPTION: initialize camera capabilities in static data struct
8766 *
8767 * PARAMETERS :
8768 * @cameraId : camera Id
8769 *
8770 * RETURN : int32_t type of status
8771 * NO_ERROR -- success
8772 * none-zero failure code
8773 *==========================================================================*/
8774int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8775{
8776 int rc = 0;
8777 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008778 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008779
8780 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8781 if (rc) {
8782 LOGE("camera_open failed. rc = %d", rc);
8783 goto open_failed;
8784 }
8785 if (!cameraHandle) {
8786 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8787 goto open_failed;
8788 }
8789
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008790 handle = get_main_camera_handle(cameraHandle->camera_handle);
8791 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8792 if (gCamCapability[cameraId] == NULL) {
8793 rc = FAILED_TRANSACTION;
8794 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008795 }
8796
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008797 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008798 if (is_dual_camera_by_idx(cameraId)) {
8799 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8800 gCamCapability[cameraId]->aux_cam_cap =
8801 getCapabilities(cameraHandle->ops, handle);
8802 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8803 rc = FAILED_TRANSACTION;
8804 free(gCamCapability[cameraId]);
8805 goto failed_op;
8806 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008807
8808 // Copy the main camera capability to main_cam_cap struct
8809 gCamCapability[cameraId]->main_cam_cap =
8810 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8811 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8812 LOGE("out of memory");
8813 rc = NO_MEMORY;
8814 goto failed_op;
8815 }
8816 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8817 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008818 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008819failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008820 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8821 cameraHandle = NULL;
8822open_failed:
8823 return rc;
8824}
8825
8826/*==========================================================================
8827 * FUNCTION : get3AVersion
8828 *
8829 * DESCRIPTION: get the Q3A S/W version
8830 *
8831 * PARAMETERS :
8832 * @sw_version: Reference of Q3A structure which will hold version info upon
8833 * return
8834 *
8835 * RETURN : None
8836 *
8837 *==========================================================================*/
8838void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8839{
8840 if(gCamCapability[mCameraId])
8841 sw_version = gCamCapability[mCameraId]->q3a_version;
8842 else
8843 LOGE("Capability structure NULL!");
8844}
8845
8846
8847/*===========================================================================
8848 * FUNCTION : initParameters
8849 *
8850 * DESCRIPTION: initialize camera parameters
8851 *
8852 * PARAMETERS :
8853 *
8854 * RETURN : int32_t type of status
8855 * NO_ERROR -- success
8856 * none-zero failure code
8857 *==========================================================================*/
8858int QCamera3HardwareInterface::initParameters()
8859{
8860 int rc = 0;
8861
8862 //Allocate Set Param Buffer
8863 mParamHeap = new QCamera3HeapMemory(1);
8864 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8865 if(rc != OK) {
8866 rc = NO_MEMORY;
8867 LOGE("Failed to allocate SETPARM Heap memory");
8868 delete mParamHeap;
8869 mParamHeap = NULL;
8870 return rc;
8871 }
8872
8873 //Map memory for parameters buffer
8874 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8875 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8876 mParamHeap->getFd(0),
8877 sizeof(metadata_buffer_t),
8878 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8879 if(rc < 0) {
8880 LOGE("failed to map SETPARM buffer");
8881 rc = FAILED_TRANSACTION;
8882 mParamHeap->deallocate();
8883 delete mParamHeap;
8884 mParamHeap = NULL;
8885 return rc;
8886 }
8887
8888 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8889
8890 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8891 return rc;
8892}
8893
8894/*===========================================================================
8895 * FUNCTION : deinitParameters
8896 *
8897 * DESCRIPTION: de-initialize camera parameters
8898 *
8899 * PARAMETERS :
8900 *
8901 * RETURN : NONE
8902 *==========================================================================*/
8903void QCamera3HardwareInterface::deinitParameters()
8904{
8905 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8906 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8907
8908 mParamHeap->deallocate();
8909 delete mParamHeap;
8910 mParamHeap = NULL;
8911
8912 mParameters = NULL;
8913
8914 free(mPrevParameters);
8915 mPrevParameters = NULL;
8916}
8917
8918/*===========================================================================
8919 * FUNCTION : calcMaxJpegSize
8920 *
8921 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8922 *
8923 * PARAMETERS :
8924 *
8925 * RETURN : max_jpeg_size
8926 *==========================================================================*/
8927size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
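/* The largest supported picture size (in pixels) is scaled by 3/2 (roughly one
 * uncompressed YUV420 frame's worth of bytes) as a conservative upper bound for the
 * encoded JPEG, and sizeof(camera3_jpeg_blob_t) is added for the transport header
 * appended at the end of the buffer. */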
8928{
8929 size_t max_jpeg_size = 0;
8930 size_t temp_width, temp_height;
8931 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8932 MAX_SIZES_CNT);
8933 for (size_t i = 0; i < count; i++) {
8934 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8935 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8936 if (temp_width * temp_height > max_jpeg_size ) {
8937 max_jpeg_size = temp_width * temp_height;
8938 }
8939 }
8940 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8941 return max_jpeg_size;
8942}
8943
8944/*===========================================================================
8945 * FUNCTION : getMaxRawSize
8946 *
8947 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8948 *
8949 * PARAMETERS :
8950 *
8951 * RETURN : Largest supported Raw Dimension
8952 *==========================================================================*/
8953cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8954{
8955 int max_width = 0;
8956 cam_dimension_t maxRawSize;
8957
8958 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8959 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8960 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8961 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8962 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8963 }
8964 }
8965 return maxRawSize;
8966}
8967
8968
8969/*===========================================================================
8970 * FUNCTION : calcMaxJpegDim
8971 *
8972 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8973 *
8974 * PARAMETERS :
8975 *
8976 * RETURN : max_jpeg_dim
8977 *==========================================================================*/
8978cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8979{
8980 cam_dimension_t max_jpeg_dim;
8981 cam_dimension_t curr_jpeg_dim;
8982 max_jpeg_dim.width = 0;
8983 max_jpeg_dim.height = 0;
8984 curr_jpeg_dim.width = 0;
8985 curr_jpeg_dim.height = 0;
8986 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8987 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8988 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8989 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8990 max_jpeg_dim.width * max_jpeg_dim.height ) {
8991 max_jpeg_dim.width = curr_jpeg_dim.width;
8992 max_jpeg_dim.height = curr_jpeg_dim.height;
8993 }
8994 }
8995 return max_jpeg_dim;
8996}
8997
8998/*===========================================================================
8999 * FUNCTION : addStreamConfig
9000 *
9001 * DESCRIPTION: adds the stream configuration to the array
9002 *
9003 * PARAMETERS :
9004 * @available_stream_configs : pointer to stream configuration array
9005 * @scalar_format : scalar format
9006 * @dim : configuration dimension
9007 * @config_type : input or output configuration type
9008 *
9009 * RETURN : NONE
9010 *==========================================================================*/
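/* Each call appends a (format, width, height, direction) 4-tuple; this matches the
 * flattened layout used by the available stream configuration static metadata entries. */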
9011void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9012 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9013{
9014 available_stream_configs.add(scalar_format);
9015 available_stream_configs.add(dim.width);
9016 available_stream_configs.add(dim.height);
9017 available_stream_configs.add(config_type);
9018}
9019
9020/*===========================================================================
9021 * FUNCTION : supportBurstCapture
9022 *
9023 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9024 *
9025 * PARAMETERS :
9026 * @cameraId : camera Id
9027 *
9028 * RETURN : true if camera supports BURST_CAPTURE
9029 * false otherwise
9030 *==========================================================================*/
9031bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9032{
9033 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9034 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9035 const int32_t highResWidth = 3264;
9036 const int32_t highResHeight = 2448;
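    // These bounds roughly mirror the framework's BURST_CAPTURE expectations
    // (about 8 MP at 20 fps, or full resolution at 10 fps); 3264x2448 is used as
    // the ~8 MP reference size.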
9037
9038 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9039 // Maximum resolution images cannot be captured at >= 10fps
9040 // -> not supporting BURST_CAPTURE
9041 return false;
9042 }
9043
9044 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9045 // Maximum resolution images can be captured at >= 20fps
9046 // --> supporting BURST_CAPTURE
9047 return true;
9048 }
9049
9050 // Find the smallest highRes resolution, or largest resolution if there is none
9051 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9052 MAX_SIZES_CNT);
9053 size_t highRes = 0;
9054 while ((highRes + 1 < totalCnt) &&
9055 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9056 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9057 highResWidth * highResHeight)) {
9058 highRes++;
9059 }
9060 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9061 return true;
9062 } else {
9063 return false;
9064 }
9065}
9066
9067/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009068 * FUNCTION : getPDStatIndex
9069 *
9070 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9071 *
9072 * PARAMETERS :
9073 * @caps : camera capabilities
9074 *
9075 * RETURN : int32_t type
9076 * non-negative - on success
9077 * -1 - on failure
9078 *==========================================================================*/
9079int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9080 if (nullptr == caps) {
9081 return -1;
9082 }
9083
9084 uint32_t metaRawCount = caps->meta_raw_channel_count;
9085 int32_t ret = -1;
9086 for (size_t i = 0; i < metaRawCount; i++) {
9087 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9088 ret = i;
9089 break;
9090 }
9091 }
9092
9093 return ret;
9094}
9095
9096/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009097 * FUNCTION : initStaticMetadata
9098 *
9099 * DESCRIPTION: initialize the static metadata
9100 *
9101 * PARAMETERS :
9102 * @cameraId : camera Id
9103 *
9104 * RETURN : int32_t type of status
9105 * 0 -- success
9106 * non-zero failure code
9107 *==========================================================================*/
9108int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9109{
9110 int rc = 0;
9111 CameraMetadata staticInfo;
9112 size_t count = 0;
9113 bool limitedDevice = false;
9114 char prop[PROPERTY_VALUE_MAX];
9115 bool supportBurst = false;
9116
9117 supportBurst = supportBurstCapture(cameraId);
9118
9119    /* If the sensor is a YUV sensor (no raw support), or per-frame control is not
9120     * guaranteed, or the min fps at max resolution is less than 20 fps, the device
9121     * is advertised as LIMITED. */
9122 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9123 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9124 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9125 !supportBurst;
9126
9127 uint8_t supportedHwLvl = limitedDevice ?
9128 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009129#ifndef USE_HAL_3_3
9130 // LEVEL_3 - This device will support level 3.
9131 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9132#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009133 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009134#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009135
9136 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9137 &supportedHwLvl, 1);
9138
9139 bool facingBack = false;
9140 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9141 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9142 facingBack = true;
9143 }
9144 /*HAL 3 only*/
9145 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9146 &gCamCapability[cameraId]->min_focus_distance, 1);
9147
9148 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9149 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9150
9151 /*should be using focal lengths but sensor doesn't provide that info now*/
9152 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9153 &gCamCapability[cameraId]->focal_length,
9154 1);
9155
9156 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9157 gCamCapability[cameraId]->apertures,
9158 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9159
9160 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9161 gCamCapability[cameraId]->filter_densities,
9162 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9163
9164
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009165 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9166 size_t mode_count =
9167 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9168 for (size_t i = 0; i < mode_count; i++) {
9169 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9170 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009171 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009172 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009173
9174 int32_t lens_shading_map_size[] = {
9175 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9176 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9177 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9178 lens_shading_map_size,
9179 sizeof(lens_shading_map_size)/sizeof(int32_t));
9180
9181 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9182 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9183
9184 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9185 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9186
9187 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9188 &gCamCapability[cameraId]->max_frame_duration, 1);
9189
9190 camera_metadata_rational baseGainFactor = {
9191 gCamCapability[cameraId]->base_gain_factor.numerator,
9192 gCamCapability[cameraId]->base_gain_factor.denominator};
9193 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9194 &baseGainFactor, 1);
9195
9196 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9197 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9198
9199 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9200 gCamCapability[cameraId]->pixel_array_size.height};
9201 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9202 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9203
9204 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9205 gCamCapability[cameraId]->active_array_size.top,
9206 gCamCapability[cameraId]->active_array_size.width,
9207 gCamCapability[cameraId]->active_array_size.height};
9208 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9209 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9210
9211 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9212 &gCamCapability[cameraId]->white_level, 1);
9213
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009214 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9215 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9216 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009217 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009218 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009219
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009220#ifndef USE_HAL_3_3
9221 bool hasBlackRegions = false;
9222 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9223 LOGW("black_region_count: %d is bounded to %d",
9224 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9225 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9226 }
9227 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9228 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9229 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9230 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9231 }
9232 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9233 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9234 hasBlackRegions = true;
9235 }
9236#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009237 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9238 &gCamCapability[cameraId]->flash_charge_duration, 1);
9239
9240 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9241 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9242
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009243 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9244 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9245 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009246 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9247 &timestampSource, 1);
9248
    //update histogram vendor data
    staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
            &gCamCapability[cameraId]->histogram_size, 1);

    staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
            &gCamCapability[cameraId]->max_histogram_count, 1);
9255
    //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
    //so that an app can request fewer bins than the maximum supported.
9258 std::vector<int32_t> histBins;
9259 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9260 histBins.push_back(maxHistBins);
9261 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9262 (maxHistBins & 0x1) == 0) {
9263 histBins.push_back(maxHistBins >> 1);
9264 maxHistBins >>= 1;
9265 }
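    // Worked example (illustrative, assuming MIN_CAM_HISTOGRAM_STATS_SIZE is 32):
    // with max_histogram_count = 256, the advertised bin set would be
    // {256, 128, 64, 32}; the loop stops once halving would drop below the
    // minimum supported size or produce an odd value.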
9266 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9267 histBins.data(), histBins.size());
9268
Thierry Strudel3d639192016-09-09 11:52:26 -07009269 int32_t sharpness_map_size[] = {
9270 gCamCapability[cameraId]->sharpness_map_size.width,
9271 gCamCapability[cameraId]->sharpness_map_size.height};
9272
9273 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9274 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9275
9276 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9277 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9278
Emilian Peev0f3c3162017-03-15 12:57:46 +00009279 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9280 if (0 <= indexPD) {
9281 // Advertise PD stats data as part of the Depth capabilities
9282 int32_t depthWidth =
9283 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9284 int32_t depthHeight =
9285 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev666f5142017-06-02 16:47:04 +01009286 int32_t depthStride =
9287 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009288 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9289 assert(0 < depthSamplesCount);
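        // Worked example (illustrative, with hypothetical PD stat dimensions):
        // for a 504x376 PD buffer, depthSamplesCount = (504 * 376 * 2) / 16 = 23688.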
9290 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9291 &depthSamplesCount, 1);
9292
9293 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9294 depthHeight,
9295 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9296 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9297 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9298 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9299 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9300
9301 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9302 depthHeight, 33333333,
9303 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9304 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9305 depthMinDuration,
9306 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9307
9308 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9309 depthHeight, 0,
9310 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9311 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9312 depthStallDuration,
9313 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9314
9315 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9316 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev666f5142017-06-02 16:47:04 +01009317
9318 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9319 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9320 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009321 }
9322
Thierry Strudel3d639192016-09-09 11:52:26 -07009323 int32_t scalar_formats[] = {
9324 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9325 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9326 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9327 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9328 HAL_PIXEL_FORMAT_RAW10,
9329 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009330 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9331 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9332 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009333
9334 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9335 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9336 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9337 count, MAX_SIZES_CNT, available_processed_sizes);
9338 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9339 available_processed_sizes, count * 2);
9340
9341 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9342 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9343 makeTable(gCamCapability[cameraId]->raw_dim,
9344 count, MAX_SIZES_CNT, available_raw_sizes);
9345 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9346 available_raw_sizes, count * 2);
9347
9348 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9349 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9350 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9351 count, MAX_SIZES_CNT, available_fps_ranges);
9352 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9353 available_fps_ranges, count * 2);
9354
9355 camera_metadata_rational exposureCompensationStep = {
9356 gCamCapability[cameraId]->exp_compensation_step.numerator,
9357 gCamCapability[cameraId]->exp_compensation_step.denominator};
9358 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9359 &exposureCompensationStep, 1);
9360
9361 Vector<uint8_t> availableVstabModes;
9362 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9363 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009364 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009365 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009366 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009367 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009368 count = IS_TYPE_MAX;
9369 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9370 for (size_t i = 0; i < count; i++) {
9371 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9372 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9373 eisSupported = true;
9374 break;
9375 }
9376 }
9377 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009378 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9379 }
9380 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9381 availableVstabModes.array(), availableVstabModes.size());
9382
9383 /*HAL 1 and HAL 3 common*/
9384 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9385 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9386 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
    // Cap the max zoom to the max preferred value
9388 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
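    // Worked example (illustrative): if the last zoom table entry is 800 (8x in
    // HAL1 units of 1/100) and MAX_PREFERRED_ZOOM_RATIO were 4.0 (hypothetical
    // value), then maxZoom = MIN(800 / 100, 4.0) = 4.0.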
Thierry Strudel3d639192016-09-09 11:52:26 -07009389 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9390 &maxZoom, 1);
9391
9392 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9393 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9394
9395 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9396 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9397 max3aRegions[2] = 0; /* AF not supported */
9398 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9399 max3aRegions, 3);
9400
9401 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9402 memset(prop, 0, sizeof(prop));
9403 property_get("persist.camera.facedetect", prop, "1");
9404 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9405 LOGD("Support face detection mode: %d",
9406 supportedFaceDetectMode);
9407
9408 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
    /* supported mode should be OFF if the max number of faces is 0 */
9410 if (maxFaces <= 0) {
9411 supportedFaceDetectMode = 0;
9412 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009413 Vector<uint8_t> availableFaceDetectModes;
9414 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9415 if (supportedFaceDetectMode == 1) {
9416 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9417 } else if (supportedFaceDetectMode == 2) {
9418 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9419 } else if (supportedFaceDetectMode == 3) {
9420 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9421 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9422 } else {
9423 maxFaces = 0;
9424 }
9425 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9426 availableFaceDetectModes.array(),
9427 availableFaceDetectModes.size());
9428 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9429 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009430 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9431 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9432 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009433
9434 int32_t exposureCompensationRange[] = {
9435 gCamCapability[cameraId]->exposure_compensation_min,
9436 gCamCapability[cameraId]->exposure_compensation_max};
9437 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9438 exposureCompensationRange,
9439 sizeof(exposureCompensationRange)/sizeof(int32_t));
9440
9441 uint8_t lensFacing = (facingBack) ?
9442 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9443 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9444
9445 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9446 available_thumbnail_sizes,
9447 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9448
    /*all supported sizes will be combined into this tag*/
9450 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9451 /*android.scaler.availableStreamConfigurations*/
9452 Vector<int32_t> available_stream_configs;
9453 cam_dimension_t active_array_dim;
9454 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9455 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009456
    /*advertise the list of supported input dimensions based on the property below.
      By default all sizes up to 5MP will be advertised.
      Note that the setprop resolution format should be WxH,
      e.g.: adb shell setprop persist.camera.input.minsize 1280x720
      To list all supported sizes, the property needs to be set to "0x0" */
9462 cam_dimension_t minInputSize = {2592,1944}; //5MP
9463 memset(prop, 0, sizeof(prop));
9464 property_get("persist.camera.input.minsize", prop, "2592x1944");
9465 if (strlen(prop) > 0) {
9466 char *saveptr = NULL;
9467 char *token = strtok_r(prop, "x", &saveptr);
9468 if (token != NULL) {
9469 minInputSize.width = atoi(token);
9470 }
9471 token = strtok_r(NULL, "x", &saveptr);
9472 if (token != NULL) {
9473 minInputSize.height = atoi(token);
9474 }
9475 }
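    // Example (illustrative): with "persist.camera.input.minsize" set to
    // "1280x720", the tokens parse to minInputSize = {1280, 720}; with the
    // default "2592x1944" the 5MP floor above is kept.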
9476
    /* Add input/output stream configurations for each scalar format */
9478 for (size_t j = 0; j < scalar_formats_count; j++) {
9479 switch (scalar_formats[j]) {
9480 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9481 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9482 case HAL_PIXEL_FORMAT_RAW10:
9483 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9484 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9485 addStreamConfig(available_stream_configs, scalar_formats[j],
9486 gCamCapability[cameraId]->raw_dim[i],
9487 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9488 }
9489 break;
9490 case HAL_PIXEL_FORMAT_BLOB:
9491 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9492 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9493 addStreamConfig(available_stream_configs, scalar_formats[j],
9494 gCamCapability[cameraId]->picture_sizes_tbl[i],
9495 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9496 }
9497 break;
9498 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9499 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9500 default:
9501 cam_dimension_t largest_picture_size;
9502 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9503 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9504 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9505 addStreamConfig(available_stream_configs, scalar_formats[j],
9506 gCamCapability[cameraId]->picture_sizes_tbl[i],
9507 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                /* For the two formats below we also support input streams for
                 * reprocessing; advertise those as well. */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009509 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9510 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009511 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9512 >= minInputSize.width) || (gCamCapability[cameraId]->
9513 picture_sizes_tbl[i].height >= minInputSize.height)) {
9514 addStreamConfig(available_stream_configs, scalar_formats[j],
9515 gCamCapability[cameraId]->picture_sizes_tbl[i],
9516 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9517 }
9518 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009519 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009520
Thierry Strudel3d639192016-09-09 11:52:26 -07009521 break;
9522 }
9523 }
9524
9525 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9526 available_stream_configs.array(), available_stream_configs.size());
9527 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9528 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9529
9530 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9531 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9532
9533 /* android.scaler.availableMinFrameDurations */
9534 Vector<int64_t> available_min_durations;
9535 for (size_t j = 0; j < scalar_formats_count; j++) {
9536 switch (scalar_formats[j]) {
9537 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9538 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9539 case HAL_PIXEL_FORMAT_RAW10:
9540 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9541 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9542 available_min_durations.add(scalar_formats[j]);
9543 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9544 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9545 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9546 }
9547 break;
9548 default:
9549 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9550 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9551 available_min_durations.add(scalar_formats[j]);
9552 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9553 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9554 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9555 }
9556 break;
9557 }
9558 }
9559 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9560 available_min_durations.array(), available_min_durations.size());
9561
9562 Vector<int32_t> available_hfr_configs;
9563 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9564 int32_t fps = 0;
9565 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9566 case CAM_HFR_MODE_60FPS:
9567 fps = 60;
9568 break;
9569 case CAM_HFR_MODE_90FPS:
9570 fps = 90;
9571 break;
9572 case CAM_HFR_MODE_120FPS:
9573 fps = 120;
9574 break;
9575 case CAM_HFR_MODE_150FPS:
9576 fps = 150;
9577 break;
9578 case CAM_HFR_MODE_180FPS:
9579 fps = 180;
9580 break;
9581 case CAM_HFR_MODE_210FPS:
9582 fps = 210;
9583 break;
9584 case CAM_HFR_MODE_240FPS:
9585 fps = 240;
9586 break;
9587 case CAM_HFR_MODE_480FPS:
9588 fps = 480;
9589 break;
9590 case CAM_HFR_MODE_OFF:
9591 case CAM_HFR_MODE_MAX:
9592 default:
9593 break;
9594 }
9595
9596 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9597 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
            /* For each HFR frame rate, we need to advertise one variable fps range
             * and one fixed fps range per dimension. E.g.: for 120 FPS, advertise
             * [30, 120] and [120, 120]. While camcorder preview alone is running,
             * [30, 120] is set by the app. When video recording is started,
             * [120, 120] is set. This way the sensor configuration does not change
             * when recording is started. */
9604
9605 /* (width, height, fps_min, fps_max, batch_size_max) */
9606 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9607 j < MAX_SIZES_CNT; j++) {
9608 available_hfr_configs.add(
9609 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9610 available_hfr_configs.add(
9611 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9612 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9613 available_hfr_configs.add(fps);
9614 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9615
9616 /* (width, height, fps_min, fps_max, batch_size_max) */
9617 available_hfr_configs.add(
9618 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9619 available_hfr_configs.add(
9620 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9621 available_hfr_configs.add(fps);
9622 available_hfr_configs.add(fps);
9623 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9624 }
9625 }
9626 }
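    // Worked example (illustrative, assuming PREVIEW_FPS_FOR_HFR is 30): a
    // 1920x1080 entry at 120 fps would add the tuples
    // (1920, 1080, 30, 120, 4) and (1920, 1080, 120, 120, 4).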
9627 //Advertise HFR capability only if the property is set
9628 memset(prop, 0, sizeof(prop));
9629 property_get("persist.camera.hal3hfr.enable", prop, "1");
9630 uint8_t hfrEnable = (uint8_t)atoi(prop);
9631
9632 if(hfrEnable && available_hfr_configs.array()) {
9633 staticInfo.update(
9634 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9635 available_hfr_configs.array(), available_hfr_configs.size());
9636 }
9637
9638 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9639 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9640 &max_jpeg_size, 1);
9641
9642 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9643 size_t size = 0;
9644 count = CAM_EFFECT_MODE_MAX;
9645 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9646 for (size_t i = 0; i < count; i++) {
9647 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9648 gCamCapability[cameraId]->supported_effects[i]);
9649 if (NAME_NOT_FOUND != val) {
9650 avail_effects[size] = (uint8_t)val;
9651 size++;
9652 }
9653 }
9654 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9655 avail_effects,
9656 size);
9657
9658 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9659 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9660 size_t supported_scene_modes_cnt = 0;
9661 count = CAM_SCENE_MODE_MAX;
9662 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9663 for (size_t i = 0; i < count; i++) {
9664 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9665 CAM_SCENE_MODE_OFF) {
9666 int val = lookupFwkName(SCENE_MODES_MAP,
9667 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9668 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009669
Thierry Strudel3d639192016-09-09 11:52:26 -07009670 if (NAME_NOT_FOUND != val) {
9671 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9672 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9673 supported_scene_modes_cnt++;
9674 }
9675 }
9676 }
9677 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9678 avail_scene_modes,
9679 supported_scene_modes_cnt);
9680
9681 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9682 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9683 supported_scene_modes_cnt,
9684 CAM_SCENE_MODE_MAX,
9685 scene_mode_overrides,
9686 supported_indexes,
9687 cameraId);
9688
9689 if (supported_scene_modes_cnt == 0) {
9690 supported_scene_modes_cnt = 1;
9691 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9692 }
9693
9694 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9695 scene_mode_overrides, supported_scene_modes_cnt * 3);
9696
9697 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9698 ANDROID_CONTROL_MODE_AUTO,
9699 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9700 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9701 available_control_modes,
9702 3);
9703
9704 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9705 size = 0;
9706 count = CAM_ANTIBANDING_MODE_MAX;
9707 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9708 for (size_t i = 0; i < count; i++) {
9709 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9710 gCamCapability[cameraId]->supported_antibandings[i]);
9711 if (NAME_NOT_FOUND != val) {
9712 avail_antibanding_modes[size] = (uint8_t)val;
9713 size++;
9714 }
9715
9716 }
9717 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9718 avail_antibanding_modes,
9719 size);
9720
9721 uint8_t avail_abberation_modes[] = {
9722 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9723 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9724 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9725 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9726 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9727 if (0 == count) {
        // If no aberration correction modes are available for a device,
        // advertise only the OFF mode
9729 size = 1;
9730 } else {
        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
        // so advertise all 3 modes if at least one mode is supported, as per the
        // new M requirement.
9734 size = 3;
9735 }
9736 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9737 avail_abberation_modes,
9738 size);
9739
9740 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9741 size = 0;
9742 count = CAM_FOCUS_MODE_MAX;
9743 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9744 for (size_t i = 0; i < count; i++) {
9745 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9746 gCamCapability[cameraId]->supported_focus_modes[i]);
9747 if (NAME_NOT_FOUND != val) {
9748 avail_af_modes[size] = (uint8_t)val;
9749 size++;
9750 }
9751 }
9752 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9753 avail_af_modes,
9754 size);
9755
9756 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9757 size = 0;
9758 count = CAM_WB_MODE_MAX;
9759 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9760 for (size_t i = 0; i < count; i++) {
9761 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9762 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9763 gCamCapability[cameraId]->supported_white_balances[i]);
9764 if (NAME_NOT_FOUND != val) {
9765 avail_awb_modes[size] = (uint8_t)val;
9766 size++;
9767 }
9768 }
9769 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9770 avail_awb_modes,
9771 size);
9772
9773 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9774 count = CAM_FLASH_FIRING_LEVEL_MAX;
9775 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9776 count);
9777 for (size_t i = 0; i < count; i++) {
9778 available_flash_levels[i] =
9779 gCamCapability[cameraId]->supported_firing_levels[i];
9780 }
9781 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9782 available_flash_levels, count);
9783
9784 uint8_t flashAvailable;
9785 if (gCamCapability[cameraId]->flash_available)
9786 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9787 else
9788 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9789 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9790 &flashAvailable, 1);
9791
9792 Vector<uint8_t> avail_ae_modes;
9793 count = CAM_AE_MODE_MAX;
9794 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9795 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009796 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9797 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9798 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9799 }
9800 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009801 }
9802 if (flashAvailable) {
9803 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9804 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9805 }
9806 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9807 avail_ae_modes.array(),
9808 avail_ae_modes.size());
9809
9810 int32_t sensitivity_range[2];
9811 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9812 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9813 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9814 sensitivity_range,
9815 sizeof(sensitivity_range) / sizeof(int32_t));
9816
9817 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9818 &gCamCapability[cameraId]->max_analog_sensitivity,
9819 1);
9820
9821 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9822 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9823 &sensor_orientation,
9824 1);
9825
9826 int32_t max_output_streams[] = {
9827 MAX_STALLING_STREAMS,
9828 MAX_PROCESSED_STREAMS,
9829 MAX_RAW_STREAMS};
9830 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9831 max_output_streams,
9832 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9833
9834 uint8_t avail_leds = 0;
9835 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9836 &avail_leds, 0);
9837
9838 uint8_t focus_dist_calibrated;
9839 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9840 gCamCapability[cameraId]->focus_dist_calibrated);
9841 if (NAME_NOT_FOUND != val) {
9842 focus_dist_calibrated = (uint8_t)val;
9843 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9844 &focus_dist_calibrated, 1);
9845 }
9846
9847 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9848 size = 0;
9849 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9850 MAX_TEST_PATTERN_CNT);
9851 for (size_t i = 0; i < count; i++) {
9852 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9853 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9854 if (NAME_NOT_FOUND != testpatternMode) {
9855 avail_testpattern_modes[size] = testpatternMode;
9856 size++;
9857 }
9858 }
9859 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9860 avail_testpattern_modes,
9861 size);
9862
9863 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9864 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9865 &max_pipeline_depth,
9866 1);
9867
9868 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9869 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9870 &partial_result_count,
9871 1);
9872
9873 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9874 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9875
9876 Vector<uint8_t> available_capabilities;
9877 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9878 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9879 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9880 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9881 if (supportBurst) {
9882 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9883 }
9884 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9885 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9886 if (hfrEnable && available_hfr_configs.array()) {
9887 available_capabilities.add(
9888 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9889 }
9890
9891 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9892 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9893 }
9894 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9895 available_capabilities.array(),
9896 available_capabilities.size());
9897
    // aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or
    // BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_SENSOR.
9900 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9901 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9902
9903 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9904 &aeLockAvailable, 1);
9905
    // awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING
    // or BURST_CAPTURE. The assumption is that all Bayer cameras support
    // MANUAL_POST_PROCESSING.
9908 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9909 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9910
9911 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9912 &awbLockAvailable, 1);
9913
9914 int32_t max_input_streams = 1;
9915 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9916 &max_input_streams,
9917 1);
9918
9919 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9920 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9921 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9922 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9923 HAL_PIXEL_FORMAT_YCbCr_420_888};
9924 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9925 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
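    // Decoded (descriptive note): per the map above, IMPLEMENTATION_DEFINED input can be
    // reprocessed into {BLOB, YCbCr_420_888}, and YCbCr_420_888 input into
    // {BLOB, YCbCr_420_888}.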
9926
9927 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9928 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9929 &max_latency,
9930 1);
9931
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009932#ifndef USE_HAL_3_3
9933 int32_t isp_sensitivity_range[2];
9934 isp_sensitivity_range[0] =
9935 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9936 isp_sensitivity_range[1] =
9937 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9938 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9939 isp_sensitivity_range,
9940 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9941#endif
9942
Thierry Strudel3d639192016-09-09 11:52:26 -07009943 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9944 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9945 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9946 available_hot_pixel_modes,
9947 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9948
9949 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9950 ANDROID_SHADING_MODE_FAST,
9951 ANDROID_SHADING_MODE_HIGH_QUALITY};
9952 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9953 available_shading_modes,
9954 3);
9955
9956 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9957 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9958 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9959 available_lens_shading_map_modes,
9960 2);
9961
9962 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9963 ANDROID_EDGE_MODE_FAST,
9964 ANDROID_EDGE_MODE_HIGH_QUALITY,
9965 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9966 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9967 available_edge_modes,
9968 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9969
9970 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9971 ANDROID_NOISE_REDUCTION_MODE_FAST,
9972 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9973 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9974 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9975 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9976 available_noise_red_modes,
9977 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9978
9979 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9980 ANDROID_TONEMAP_MODE_FAST,
9981 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9982 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9983 available_tonemap_modes,
9984 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9985
9986 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9987 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9988 available_hot_pixel_map_modes,
9989 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9990
9991 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9992 gCamCapability[cameraId]->reference_illuminant1);
9993 if (NAME_NOT_FOUND != val) {
9994 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9995 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9996 }
9997
9998 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9999 gCamCapability[cameraId]->reference_illuminant2);
10000 if (NAME_NOT_FOUND != val) {
10001 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10002 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10003 }
10004
10005 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10006 (void *)gCamCapability[cameraId]->forward_matrix1,
10007 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10008
10009 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10010 (void *)gCamCapability[cameraId]->forward_matrix2,
10011 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10012
10013 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10014 (void *)gCamCapability[cameraId]->color_transform1,
10015 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10016
10017 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10018 (void *)gCamCapability[cameraId]->color_transform2,
10019 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10020
10021 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10022 (void *)gCamCapability[cameraId]->calibration_transform1,
10023 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10024
10025 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10026 (void *)gCamCapability[cameraId]->calibration_transform2,
10027 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10028
10029 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10030 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10031 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10032 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10033 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10034 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10035 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10036 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10037 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10038 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10039 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10040 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10041 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10042 ANDROID_JPEG_GPS_COORDINATES,
10043 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10044 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10045 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10046 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10047 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10048 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10049 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10050 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10051 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10052 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010053#ifndef USE_HAL_3_3
10054 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10055#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010056 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010057 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010058 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10059 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010060 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010061 /* DevCamDebug metadata request_keys_basic */
10062 DEVCAMDEBUG_META_ENABLE,
10063 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010064 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010065 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010066 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010067 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev666f5142017-06-02 16:47:04 +010010068 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010069 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010070
10071 size_t request_keys_cnt =
10072 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10073 Vector<int32_t> available_request_keys;
10074 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10075 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10076 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10077 }
10078
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010079 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +000010080 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010081 }
10082
Thierry Strudel3d639192016-09-09 11:52:26 -070010083 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10084 available_request_keys.array(), available_request_keys.size());
10085
10086 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10087 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10088 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10089 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10090 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10091 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10092 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10093 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10094 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10095 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10096 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10097 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10098 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10099 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10100 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10101 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10102 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010103 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010104 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10105 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10106 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010107 ANDROID_STATISTICS_FACE_SCORES,
10108#ifndef USE_HAL_3_3
10109 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10110#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010111 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010112 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010113 // DevCamDebug metadata result_keys_basic
10114 DEVCAMDEBUG_META_ENABLE,
10115 // DevCamDebug metadata result_keys AF
10116 DEVCAMDEBUG_AF_LENS_POSITION,
10117 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10118 DEVCAMDEBUG_AF_TOF_DISTANCE,
10119 DEVCAMDEBUG_AF_LUMA,
10120 DEVCAMDEBUG_AF_HAF_STATE,
10121 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10122 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10123 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10124 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10125 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10126 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10127 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10128 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10129 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10130 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10131 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10132 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10133 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10134 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10135 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10136 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10137 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10138 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10139 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10140 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10141 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10142 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10143 // DevCamDebug metadata result_keys AEC
10144 DEVCAMDEBUG_AEC_TARGET_LUMA,
10145 DEVCAMDEBUG_AEC_COMP_LUMA,
10146 DEVCAMDEBUG_AEC_AVG_LUMA,
10147 DEVCAMDEBUG_AEC_CUR_LUMA,
10148 DEVCAMDEBUG_AEC_LINECOUNT,
10149 DEVCAMDEBUG_AEC_REAL_GAIN,
10150 DEVCAMDEBUG_AEC_EXP_INDEX,
10151 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010152 // DevCamDebug metadata result_keys zzHDR
10153 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10154 DEVCAMDEBUG_AEC_L_LINECOUNT,
10155 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10156 DEVCAMDEBUG_AEC_S_LINECOUNT,
10157 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10158 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10159 // DevCamDebug metadata result_keys ADRC
10160 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10161 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10162 DEVCAMDEBUG_AEC_GTM_RATIO,
10163 DEVCAMDEBUG_AEC_LTM_RATIO,
10164 DEVCAMDEBUG_AEC_LA_RATIO,
10165 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010166 // DevCamDebug metadata result_keys AWB
10167 DEVCAMDEBUG_AWB_R_GAIN,
10168 DEVCAMDEBUG_AWB_G_GAIN,
10169 DEVCAMDEBUG_AWB_B_GAIN,
10170 DEVCAMDEBUG_AWB_CCT,
10171 DEVCAMDEBUG_AWB_DECISION,
10172 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010173 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10174 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10175 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010176 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010177 };
10178
Thierry Strudel3d639192016-09-09 11:52:26 -070010179 size_t result_keys_cnt =
10180 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10181
10182 Vector<int32_t> available_result_keys;
10183 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10184 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10185 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10186 }
10187 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10188 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10189 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10190 }
10191 if (supportedFaceDetectMode == 1) {
10192 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10193 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10194 } else if ((supportedFaceDetectMode == 2) ||
10195 (supportedFaceDetectMode == 3)) {
10196 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10197 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10198 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010199#ifndef USE_HAL_3_3
10200 if (hasBlackRegions) {
10201 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10202 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10203 }
10204#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010205
10206 if (gExposeEnableZslKey) {
10207 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10208 }
10209
Thierry Strudel3d639192016-09-09 11:52:26 -070010210 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10211 available_result_keys.array(), available_result_keys.size());
10212
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010213 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010214 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10215 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10216 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10217 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10218 ANDROID_SCALER_CROPPING_TYPE,
10219 ANDROID_SYNC_MAX_LATENCY,
10220 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10221 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10222 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10223 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10224 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10225 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10226 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10227 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10228 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10229 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10230 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10231 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10232 ANDROID_LENS_FACING,
10233 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10234 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10235 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10236 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10237 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10238 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10239 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10240 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10241 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10242 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10243 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10244 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10245 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10246 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10247 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10248 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10249 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10250 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10251 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10252 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010253 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010254 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10255 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10256 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10257 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10258 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10259 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10260 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10261 ANDROID_CONTROL_AVAILABLE_MODES,
10262 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10263 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10264 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10265 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010266 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10267#ifndef USE_HAL_3_3
10268 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10269 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10270#endif
10271 };
10272
10273 Vector<int32_t> available_characteristics_keys;
10274 available_characteristics_keys.appendArray(characteristics_keys_basic,
10275 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10276#ifndef USE_HAL_3_3
10277 if (hasBlackRegions) {
10278 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10279 }
10280#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010281
10282 if (0 <= indexPD) {
10283 int32_t depthKeys[] = {
10284 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10285 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10286 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10287 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10288 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10289 };
10290 available_characteristics_keys.appendArray(depthKeys,
10291 sizeof(depthKeys) / sizeof(depthKeys[0]));
10292 }
10293
Thierry Strudel3d639192016-09-09 11:52:26 -070010294 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010295 available_characteristics_keys.array(),
10296 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010297
10298 /*available stall durations depend on the hw + sw and will be different for different devices */
10299 /*have to add for raw after implementation*/
10300 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10301 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10302
10303 Vector<int64_t> available_stall_durations;
10304 for (uint32_t j = 0; j < stall_formats_count; j++) {
10305 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10306 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10307 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10308 available_stall_durations.add(stall_formats[j]);
10309 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10310 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10311 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10312 }
10313 } else {
10314 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10315 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10316 available_stall_durations.add(stall_formats[j]);
10317 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10318 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10319 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10320 }
10321 }
10322 }
10323 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10324 available_stall_durations.array(),
10325 available_stall_durations.size());
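    // Each ANDROID_SCALER_AVAILABLE_STALL_DURATIONS entry is a flat
    // (format, width, height, stall_duration_ns) 4-tuple. Illustrative example with
    // hypothetical values: a 4032x3024 BLOB (JPEG) size with a 300 ms JPEG stall would
    // be advertised as { HAL_PIXEL_FORMAT_BLOB, 4032, 3024, 300000000 }.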
10326
10327 //QCAMERA3_OPAQUE_RAW
10328 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10329 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10330 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10331 case LEGACY_RAW:
10332 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10333 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10334 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10335 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10336 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10337 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10338 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10339 break;
10340 case MIPI_RAW:
10341 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10342 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10343 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10344 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10345 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10346 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10347 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10348 break;
10349 default:
10350 LOGE("unknown opaque_raw_format %d",
10351 gCamCapability[cameraId]->opaque_raw_fmt);
10352 break;
10353 }
10354 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10355
10356 Vector<int32_t> strides;
10357 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10358 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10359 cam_stream_buf_plane_info_t buf_planes;
10360 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10361 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10362 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10363 &gCamCapability[cameraId]->padding_info, &buf_planes);
10364 strides.add(buf_planes.plane_info.mp[0].stride);
10365 }
10366 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10367 strides.size());
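    // QCAMERA3_OPAQUE_RAW_STRIDES is packed as flat (width, height, stride) triplets,
    // with the stride taken from mm_stream_calc_offset_raw() for the chosen opaque RAW
    // format. Illustrative example with hypothetical values: a 4208x3120 RAW dimension
    // whose first-plane stride works out to 5280 would be advertised as
    // { 4208, 3120, 5280 }.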
10368
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010369 //TBD: remove the following line once backend advertises zzHDR in feature mask
10370 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010371 //Video HDR default
10372 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10373 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010374 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010375 int32_t vhdr_mode[] = {
10376 QCAMERA3_VIDEO_HDR_MODE_OFF,
10377 QCAMERA3_VIDEO_HDR_MODE_ON};
10378
10379 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10380 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10381 vhdr_mode, vhdr_mode_count);
10382 }
10383
Thierry Strudel3d639192016-09-09 11:52:26 -070010384 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10385 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10386 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10387
10388 uint8_t isMonoOnly =
10389 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10390 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10391 &isMonoOnly, 1);
10392
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010393#ifndef USE_HAL_3_3
10394 Vector<int32_t> opaque_size;
10395 for (size_t j = 0; j < scalar_formats_count; j++) {
10396 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10397 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10398 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10399 cam_stream_buf_plane_info_t buf_planes;
10400
10401 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10402 &gCamCapability[cameraId]->padding_info, &buf_planes);
10403
10404 if (rc == 0) {
10405 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10406 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10407 opaque_size.add(buf_planes.plane_info.frame_len);
 10408 } else {
10409 LOGE("raw frame calculation failed!");
10410 }
10411 }
10412 }
10413 }
10414
10415 if ((opaque_size.size() > 0) &&
10416 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10417 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10418 else
10419 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10420#endif
10421
Thierry Strudel04e026f2016-10-10 11:27:36 -070010422 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10423 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10424 size = 0;
10425 count = CAM_IR_MODE_MAX;
10426 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10427 for (size_t i = 0; i < count; i++) {
10428 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10429 gCamCapability[cameraId]->supported_ir_modes[i]);
10430 if (NAME_NOT_FOUND != val) {
10431 avail_ir_modes[size] = (int32_t)val;
10432 size++;
10433 }
10434 }
10435 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10436 avail_ir_modes, size);
10437 }
10438
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010439 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10440 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10441 size = 0;
10442 count = CAM_AEC_CONVERGENCE_MAX;
10443 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10444 for (size_t i = 0; i < count; i++) {
10445 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10446 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10447 if (NAME_NOT_FOUND != val) {
10448 available_instant_aec_modes[size] = (int32_t)val;
10449 size++;
10450 }
10451 }
10452 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10453 available_instant_aec_modes, size);
10454 }
10455
Thierry Strudel54dc9782017-02-15 12:12:10 -080010456 int32_t sharpness_range[] = {
10457 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10458 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10459 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10460
10461 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10462 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10463 size = 0;
10464 count = CAM_BINNING_CORRECTION_MODE_MAX;
10465 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10466 for (size_t i = 0; i < count; i++) {
10467 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10468 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10469 gCamCapability[cameraId]->supported_binning_modes[i]);
10470 if (NAME_NOT_FOUND != val) {
10471 avail_binning_modes[size] = (int32_t)val;
10472 size++;
10473 }
10474 }
10475 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10476 avail_binning_modes, size);
10477 }
10478
10479 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10480 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10481 size = 0;
10482 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10483 for (size_t i = 0; i < count; i++) {
10484 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10485 gCamCapability[cameraId]->supported_aec_modes[i]);
10486 if (NAME_NOT_FOUND != val)
10487 available_aec_modes[size++] = val;
10488 }
10489 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10490 available_aec_modes, size);
10491 }
10492
10493 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10494 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10495 size = 0;
10496 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10497 for (size_t i = 0; i < count; i++) {
10498 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10499 gCamCapability[cameraId]->supported_iso_modes[i]);
10500 if (NAME_NOT_FOUND != val)
10501 available_iso_modes[size++] = val;
10502 }
10503 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10504 available_iso_modes, size);
10505 }
10506
10507 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010508 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010509 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10510 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10511 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10512
10513 int32_t available_saturation_range[4];
10514 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10515 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10516 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10517 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10518 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10519 available_saturation_range, 4);
10520
10521 uint8_t is_hdr_values[2];
10522 is_hdr_values[0] = 0;
10523 is_hdr_values[1] = 1;
10524 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10525 is_hdr_values, 2);
10526
10527 float is_hdr_confidence_range[2];
10528 is_hdr_confidence_range[0] = 0.0;
10529 is_hdr_confidence_range[1] = 1.0;
10530 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10531 is_hdr_confidence_range, 2);
10532
Emilian Peev0a972ef2017-03-16 10:25:53 +000010533 size_t eepromLength = strnlen(
10534 reinterpret_cast<const char *>(
10535 gCamCapability[cameraId]->eeprom_version_info),
10536 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10537 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010538 char easelInfo[] = ",E:N";
10539 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10540 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10541 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010542 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10543 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010544 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010545 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10546 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10547 }
10548
Thierry Strudel3d639192016-09-09 11:52:26 -070010549 gStaticMetadata[cameraId] = staticInfo.release();
10550 return rc;
10551}
10552
10553/*===========================================================================
10554 * FUNCTION : makeTable
10555 *
10556 * DESCRIPTION: make a table of sizes
10557 *
10558 * PARAMETERS :
10559 *
10560 *
10561 *==========================================================================*/
10562void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10563 size_t max_size, int32_t *sizeTable)
10564{
10565 size_t j = 0;
10566 if (size > max_size) {
10567 size = max_size;
10568 }
10569 for (size_t i = 0; i < size; i++) {
10570 sizeTable[j] = dimTable[i].width;
10571 sizeTable[j+1] = dimTable[i].height;
10572 j+=2;
10573 }
10574}
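// Note: the resulting sizeTable is a flat list of (width, height) pairs; e.g. a
// dimension table of 4000x3000 and 1920x1080 (hypothetical sizes) becomes
// { 4000, 3000, 1920, 1080 }.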
10575
10576/*===========================================================================
10577 * FUNCTION : makeFPSTable
10578 *
10579 * DESCRIPTION: make a table of fps ranges
10580 *
10581 * PARAMETERS :
10582 *
10583 *==========================================================================*/
10584void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10585 size_t max_size, int32_t *fpsRangesTable)
10586{
10587 size_t j = 0;
10588 if (size > max_size) {
10589 size = max_size;
10590 }
10591 for (size_t i = 0; i < size; i++) {
10592 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10593 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10594 j+=2;
10595 }
10596}
10597
10598/*===========================================================================
10599 * FUNCTION : makeOverridesList
10600 *
10601 * DESCRIPTION: make a list of scene mode overrides
10602 *
10603 * PARAMETERS :
10604 *
10605 *
10606 *==========================================================================*/
10607void QCamera3HardwareInterface::makeOverridesList(
10608 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10609 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10610{
 10611 /* The daemon will give a list of overrides for all scene modes.
 10612 However, we should send the framework only the overrides for the scene modes
 10613 that it supports. */
10614 size_t j = 0;
10615 if (size > max_size) {
10616 size = max_size;
10617 }
10618 size_t focus_count = CAM_FOCUS_MODE_MAX;
10619 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10620 focus_count);
10621 for (size_t i = 0; i < size; i++) {
10622 bool supt = false;
10623 size_t index = supported_indexes[i];
10624 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10625 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10626 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10627 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10628 overridesTable[index].awb_mode);
10629 if (NAME_NOT_FOUND != val) {
10630 overridesList[j+1] = (uint8_t)val;
10631 }
10632 uint8_t focus_override = overridesTable[index].af_mode;
10633 for (size_t k = 0; k < focus_count; k++) {
10634 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10635 supt = true;
10636 break;
10637 }
10638 }
10639 if (supt) {
10640 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10641 focus_override);
10642 if (NAME_NOT_FOUND != val) {
10643 overridesList[j+2] = (uint8_t)val;
10644 }
10645 } else {
10646 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10647 }
10648 j+=3;
10649 }
10650}
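// Note: overridesList is packed as one (ae_mode, awb_mode, af_mode) triplet per supported
// scene mode. Illustrative example with hypothetical values: on a flash-capable camera
// whose HDR scene override maps to auto white balance and continuous-picture focus, the
// triplet would be { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, ANDROID_CONTROL_AWB_MODE_AUTO,
// ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE }.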
10651
10652/*===========================================================================
10653 * FUNCTION : filterJpegSizes
10654 *
 10655 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that are
 10656 * at least as large as the active array size divided by the downscale factor
10657 *
10658 * PARAMETERS :
10659 *
10660 * RETURN : length of jpegSizes array
10661 *==========================================================================*/
10662
10663size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10664 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10665 uint8_t downscale_factor)
10666{
10667 if (0 == downscale_factor) {
10668 downscale_factor = 1;
10669 }
10670
10671 int32_t min_width = active_array_size.width / downscale_factor;
10672 int32_t min_height = active_array_size.height / downscale_factor;
10673 size_t jpegSizesCnt = 0;
10674 if (processedSizesCnt > maxCount) {
10675 processedSizesCnt = maxCount;
10676 }
10677 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10678 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10679 jpegSizes[jpegSizesCnt] = processedSizes[i];
10680 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10681 jpegSizesCnt += 2;
10682 }
10683 }
10684 return jpegSizesCnt;
10685}
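// Illustrative example with hypothetical values: for a 4000x3000 active array and
// downscale_factor = 2, only processed sizes of at least 2000x1500 are copied into
// jpegSizes, so 2592x1944 would be kept while 1920x1080 would be filtered out.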
10686
10687/*===========================================================================
10688 * FUNCTION : computeNoiseModelEntryS
10689 *
10690 * DESCRIPTION: function to map a given sensitivity to the S noise
10691 * model parameters in the DNG noise model.
10692 *
10693 * PARAMETERS : sens : the sensor sensitivity
10694 *
 10695 * RETURN : S (sensor amplification) noise
10696 *
10697 *==========================================================================*/
10698double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10699 double s = gCamCapability[mCameraId]->gradient_S * sens +
10700 gCamCapability[mCameraId]->offset_S;
10701 return ((s < 0.0) ? 0.0 : s);
10702}
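// Illustrative example with hypothetical calibration values: for gradient_S = 3.0e-06
// and offset_S = 4.0e-06, a sensitivity of 400 maps to
//   S = 3.0e-06 * 400 + 4.0e-06 = 1.204e-03,
// which is the S entry of the DNG noise model (negative results are clamped to 0).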
10703
10704/*===========================================================================
10705 * FUNCTION : computeNoiseModelEntryO
10706 *
10707 * DESCRIPTION: function to map a given sensitivity to the O noise
10708 * model parameters in the DNG noise model.
10709 *
10710 * PARAMETERS : sens : the sensor sensitivity
10711 *
 10712 * RETURN : O (sensor readout) noise
10713 *
10714 *==========================================================================*/
10715double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10716 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10717 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10718 1.0 : (1.0 * sens / max_analog_sens);
10719 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10720 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10721 return ((o < 0.0) ? 0.0 : o);
10722}
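// Illustrative example with hypothetical calibration values: for gradient_O = 4.5e-12,
// offset_O = 3.0e-08 and max_analog_sensitivity = 800, a sensitivity of 1600 implies a
// digital gain of 1600 / 800 = 2.0, so
//   O = 4.5e-12 * 1600^2 + 3.0e-08 * 2.0^2 = 1.152e-05 + 1.2e-07 = 1.164e-05
// (negative results are clamped to 0).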
10723
10724/*===========================================================================
10725 * FUNCTION : getSensorSensitivity
10726 *
10727 * DESCRIPTION: convert iso_mode to an integer value
10728 *
10729 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10730 *
 10731 * RETURN : sensitivity supported by sensor
10732 *
10733 *==========================================================================*/
10734int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10735{
10736 int32_t sensitivity;
10737
10738 switch (iso_mode) {
10739 case CAM_ISO_MODE_100:
10740 sensitivity = 100;
10741 break;
10742 case CAM_ISO_MODE_200:
10743 sensitivity = 200;
10744 break;
10745 case CAM_ISO_MODE_400:
10746 sensitivity = 400;
10747 break;
10748 case CAM_ISO_MODE_800:
10749 sensitivity = 800;
10750 break;
10751 case CAM_ISO_MODE_1600:
10752 sensitivity = 1600;
10753 break;
10754 default:
10755 sensitivity = -1;
10756 break;
10757 }
10758 return sensitivity;
10759}
10760
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010761int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010762 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010763 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10764 // to connect to Easel.
10765 bool doNotpowerOnEasel =
10766 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10767
10768 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010769 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10770 return OK;
10771 }
10772
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010773 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010774 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010775 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010776 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010777 return res;
10778 }
10779
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010780 EaselManagerClientOpened = true;
10781
10782 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010783 if (res != OK) {
10784 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10785 }
10786
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010787 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010788 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010789
10790 // Expose enableZsl key only when HDR+ mode is enabled.
10791 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010792 }
10793
10794 return OK;
10795}
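// Note: the HDR+/Easel behavior above is driven by system properties; for example, on a
// debuggable build one could run (assuming the build exposes these properties)
//   adb shell setprop persist.camera.hdrplus.enable 1
//   adb shell setprop persist.camera.hdrplus.profiling 1
// to leave Easel bypass-only mode (which also exposes the enableZsl key) and to enable
// HDR+ profiling, respectively.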
10796
Thierry Strudel3d639192016-09-09 11:52:26 -070010797/*===========================================================================
10798 * FUNCTION : getCamInfo
10799 *
10800 * DESCRIPTION: query camera capabilities
10801 *
10802 * PARAMETERS :
10803 * @cameraId : camera Id
10804 * @info : camera info struct to be filled in with camera capabilities
10805 *
10806 * RETURN : int type of status
10807 * NO_ERROR -- success
10808 * none-zero failure code
10809 *==========================================================================*/
10810int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10811 struct camera_info *info)
10812{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010813 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010814 int rc = 0;
10815
10816 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010817
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010818 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070010819 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010820 rc = initHdrPlusClientLocked();
10821 if (rc != OK) {
10822 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10823 pthread_mutex_unlock(&gCamLock);
10824 return rc;
10825 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010826 }
10827
Thierry Strudel3d639192016-09-09 11:52:26 -070010828 if (NULL == gCamCapability[cameraId]) {
10829 rc = initCapabilities(cameraId);
10830 if (rc < 0) {
10831 pthread_mutex_unlock(&gCamLock);
10832 return rc;
10833 }
10834 }
10835
10836 if (NULL == gStaticMetadata[cameraId]) {
10837 rc = initStaticMetadata(cameraId);
10838 if (rc < 0) {
10839 pthread_mutex_unlock(&gCamLock);
10840 return rc;
10841 }
10842 }
10843
10844 switch(gCamCapability[cameraId]->position) {
10845 case CAM_POSITION_BACK:
10846 case CAM_POSITION_BACK_AUX:
10847 info->facing = CAMERA_FACING_BACK;
10848 break;
10849
10850 case CAM_POSITION_FRONT:
10851 case CAM_POSITION_FRONT_AUX:
10852 info->facing = CAMERA_FACING_FRONT;
10853 break;
10854
10855 default:
10856 LOGE("Unknown position type %d for camera id:%d",
10857 gCamCapability[cameraId]->position, cameraId);
10858 rc = -1;
10859 break;
10860 }
10861
10862
10863 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010864#ifndef USE_HAL_3_3
10865 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10866#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010867 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010868#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010869 info->static_camera_characteristics = gStaticMetadata[cameraId];
10870
10871 //For now assume both cameras can operate independently.
10872 info->conflicting_devices = NULL;
10873 info->conflicting_devices_length = 0;
10874
10875 //resource cost is 100 * MIN(1.0, m/M),
10876 //where m is throughput requirement with maximum stream configuration
10877 //and M is CPP maximum throughput.
10878 float max_fps = 0.0;
10879 for (uint32_t i = 0;
10880 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10881 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10882 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10883 }
10884 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10885 gCamCapability[cameraId]->active_array_size.width *
10886 gCamCapability[cameraId]->active_array_size.height * max_fps /
10887 gCamCapability[cameraId]->max_pixel_bandwidth;
10888 info->resource_cost = 100 * MIN(1.0, ratio);
10889 LOGI("camera %d resource cost is %d", cameraId,
10890 info->resource_cost);
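    // Illustrative example with hypothetical values: a 4000x3000 active array capped at
    // 30 fps with MAX_PROCESSED_STREAMS assumed to be 3 gives
    //   m = 3 * 4000 * 3000 * 30 = 1.08e9 pixels/s,
    // so with a CPP bandwidth M of 1.2e9 pixels/s the resource cost would be
    //   100 * MIN(1.0, 1.08e9 / 1.2e9) = 90.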
10891
10892 pthread_mutex_unlock(&gCamLock);
10893 return rc;
10894}
10895
10896/*===========================================================================
10897 * FUNCTION : translateCapabilityToMetadata
10898 *
10899 * DESCRIPTION: translate the capability into camera_metadata_t
10900 *
10901 * PARAMETERS : type of the request
10902 *
10903 *
10904 * RETURN : success: camera_metadata_t*
10905 * failure: NULL
10906 *
10907 *==========================================================================*/
10908camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10909{
10910 if (mDefaultMetadata[type] != NULL) {
10911 return mDefaultMetadata[type];
10912 }
10913 //first time we are handling this request
10914 //fill up the metadata structure using the wrapper class
10915 CameraMetadata settings;
10916 //translate from cam_capability_t to camera_metadata_tag_t
10917 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10918 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10919 int32_t defaultRequestID = 0;
10920 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10921
10922 /* OIS disable */
10923 char ois_prop[PROPERTY_VALUE_MAX];
10924 memset(ois_prop, 0, sizeof(ois_prop));
10925 property_get("persist.camera.ois.disable", ois_prop, "0");
10926 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10927
10928 /* Force video to use OIS */
10929 char videoOisProp[PROPERTY_VALUE_MAX];
10930 memset(videoOisProp, 0, sizeof(videoOisProp));
10931 property_get("persist.camera.ois.video", videoOisProp, "1");
10932 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010933
10934 // Hybrid AE enable/disable
10935 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10936 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10937 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10938 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10939
Thierry Strudel3d639192016-09-09 11:52:26 -070010940 uint8_t controlIntent = 0;
10941 uint8_t focusMode;
10942 uint8_t vsMode;
10943 uint8_t optStabMode;
10944 uint8_t cacMode;
10945 uint8_t edge_mode;
10946 uint8_t noise_red_mode;
10947 uint8_t tonemap_mode;
10948 bool highQualityModeEntryAvailable = FALSE;
10949 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010950 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010951 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10952 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010953 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010954 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010955 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010956
Thierry Strudel3d639192016-09-09 11:52:26 -070010957 switch (type) {
10958 case CAMERA3_TEMPLATE_PREVIEW:
10959 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10960 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10961 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10962 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10963 edge_mode = ANDROID_EDGE_MODE_FAST;
10964 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10965 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10966 break;
10967 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10968 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10969 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10970 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10971 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10972 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10973 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10974 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10975 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10976 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10977 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10978 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10979 highQualityModeEntryAvailable = TRUE;
10980 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10981 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10982 fastModeEntryAvailable = TRUE;
10983 }
10984 }
10985 if (highQualityModeEntryAvailable) {
10986 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10987 } else if (fastModeEntryAvailable) {
10988 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10989 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010990 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10991 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10992 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010993 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010994 break;
10995 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10996 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10997 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10998 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010999 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11000 edge_mode = ANDROID_EDGE_MODE_FAST;
11001 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11002 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11003 if (forceVideoOis)
11004 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11005 break;
11006 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11007 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11008 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11009 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011010 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11011 edge_mode = ANDROID_EDGE_MODE_FAST;
11012 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11013 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11014 if (forceVideoOis)
11015 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11016 break;
11017 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11018 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11019 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11020 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11021 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11022 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11023 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11024 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11025 break;
11026 case CAMERA3_TEMPLATE_MANUAL:
11027 edge_mode = ANDROID_EDGE_MODE_FAST;
11028 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11029 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11030 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11031 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11032 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11033 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11034 break;
11035 default:
11036 edge_mode = ANDROID_EDGE_MODE_FAST;
11037 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11038 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11039 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11040 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11041 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11042 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11043 break;
11044 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011045 // Set CAC to OFF if the underlying device doesn't support it
11046 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11047 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11048 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011049 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11050 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11051 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11052 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11053 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11054 }
11055 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011056 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011057 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011058
11059 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11060 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11061 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11062 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11063 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11064 || ois_disable)
11065 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11066 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011067 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011068
11069 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11070 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11071
11072 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11073 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11074
11075 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11076 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11077
11078 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11079 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11080
11081 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11082 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11083
11084 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11085 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11086
11087 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11088 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11089
11090 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11091 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11092
11093 /*flash*/
11094 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11095 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11096
11097 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11098 settings.update(ANDROID_FLASH_FIRING_POWER,
11099 &flashFiringLevel, 1);
11100
11101 /* lens */
11102 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11103 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11104
11105 if (gCamCapability[mCameraId]->filter_densities_count) {
11106 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11107 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11108 gCamCapability[mCameraId]->filter_densities_count);
11109 }
11110
11111 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11112 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11113
Thierry Strudel3d639192016-09-09 11:52:26 -070011114 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11115 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11116
11117 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11118 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11119
11120 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11121 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11122
11123 /* face detection (default to OFF) */
11124 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11125 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11126
Thierry Strudel54dc9782017-02-15 12:12:10 -080011127 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11128 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011129
11130 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11131 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11132
11133 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11134 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11135
Thierry Strudel3d639192016-09-09 11:52:26 -070011136
11137 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11138 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11139
 11140 /* Exposure time (default to the minimum supported exposure time) */
11141 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11142 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11143
11144 /* frame duration */
11145 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11146 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11147
11148 /* sensitivity */
11149 static const int32_t default_sensitivity = 100;
11150 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011151#ifndef USE_HAL_3_3
11152 static const int32_t default_isp_sensitivity =
11153 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11154 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11155#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011156
11157 /*edge mode*/
11158 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11159
11160 /*noise reduction mode*/
11161 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11162
11163 /*color correction mode*/
11164 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11165 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11166
 11167 /*tonemap mode*/
11168 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11169
11170 int32_t scaler_crop_region[4];
11171 scaler_crop_region[0] = 0;
11172 scaler_crop_region[1] = 0;
11173 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11174 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11175 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11176
11177 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11178 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11179
11180 /*focus distance*/
11181 float focus_distance = 0.0;
11182 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11183
11184 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011185 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011186 float max_range = 0.0;
11187 float max_fixed_fps = 0.0;
11188 int32_t fps_range[2] = {0, 0};
11189 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11190 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011191 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11192 TEMPLATE_MAX_PREVIEW_FPS) {
11193 continue;
11194 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011195 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11196 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11197 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11198 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11199 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11200 if (range > max_range) {
11201 fps_range[0] =
11202 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11203 fps_range[1] =
11204 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11205 max_range = range;
11206 }
11207 } else {
11208 if (range < 0.01 && max_fixed_fps <
11209 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11210 fps_range[0] =
11211 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11212 fps_range[1] =
11213 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11214 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11215 }
11216 }
11217 }
11218 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
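    // Illustrative example with a hypothetical fps table of {15,30}, {30,30} and {7.5,60}:
    // ranges above TEMPLATE_MAX_PREVIEW_FPS are skipped (here {7.5,60}), preview/still/ZSL
    // templates pick the widest remaining range ({15,30}), and the other templates pick
    // the highest fixed range ({30,30}).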
11219
11220 /*precapture trigger*/
11221 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11222 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11223
11224 /*af trigger*/
11225 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11226 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11227
11228 /* ae & af regions */
11229 int32_t active_region[] = {
11230 gCamCapability[mCameraId]->active_array_size.left,
11231 gCamCapability[mCameraId]->active_array_size.top,
11232 gCamCapability[mCameraId]->active_array_size.left +
11233 gCamCapability[mCameraId]->active_array_size.width,
11234 gCamCapability[mCameraId]->active_array_size.top +
11235 gCamCapability[mCameraId]->active_array_size.height,
11236 0};
11237 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11238 sizeof(active_region) / sizeof(active_region[0]));
11239 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11240 sizeof(active_region) / sizeof(active_region[0]));
11241
11242 /* black level lock */
11243 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11244 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11245
Thierry Strudel3d639192016-09-09 11:52:26 -070011246 //special defaults for manual template
11247 if (type == CAMERA3_TEMPLATE_MANUAL) {
11248 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11249 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11250
11251 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11252 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11253
11254 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11255 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11256
11257 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11258 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11259
11260 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11261 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11262
11263 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11264 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11265 }
11266
11267
11268 /* TNR
 11269 * We'll use this location to determine for which templates TNR will be enabled.
 11270 * TNR is enabled if either the preview or the video stream requires it.
 11271 * This is not to be confused with per-stream linking; that decision is still
 11272 * made on a per-session basis and is handled as part of stream configuration.
11273 */
11274 uint8_t tnr_enable = 0;
11275
11276 if (m_bTnrPreview || m_bTnrVideo) {
11277
11278 switch (type) {
11279 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11280 tnr_enable = 1;
11281 break;
11282
11283 default:
11284 tnr_enable = 0;
11285 break;
11286 }
11287
11288 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11289 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11290 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11291
11292 LOGD("TNR:%d with process plate %d for template:%d",
11293 tnr_enable, tnr_process_type, type);
11294 }
11295
11296 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011297 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011298 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11299
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011300 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011301 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11302
Shuzhen Wang920ea402017-05-03 08:49:39 -070011303 uint8_t related_camera_id = mCameraId;
11304 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011305
11306 /* CDS default */
11307 char prop[PROPERTY_VALUE_MAX];
11308 memset(prop, 0, sizeof(prop));
11309 property_get("persist.camera.CDS", prop, "Auto");
11310 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11311 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11312 if (CAM_CDS_MODE_MAX == cds_mode) {
11313 cds_mode = CAM_CDS_MODE_AUTO;
11314 }
11315
11316 /* Disabling CDS in templates which have TNR enabled*/
11317 if (tnr_enable)
11318 cds_mode = CAM_CDS_MODE_OFF;
11319
11320 int32_t mode = cds_mode;
11321 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011322
Thierry Strudel269c81a2016-10-12 12:13:59 -070011323 /* Manual Convergence AEC Speed is disabled by default*/
11324 float default_aec_speed = 0;
11325 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11326
11327 /* Manual Convergence AWB Speed is disabled by default*/
11328 float default_awb_speed = 0;
11329 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11330
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011331 // Set instant AEC to normal convergence by default
11332 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11333 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11334
Shuzhen Wang19463d72016-03-08 11:09:52 -080011335 /* hybrid ae */
11336 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11337
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011338 if (gExposeEnableZslKey) {
11339 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11340 }
11341
Thierry Strudel3d639192016-09-09 11:52:26 -070011342 mDefaultMetadata[type] = settings.release();
11343
11344 return mDefaultMetadata[type];
11345}
11346
11347/*===========================================================================
11348 * FUNCTION : setFrameParameters
11349 *
11350 * DESCRIPTION: set parameters per frame as requested in the metadata from
11351 * framework
11352 *
11353 * PARAMETERS :
11354 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011355 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011356 * @blob_request: Whether this request is a blob request or not
11357 *
11358 * RETURN : success: NO_ERROR
 11359 * failure: non-zero error code (e.g. BAD_VALUE)
11360 *==========================================================================*/
11361int QCamera3HardwareInterface::setFrameParameters(
11362 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011363 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011364 int blob_request,
11365 uint32_t snapshotStreamId)
11366{
11367 /*translate from camera_metadata_t type to parm_type_t*/
11368 int rc = 0;
11369 int32_t hal_version = CAM_HAL_V3;
11370
11371 clear_metadata_buffer(mParameters);
11372 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11373 LOGE("Failed to set hal version in the parameters");
11374 return BAD_VALUE;
11375 }
11376
11377 /*we need to update the frame number in the parameters*/
11378 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11379 request->frame_number)) {
11380 LOGE("Failed to set the frame number in the parameters");
11381 return BAD_VALUE;
11382 }
11383
11384 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011385 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011386 LOGE("Failed to set stream type mask in the parameters");
11387 return BAD_VALUE;
11388 }
11389
11390 if (mUpdateDebugLevel) {
11391 uint32_t dummyDebugLevel = 0;
11392 /* The value of dummyDebugLevel is irrelavent. On
11393 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11394 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11395 dummyDebugLevel)) {
11396 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11397 return BAD_VALUE;
11398 }
11399 mUpdateDebugLevel = false;
11400 }
11401
11402 if(request->settings != NULL){
11403 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11404 if (blob_request)
11405 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11406 }
11407
11408 return rc;
11409}
11410
11411/*===========================================================================
11412 * FUNCTION : setReprocParameters
11413 *
11414 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11415 * return it.
11416 *
11417 * PARAMETERS :
11418 * @request : request that needs to be serviced
11419 *
11420 * RETURN : success: NO_ERROR
 11421 * failure: non-zero error code (e.g. BAD_VALUE)
11422 *==========================================================================*/
11423int32_t QCamera3HardwareInterface::setReprocParameters(
11424 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11425 uint32_t snapshotStreamId)
11426{
11427 /*translate from camera_metadata_t type to parm_type_t*/
11428 int rc = 0;
11429
11430 if (NULL == request->settings){
11431 LOGE("Reprocess settings cannot be NULL");
11432 return BAD_VALUE;
11433 }
11434
11435 if (NULL == reprocParam) {
11436 LOGE("Invalid reprocessing metadata buffer");
11437 return BAD_VALUE;
11438 }
11439 clear_metadata_buffer(reprocParam);
11440
11441 /*we need to update the frame number in the parameters*/
11442 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11443 request->frame_number)) {
11444 LOGE("Failed to set the frame number in the parameters");
11445 return BAD_VALUE;
11446 }
11447
11448 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11449 if (rc < 0) {
11450 LOGE("Failed to translate reproc request");
11451 return rc;
11452 }
11453
11454 CameraMetadata frame_settings;
11455 frame_settings = request->settings;
11456 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11457 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11458 int32_t *crop_count =
11459 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11460 int32_t *crop_data =
11461 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11462 int32_t *roi_map =
11463 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11464 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11465 cam_crop_data_t crop_meta;
11466 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11467 crop_meta.num_of_streams = 1;
11468 crop_meta.crop_info[0].crop.left = crop_data[0];
11469 crop_meta.crop_info[0].crop.top = crop_data[1];
11470 crop_meta.crop_info[0].crop.width = crop_data[2];
11471 crop_meta.crop_info[0].crop.height = crop_data[3];
11472
11473 crop_meta.crop_info[0].roi_map.left =
11474 roi_map[0];
11475 crop_meta.crop_info[0].roi_map.top =
11476 roi_map[1];
11477 crop_meta.crop_info[0].roi_map.width =
11478 roi_map[2];
11479 crop_meta.crop_info[0].roi_map.height =
11480 roi_map[3];
11481
11482 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11483 rc = BAD_VALUE;
11484 }
11485 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11486 request->input_buffer->stream,
11487 crop_meta.crop_info[0].crop.left,
11488 crop_meta.crop_info[0].crop.top,
11489 crop_meta.crop_info[0].crop.width,
11490 crop_meta.crop_info[0].crop.height);
11491 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11492 request->input_buffer->stream,
11493 crop_meta.crop_info[0].roi_map.left,
11494 crop_meta.crop_info[0].roi_map.top,
11495 crop_meta.crop_info[0].roi_map.width,
11496 crop_meta.crop_info[0].roi_map.height);
11497 } else {
11498 LOGE("Invalid reprocess crop count %d!", *crop_count);
11499 }
11500 } else {
11501 LOGE("No crop data from matching output stream");
11502 }
11503
11504 /* These settings are not needed for regular requests so handle them specially for
11505 reprocess requests; information needed for EXIF tags */
11506 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11507 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11508 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11509 if (NAME_NOT_FOUND != val) {
11510 uint32_t flashMode = (uint32_t)val;
11511 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11512 rc = BAD_VALUE;
11513 }
11514 } else {
11515 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11516 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11517 }
11518 } else {
11519 LOGH("No flash mode in reprocess settings");
11520 }
11521
11522 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11523 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11524 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11525 rc = BAD_VALUE;
11526 }
11527 } else {
11528 LOGH("No flash state in reprocess settings");
11529 }
11530
11531 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11532 uint8_t *reprocessFlags =
11533 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11534 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11535 *reprocessFlags)) {
11536 rc = BAD_VALUE;
11537 }
11538 }
11539
Thierry Strudel54dc9782017-02-15 12:12:10 -080011540 // Add exif debug data to internal metadata
11541 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11542 mm_jpeg_debug_exif_params_t *debug_params =
11543 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11544 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11545 // AE
11546 if (debug_params->ae_debug_params_valid == TRUE) {
11547 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11548 debug_params->ae_debug_params);
11549 }
11550 // AWB
11551 if (debug_params->awb_debug_params_valid == TRUE) {
11552 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11553 debug_params->awb_debug_params);
11554 }
11555 // AF
11556 if (debug_params->af_debug_params_valid == TRUE) {
11557 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11558 debug_params->af_debug_params);
11559 }
11560 // ASD
11561 if (debug_params->asd_debug_params_valid == TRUE) {
11562 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11563 debug_params->asd_debug_params);
11564 }
11565 // Stats
11566 if (debug_params->stats_debug_params_valid == TRUE) {
11567 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11568 debug_params->stats_debug_params);
11569 }
11570 // BE Stats
11571 if (debug_params->bestats_debug_params_valid == TRUE) {
11572 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11573 debug_params->bestats_debug_params);
11574 }
11575 // BHIST
11576 if (debug_params->bhist_debug_params_valid == TRUE) {
11577 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11578 debug_params->bhist_debug_params);
11579 }
11580 // 3A Tuning
11581 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11582 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11583 debug_params->q3a_tuning_debug_params);
11584 }
11585 }
11586
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011587 // Add metadata which reprocess needs
11588 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11589 cam_reprocess_info_t *repro_info =
11590 (cam_reprocess_info_t *)frame_settings.find
11591 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011592 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011593 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011594 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011595 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011596 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011597 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011598 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011599 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011600 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011601 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011602 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011603 repro_info->pipeline_flip);
11604 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11605 repro_info->af_roi);
11606 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11607 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011608 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings, the
11609 CAM_INTF_PARM_ROTATION metadata has already been added in
11610 translateToHalMetadata and the HAL must keep that new rotation
11611 metadata. Otherwise, the old rotation info saved in the vendor tag
11612 is used. */
11613 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11614 CAM_INTF_PARM_ROTATION, reprocParam) {
11615 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11616 } else {
11617 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011618 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011619 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011620 }
11621
11622 /* Add additional JPEG cropping information. The app sets QCAMERA3_JPEG_ENCODE_CROP_RECT
11623 to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding;
11624 roi.width and roi.height become the final JPEG size.
11625 For now, the HAL only checks this for reprocess requests. */
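    /* Layout sketch (derived from the parsing below, for illustration only):
     *   QCAMERA3_JPEG_ENCODE_CROP_RECT : int32[4] = {left, top, width, height}
     *   QCAMERA3_JPEG_ENCODE_CROP_ROI  : int32[4], where [2] and [3] give the
     *                                    scaled output JPEG width and height */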
11626 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11627 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11628 uint8_t *enable =
11629 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11630 if (*enable == TRUE) {
11631 int32_t *crop_data =
11632 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11633 cam_stream_crop_info_t crop_meta;
11634 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11635 crop_meta.stream_id = 0;
11636 crop_meta.crop.left = crop_data[0];
11637 crop_meta.crop.top = crop_data[1];
11638 crop_meta.crop.width = crop_data[2];
11639 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011640 // The JPEG crop roi should match cpp output size
11641 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11642 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11643 crop_meta.roi_map.left = 0;
11644 crop_meta.roi_map.top = 0;
11645 crop_meta.roi_map.width = cpp_crop->crop.width;
11646 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011647 }
11648 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11649 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011650 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011651 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011652 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11653 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011654 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011655 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11656
11657 // Add JPEG scale information
11658 cam_dimension_t scale_dim;
11659 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11660 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11661 int32_t *roi =
11662 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11663 scale_dim.width = roi[2];
11664 scale_dim.height = roi[3];
11665 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11666 scale_dim);
11667 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11668 scale_dim.width, scale_dim.height, mCameraId);
11669 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011670 }
11671 }
11672
11673 return rc;
11674}
11675
11676/*===========================================================================
11677 * FUNCTION : saveRequestSettings
11678 *
11679 * DESCRIPTION: Add any settings that might have changed to the request settings
11680 * and save the settings to be applied on the frame
11681 *
11682 * PARAMETERS :
11683 * @jpegMetadata : the extracted and/or modified jpeg metadata
11684 * @request : request with initial settings
11685 *
11686 * RETURN :
11687 * camera_metadata_t* : pointer to the saved request settings
11688 *==========================================================================*/
11689camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11690 const CameraMetadata &jpegMetadata,
11691 camera3_capture_request_t *request)
11692{
11693 camera_metadata_t *resultMetadata;
11694 CameraMetadata camMetadata;
11695 camMetadata = request->settings;
11696
11697 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11698 int32_t thumbnail_size[2];
11699 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11700 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11701 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11702 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11703 }
11704
11705 if (request->input_buffer != NULL) {
11706 uint8_t reprocessFlags = 1;
11707 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11708 (uint8_t*)&reprocessFlags,
11709 sizeof(reprocessFlags));
11710 }
11711
11712 resultMetadata = camMetadata.release();
11713 return resultMetadata;
11714}
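/* Usage sketch (hypothetical caller, for illustration only):
 *   camera_metadata_t *saved = saveRequestSettings(jpegMetadata, request);
 *   // ... attach 'saved' to the pending request ...
 *   free_camera_metadata(saved); // caller owns the buffer released by CameraMetadata
 */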
11715
11716/*===========================================================================
11717 * FUNCTION : setHalFpsRange
11718 *
11719 * DESCRIPTION: set FPS range parameter
11720 *
11721 *
11722 * PARAMETERS :
11723 * @settings : Metadata from framework
11724 * @hal_metadata: Metadata buffer
11725 *
11726 *
11727 * RETURN : success: NO_ERROR
11728 * failure: BAD_VALUE
11729 *==========================================================================*/
11730int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11731 metadata_buffer_t *hal_metadata)
11732{
11733 int32_t rc = NO_ERROR;
11734 cam_fps_range_t fps_range;
11735 fps_range.min_fps = (float)
11736 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11737 fps_range.max_fps = (float)
11738 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11739 fps_range.video_min_fps = fps_range.min_fps;
11740 fps_range.video_max_fps = fps_range.max_fps;
11741
11742 LOGD("aeTargetFpsRange fps: [%f %f]",
11743 fps_range.min_fps, fps_range.max_fps);
11744 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11745 * follows:
11746 * ---------------------------------------------------------------|
11747 * Video stream is absent in configure_streams |
11748 * (Camcorder preview before the first video record) |
11749 * ---------------------------------------------------------------|
11750 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11751 * | | | vid_min/max_fps|
11752 * ---------------------------------------------------------------|
11753 * NO | [ 30, 240] | 240 | [240, 240] |
11754 * |-------------|-------------|----------------|
11755 * | [240, 240] | 240 | [240, 240] |
11756 * ---------------------------------------------------------------|
11757 * Video stream is present in configure_streams |
11758 * ---------------------------------------------------------------|
11759 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11760 * | | | vid_min/max_fps|
11761 * ---------------------------------------------------------------|
11762 * NO | [ 30, 240] | 240 | [240, 240] |
11763 * (camcorder prev |-------------|-------------|----------------|
11764 * after video rec | [240, 240] | 240 | [240, 240] |
11765 * is stopped) | | | |
11766 * ---------------------------------------------------------------|
11767 * YES | [ 30, 240] | 240 | [240, 240] |
11768 * |-------------|-------------|----------------|
11769 * | [240, 240] | 240 | [240, 240] |
11770 * ---------------------------------------------------------------|
11771 * When Video stream is absent in configure_streams,
11772 * preview fps = sensor_fps / batchsize
11773 * Eg: for 240fps at batchSize 4, preview = 60fps
11774 * for 120fps at batchSize 4, preview = 30fps
11775 *
11776 * When video stream is present in configure_streams, preview fps is as per
11777 * the ratio of preview buffers to video buffers requested in process
11778 * capture request
11779 */
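    /* Illustrative arithmetic for the batch size computed below, assuming
     * PREVIEW_FPS_FOR_HFR is 30: an aeTargetFpsRange of [240, 240] gives
     * mBatchSize = 240 / 30 = 8, which is then capped at MAX_HFR_BATCH_SIZE. */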
11780 mBatchSize = 0;
11781 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11782 fps_range.min_fps = fps_range.video_max_fps;
11783 fps_range.video_min_fps = fps_range.video_max_fps;
11784 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11785 fps_range.max_fps);
11786 if (NAME_NOT_FOUND != val) {
11787 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11788 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11789 return BAD_VALUE;
11790 }
11791
11792 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11793 /* If batchmode is currently in progress and the fps changes,
11794 * set the flag to restart the sensor */
11795 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11796 (mHFRVideoFps != fps_range.max_fps)) {
11797 mNeedSensorRestart = true;
11798 }
11799 mHFRVideoFps = fps_range.max_fps;
11800 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11801 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11802 mBatchSize = MAX_HFR_BATCH_SIZE;
11803 }
11804 }
11805 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11806
11807 }
11808 } else {
11809 /* HFR mode is a session parameter in the backend/ISP. It should be reset
11810 * when not in HFR mode */
11811 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11812 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11813 return BAD_VALUE;
11814 }
11815 }
11816 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11817 return BAD_VALUE;
11818 }
11819 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11820 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11821 return rc;
11822}
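/* Note: setHalFpsRange() is invoked from translateFwkMetadataToHalMetadata()
 * only when ANDROID_CONTROL_AE_TARGET_FPS_RANGE is present in the request
 * settings (see the handler further below). */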
11823
11824/*===========================================================================
11825 * FUNCTION : translateToHalMetadata
11826 *
11827 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11828 *
11829 *
11830 * PARAMETERS :
11831 * @request : request sent from framework
11832 *
11833 *
11834 * RETURN : success: NO_ERROR
11835 * failure: BAD_VALUE
11836 *==========================================================================*/
11837int QCamera3HardwareInterface::translateToHalMetadata
11838 (const camera3_capture_request_t *request,
11839 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011840 uint32_t snapshotStreamId) {
11841 if (request == nullptr || hal_metadata == nullptr) {
11842 return BAD_VALUE;
11843 }
11844
11845 int64_t minFrameDuration = getMinFrameDuration(request);
11846
11847 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11848 minFrameDuration);
11849}
11850
11851int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11852 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11853 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11854
Thierry Strudel3d639192016-09-09 11:52:26 -070011855 int rc = 0;
11856 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011857 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011858
11859 /* Do not change the order of the following list unless you know what you are
11860 * doing.
11861 * The order is laid out in such a way that parameters in the front of the table
11862 * may be used to override the parameters later in the table. Examples are:
11863 * 1. META_MODE should precede AEC/AWB/AF MODE
11864 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11865 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11866 * 4. Any mode should precede its corresponding settings
11867 */
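    // For example, ANDROID_CONTROL_MODE is translated first so that
    // extractSceneMode() can adjust the AE/AWB/AF related parameters that are
    // parsed later in this function.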
11868 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11869 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11870 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11871 rc = BAD_VALUE;
11872 }
11873 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11874 if (rc != NO_ERROR) {
11875 LOGE("extractSceneMode failed");
11876 }
11877 }
11878
11879 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11880 uint8_t fwk_aeMode =
11881 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11882 uint8_t aeMode;
11883 int32_t redeye;
11884
11885 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11886 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011887 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11888 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011889 } else {
11890 aeMode = CAM_AE_MODE_ON;
11891 }
11892 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11893 redeye = 1;
11894 } else {
11895 redeye = 0;
11896 }
11897
11898 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11899 fwk_aeMode);
11900 if (NAME_NOT_FOUND != val) {
11901 int32_t flashMode = (int32_t)val;
11902 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11903 }
11904
11905 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11906 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11907 rc = BAD_VALUE;
11908 }
11909 }
11910
11911 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11912 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11913 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11914 fwk_whiteLevel);
11915 if (NAME_NOT_FOUND != val) {
11916 uint8_t whiteLevel = (uint8_t)val;
11917 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11918 rc = BAD_VALUE;
11919 }
11920 }
11921 }
11922
11923 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11924 uint8_t fwk_cacMode =
11925 frame_settings.find(
11926 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11927 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11928 fwk_cacMode);
11929 if (NAME_NOT_FOUND != val) {
11930 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11931 bool entryAvailable = FALSE;
11932 // Check whether the framework-set CAC mode is supported by the device
11933 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11934 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11935 entryAvailable = TRUE;
11936 break;
11937 }
11938 }
11939 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11940 // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.,
11941 // Only HW ISP CAC + no SW CAC : Advertise all 3 with High doing the same as Fast by the ISP
11942 // No HW ISP CAC + only SW CAC : Advertise all 3 with Fast doing the same as OFF
11943 if (entryAvailable == FALSE) {
11944 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11945 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11946 } else {
11947 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11948 // High is not supported, so set FAST since the spec says the underlying
11949 // device implementation can be the same for both modes.
11950 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11951 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11952 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
11953 // in order to avoid the fps drop due to high quality
11954 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11955 } else {
11956 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11957 }
11958 }
11959 }
11960 LOGD("Final cacMode is %d", cacMode);
11961 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11962 rc = BAD_VALUE;
11963 }
11964 } else {
11965 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11966 }
11967 }
11968
Thierry Strudel2896d122017-02-23 19:18:03 -080011969 char af_value[PROPERTY_VALUE_MAX];
11970 property_get("persist.camera.af.infinity", af_value, "0");
11971
Jason Lee84ae9972017-02-24 13:24:24 -080011972 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011973 if (atoi(af_value) == 0) {
11974 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011975 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011976 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11977 fwk_focusMode);
11978 if (NAME_NOT_FOUND != val) {
11979 uint8_t focusMode = (uint8_t)val;
11980 LOGD("set focus mode %d", focusMode);
11981 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11982 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11983 rc = BAD_VALUE;
11984 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011985 }
11986 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011987 } else {
11988 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11989 LOGE("Focus forced to infinity %d", focusMode);
11990 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11991 rc = BAD_VALUE;
11992 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011993 }
11994
Jason Lee84ae9972017-02-24 13:24:24 -080011995 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11996 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011997 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11998 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11999 focalDistance)) {
12000 rc = BAD_VALUE;
12001 }
12002 }
12003
12004 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12005 uint8_t fwk_antibandingMode =
12006 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12007 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12008 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12009 if (NAME_NOT_FOUND != val) {
12010 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012011 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12012 if (m60HzZone) {
12013 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12014 } else {
12015 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12016 }
12017 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012018 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12019 hal_antibandingMode)) {
12020 rc = BAD_VALUE;
12021 }
12022 }
12023 }
12024
12025 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12026 int32_t expCompensation = frame_settings.find(
12027 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12028 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12029 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12030 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12031 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012032 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012033 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12034 expCompensation)) {
12035 rc = BAD_VALUE;
12036 }
12037 }
12038
12039 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12040 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12041 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12042 rc = BAD_VALUE;
12043 }
12044 }
12045 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12046 rc = setHalFpsRange(frame_settings, hal_metadata);
12047 if (rc != NO_ERROR) {
12048 LOGE("setHalFpsRange failed");
12049 }
12050 }
12051
12052 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12053 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12054 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12055 rc = BAD_VALUE;
12056 }
12057 }
12058
12059 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12060 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12061 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12062 fwk_effectMode);
12063 if (NAME_NOT_FOUND != val) {
12064 uint8_t effectMode = (uint8_t)val;
12065 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12066 rc = BAD_VALUE;
12067 }
12068 }
12069 }
12070
12071 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12072 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12073 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12074 colorCorrectMode)) {
12075 rc = BAD_VALUE;
12076 }
12077 }
12078
12079 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12080 cam_color_correct_gains_t colorCorrectGains;
12081 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12082 colorCorrectGains.gains[i] =
12083 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12084 }
12085 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12086 colorCorrectGains)) {
12087 rc = BAD_VALUE;
12088 }
12089 }
12090
12091 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12092 cam_color_correct_matrix_t colorCorrectTransform;
12093 cam_rational_type_t transform_elem;
12094 size_t num = 0;
12095 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12096 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12097 transform_elem.numerator =
12098 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12099 transform_elem.denominator =
12100 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12101 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12102 num++;
12103 }
12104 }
12105 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12106 colorCorrectTransform)) {
12107 rc = BAD_VALUE;
12108 }
12109 }
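    // Note: the loop above reads the ANDROID_COLOR_CORRECTION_TRANSFORM rationals
    // in row-major order, i.e. data.r[0..2] fill row 0 of transform_matrix,
    // data.r[3..5] row 1, and so on (CC_MATRIX_ROWS x CC_MATRIX_COLS entries).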
12110
12111 cam_trigger_t aecTrigger;
12112 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12113 aecTrigger.trigger_id = -1;
12114 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12115 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12116 aecTrigger.trigger =
12117 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12118 aecTrigger.trigger_id =
12119 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12120 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12121 aecTrigger)) {
12122 rc = BAD_VALUE;
12123 }
12124 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12125 aecTrigger.trigger, aecTrigger.trigger_id);
12126 }
12127
12128 /*af_trigger must come with a trigger id*/
12129 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12130 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12131 cam_trigger_t af_trigger;
12132 af_trigger.trigger =
12133 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12134 af_trigger.trigger_id =
12135 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12136 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12137 rc = BAD_VALUE;
12138 }
12139 LOGD("AfTrigger: %d AfTriggerID: %d",
12140 af_trigger.trigger, af_trigger.trigger_id);
12141 }
12142
12143 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12144 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12145 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12146 rc = BAD_VALUE;
12147 }
12148 }
12149 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12150 cam_edge_application_t edge_application;
12151 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012152
Thierry Strudel3d639192016-09-09 11:52:26 -070012153 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12154 edge_application.sharpness = 0;
12155 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012156 edge_application.sharpness =
12157 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12158 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12159 int32_t sharpness =
12160 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12161 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12162 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12163 LOGD("Setting edge mode sharpness %d", sharpness);
12164 edge_application.sharpness = sharpness;
12165 }
12166 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012167 }
12168 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12169 rc = BAD_VALUE;
12170 }
12171 }
12172
12173 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12174 int32_t respectFlashMode = 1;
12175 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12176 uint8_t fwk_aeMode =
12177 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012178 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12179 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12180 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012181 respectFlashMode = 0;
12182 LOGH("AE Mode controls flash, ignore android.flash.mode");
12183 }
12184 }
12185 if (respectFlashMode) {
12186 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12187 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12188 LOGH("flash mode after mapping %d", val);
12189 // To check: CAM_INTF_META_FLASH_MODE usage
12190 if (NAME_NOT_FOUND != val) {
12191 uint8_t flashMode = (uint8_t)val;
12192 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12193 rc = BAD_VALUE;
12194 }
12195 }
12196 }
12197 }
12198
12199 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12200 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12201 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12202 rc = BAD_VALUE;
12203 }
12204 }
12205
12206 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12207 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12208 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12209 flashFiringTime)) {
12210 rc = BAD_VALUE;
12211 }
12212 }
12213
12214 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12215 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12216 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12217 hotPixelMode)) {
12218 rc = BAD_VALUE;
12219 }
12220 }
12221
12222 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12223 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12224 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12225 lensAperture)) {
12226 rc = BAD_VALUE;
12227 }
12228 }
12229
12230 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12231 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12232 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12233 filterDensity)) {
12234 rc = BAD_VALUE;
12235 }
12236 }
12237
12238 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12239 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12240 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12241 focalLength)) {
12242 rc = BAD_VALUE;
12243 }
12244 }
12245
12246 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12247 uint8_t optStabMode =
12248 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12249 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12250 optStabMode)) {
12251 rc = BAD_VALUE;
12252 }
12253 }
12254
12255 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12256 uint8_t videoStabMode =
12257 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12258 LOGD("videoStabMode from APP = %d", videoStabMode);
12259 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12260 videoStabMode)) {
12261 rc = BAD_VALUE;
12262 }
12263 }
12264
12265
12266 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12267 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12268 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12269 noiseRedMode)) {
12270 rc = BAD_VALUE;
12271 }
12272 }
12273
12274 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12275 float reprocessEffectiveExposureFactor =
12276 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12277 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12278 reprocessEffectiveExposureFactor)) {
12279 rc = BAD_VALUE;
12280 }
12281 }
12282
12283 cam_crop_region_t scalerCropRegion;
12284 bool scalerCropSet = false;
12285 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12286 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12287 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12288 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12289 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12290
12291 // Map coordinate system from active array to sensor output.
12292 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12293 scalerCropRegion.width, scalerCropRegion.height);
12294
12295 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12296 scalerCropRegion)) {
12297 rc = BAD_VALUE;
12298 }
12299 scalerCropSet = true;
12300 }
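    // Note: scalerCropRegion (now in sensor coordinates) is reused by
    // resetIfNeededROI() further below to validate the AE/AF regions against
    // the active crop before they are batched.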
12301
12302 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12303 int64_t sensorExpTime =
12304 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12305 LOGD("setting sensorExpTime %lld", sensorExpTime);
12306 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12307 sensorExpTime)) {
12308 rc = BAD_VALUE;
12309 }
12310 }
12311
12312 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12313 int64_t sensorFrameDuration =
12314 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012315 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12316 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12317 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12318 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12319 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12320 sensorFrameDuration)) {
12321 rc = BAD_VALUE;
12322 }
12323 }
12324
12325 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12326 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12327 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12328 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12329 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12330 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12331 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12332 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12333 sensorSensitivity)) {
12334 rc = BAD_VALUE;
12335 }
12336 }
12337
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012338#ifndef USE_HAL_3_3
12339 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12340 int32_t ispSensitivity =
12341 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12342 if (ispSensitivity <
12343 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12344 ispSensitivity =
12345 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12346 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12347 }
12348 if (ispSensitivity >
12349 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12350 ispSensitivity =
12351 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12352 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12353 }
12354 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12355 ispSensitivity)) {
12356 rc = BAD_VALUE;
12357 }
12358 }
12359#endif
12360
Thierry Strudel3d639192016-09-09 11:52:26 -070012361 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12362 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12363 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12364 rc = BAD_VALUE;
12365 }
12366 }
12367
12368 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12369 uint8_t fwk_facedetectMode =
12370 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12371
12372 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12373 fwk_facedetectMode);
12374
12375 if (NAME_NOT_FOUND != val) {
12376 uint8_t facedetectMode = (uint8_t)val;
12377 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12378 facedetectMode)) {
12379 rc = BAD_VALUE;
12380 }
12381 }
12382 }
12383
Thierry Strudel54dc9782017-02-15 12:12:10 -080012384 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012385 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012386 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012387 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12388 histogramMode)) {
12389 rc = BAD_VALUE;
12390 }
12391 }
12392
12393 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12394 uint8_t sharpnessMapMode =
12395 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12396 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12397 sharpnessMapMode)) {
12398 rc = BAD_VALUE;
12399 }
12400 }
12401
12402 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12403 uint8_t tonemapMode =
12404 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12405 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12406 rc = BAD_VALUE;
12407 }
12408 }
12409 /* Tonemap curve channels: ch0 = G, ch1 = B, ch2 = R */
12410 /* All tonemap channels will have the same number of points. */
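    /* Data layout sketch (as parsed below): each ANDROID_TONEMAP_CURVE_* entry is
     * a flat list of (Pin, Pout) float pairs, so tonemap_points_cnt is count/2 and
     * tonemap_points[i][0..1] holds the i-th input/output pair. */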
12411 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12412 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12413 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12414 cam_rgb_tonemap_curves tonemapCurves;
12415 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12416 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12417 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12418 tonemapCurves.tonemap_points_cnt,
12419 CAM_MAX_TONEMAP_CURVE_SIZE);
12420 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12421 }
12422
12423 /* ch0 = G*/
12424 size_t point = 0;
12425 cam_tonemap_curve_t tonemapCurveGreen;
12426 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12427 for (size_t j = 0; j < 2; j++) {
12428 tonemapCurveGreen.tonemap_points[i][j] =
12429 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12430 point++;
12431 }
12432 }
12433 tonemapCurves.curves[0] = tonemapCurveGreen;
12434
12435 /* ch 1 = B */
12436 point = 0;
12437 cam_tonemap_curve_t tonemapCurveBlue;
12438 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12439 for (size_t j = 0; j < 2; j++) {
12440 tonemapCurveBlue.tonemap_points[i][j] =
12441 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12442 point++;
12443 }
12444 }
12445 tonemapCurves.curves[1] = tonemapCurveBlue;
12446
12447 /* ch 2 = R */
12448 point = 0;
12449 cam_tonemap_curve_t tonemapCurveRed;
12450 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12451 for (size_t j = 0; j < 2; j++) {
12452 tonemapCurveRed.tonemap_points[i][j] =
12453 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12454 point++;
12455 }
12456 }
12457 tonemapCurves.curves[2] = tonemapCurveRed;
12458
12459 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12460 tonemapCurves)) {
12461 rc = BAD_VALUE;
12462 }
12463 }
12464
12465 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12466 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12467 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12468 captureIntent)) {
12469 rc = BAD_VALUE;
12470 }
12471 }
12472
12473 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12474 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12475 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12476 blackLevelLock)) {
12477 rc = BAD_VALUE;
12478 }
12479 }
12480
12481 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12482 uint8_t lensShadingMapMode =
12483 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12484 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12485 lensShadingMapMode)) {
12486 rc = BAD_VALUE;
12487 }
12488 }
12489
12490 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12491 cam_area_t roi;
12492 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012493 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012494
12495 // Map coordinate system from active array to sensor output.
12496 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12497 roi.rect.height);
12498
12499 if (scalerCropSet) {
12500 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12501 }
12502 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12503 rc = BAD_VALUE;
12504 }
12505 }
12506
12507 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12508 cam_area_t roi;
12509 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012510 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012511
12512 // Map coordinate system from active array to sensor output.
12513 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12514 roi.rect.height);
12515
12516 if (scalerCropSet) {
12517 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12518 }
12519 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12520 rc = BAD_VALUE;
12521 }
12522 }
12523
12524 // CDS for non-HFR non-video mode
12525 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12526 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12527 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12528 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12529 LOGE("Invalid CDS mode %d!", *fwk_cds);
12530 } else {
12531 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12532 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12533 rc = BAD_VALUE;
12534 }
12535 }
12536 }
12537
Thierry Strudel04e026f2016-10-10 11:27:36 -070012538 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012539 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012540 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012541 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12542 }
12543 if (m_bVideoHdrEnabled)
12544 vhdr = CAM_VIDEO_HDR_MODE_ON;
12545
Thierry Strudel54dc9782017-02-15 12:12:10 -080012546 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12547
12548 if(vhdr != curr_hdr_state)
12549 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12550
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012551 rc = setVideoHdrMode(mParameters, vhdr);
12552 if (rc != NO_ERROR) {
12553 LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012554 }
12555
12556 //IR
12557 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12558 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12559 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012560 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12561 uint8_t isIRon = 0;
12562
12563 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012564 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12565 LOGE("Invalid IR mode %d!", fwk_ir);
12566 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012567 if(isIRon != curr_ir_state )
12568 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12569
Thierry Strudel04e026f2016-10-10 11:27:36 -070012570 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12571 CAM_INTF_META_IR_MODE, fwk_ir)) {
12572 rc = BAD_VALUE;
12573 }
12574 }
12575 }
12576
Thierry Strudel54dc9782017-02-15 12:12:10 -080012577 //Binning Correction Mode
12578 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12579 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12580 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12581 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12582 || (0 > fwk_binning_correction)) {
12583 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12584 } else {
12585 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12586 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12587 rc = BAD_VALUE;
12588 }
12589 }
12590 }
12591
Thierry Strudel269c81a2016-10-12 12:13:59 -070012592 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12593 float aec_speed;
12594 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12595 LOGD("AEC Speed :%f", aec_speed);
12596 if ( aec_speed < 0 ) {
12597 LOGE("Invalid AEC convergence speed %f!", aec_speed);
12598 } else {
12599 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12600 aec_speed)) {
12601 rc = BAD_VALUE;
12602 }
12603 }
12604 }
12605
12606 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12607 float awb_speed;
12608 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12609 LOGD("AWB Speed :%f", awb_speed);
12610 if ( awb_speed < 0 ) {
12611 LOGE("Invalid AWB convergence speed %f!", awb_speed);
12612 } else {
12613 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12614 awb_speed)) {
12615 rc = BAD_VALUE;
12616 }
12617 }
12618 }
12619
Thierry Strudel3d639192016-09-09 11:52:26 -070012620 // TNR
12621 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12622 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12623 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012624 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012625 cam_denoise_param_t tnr;
12626 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12627 tnr.process_plates =
12628 (cam_denoise_process_type_t)frame_settings.find(
12629 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12630 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012631
12632 if(b_TnrRequested != curr_tnr_state)
12633 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12634
Thierry Strudel3d639192016-09-09 11:52:26 -070012635 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12636 rc = BAD_VALUE;
12637 }
12638 }
12639
Thierry Strudel54dc9782017-02-15 12:12:10 -080012640 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012641 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012642 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012643 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12644 *exposure_metering_mode)) {
12645 rc = BAD_VALUE;
12646 }
12647 }
12648
Thierry Strudel3d639192016-09-09 11:52:26 -070012649 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12650 int32_t fwk_testPatternMode =
12651 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12652 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12653 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12654
12655 if (NAME_NOT_FOUND != testPatternMode) {
12656 cam_test_pattern_data_t testPatternData;
12657 memset(&testPatternData, 0, sizeof(testPatternData));
12658 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12659 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12660 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12661 int32_t *fwk_testPatternData =
12662 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12663 testPatternData.r = fwk_testPatternData[0];
12664 testPatternData.b = fwk_testPatternData[3];
12665 switch (gCamCapability[mCameraId]->color_arrangement) {
12666 case CAM_FILTER_ARRANGEMENT_RGGB:
12667 case CAM_FILTER_ARRANGEMENT_GRBG:
12668 testPatternData.gr = fwk_testPatternData[1];
12669 testPatternData.gb = fwk_testPatternData[2];
12670 break;
12671 case CAM_FILTER_ARRANGEMENT_GBRG:
12672 case CAM_FILTER_ARRANGEMENT_BGGR:
12673 testPatternData.gr = fwk_testPatternData[2];
12674 testPatternData.gb = fwk_testPatternData[1];
12675 break;
12676 default:
12677 LOGE("color arrangement %d is not supported",
12678 gCamCapability[mCameraId]->color_arrangement);
12679 break;
12680 }
12681 }
12682 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12683 testPatternData)) {
12684 rc = BAD_VALUE;
12685 }
12686 } else {
12687 LOGE("Invalid framework sensor test pattern mode %d",
12688 fwk_testPatternMode);
12689 }
12690 }
12691
12692 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12693 size_t count = 0;
12694 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12695 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12696 gps_coords.data.d, gps_coords.count, count);
12697 if (gps_coords.count != count) {
12698 rc = BAD_VALUE;
12699 }
12700 }
12701
12702 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12703 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12704 size_t count = 0;
12705 const char *gps_methods_src = (const char *)
12706 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12707 memset(gps_methods, '\0', sizeof(gps_methods));
12708 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12709 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12710 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12711 if (GPS_PROCESSING_METHOD_SIZE != count) {
12712 rc = BAD_VALUE;
12713 }
12714 }
12715
12716 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12717 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12718 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12719 gps_timestamp)) {
12720 rc = BAD_VALUE;
12721 }
12722 }
12723
12724 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12725 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12726 cam_rotation_info_t rotation_info;
12727 if (orientation == 0) {
12728 rotation_info.rotation = ROTATE_0;
12729 } else if (orientation == 90) {
12730 rotation_info.rotation = ROTATE_90;
12731 } else if (orientation == 180) {
12732 rotation_info.rotation = ROTATE_180;
12733 } else if (orientation == 270) {
12734 rotation_info.rotation = ROTATE_270;
12735 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012736 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012737 rotation_info.streamId = snapshotStreamId;
12738 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12739 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12740 rc = BAD_VALUE;
12741 }
12742 }
12743
12744 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12745 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12746 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12747 rc = BAD_VALUE;
12748 }
12749 }
12750
12751 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12752 uint32_t thumb_quality = (uint32_t)
12753 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12754 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12755 thumb_quality)) {
12756 rc = BAD_VALUE;
12757 }
12758 }
12759
12760 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12761 cam_dimension_t dim;
12762 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12763 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12764 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12765 rc = BAD_VALUE;
12766 }
12767 }
12768
12769 // Internal metadata
12770 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12771 size_t count = 0;
12772 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12773 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12774 privatedata.data.i32, privatedata.count, count);
12775 if (privatedata.count != count) {
12776 rc = BAD_VALUE;
12777 }
12778 }
12779
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012780 // ISO/Exposure Priority
12781 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12782 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12783 cam_priority_mode_t mode =
12784 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12785 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12786 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12787 use_iso_exp_pty.previewOnly = FALSE;
12788 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12789 use_iso_exp_pty.value = *ptr;
12790
12791 if(CAM_ISO_PRIORITY == mode) {
12792 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12793 use_iso_exp_pty)) {
12794 rc = BAD_VALUE;
12795 }
12796 }
12797 else {
12798 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12799 use_iso_exp_pty)) {
12800 rc = BAD_VALUE;
12801 }
12802 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012803
12804 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12805 rc = BAD_VALUE;
12806 }
12807 }
12808 } else {
12809 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12810 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012811 }
12812 }
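    // Note: CAM_INTF_PARM_ZSL_MODE is toggled together with the priority tags
    // above -- it is set to 1 only while ISO or exposure-time priority is in
    // effect, and reset to 0 otherwise.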
12813
12814 // Saturation
12815 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12816 int32_t* use_saturation =
12817 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12818 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12819 rc = BAD_VALUE;
12820 }
12821 }
12822
Thierry Strudel3d639192016-09-09 11:52:26 -070012823 // EV step
12824 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12825 gCamCapability[mCameraId]->exp_compensation_step)) {
12826 rc = BAD_VALUE;
12827 }
12828
12829 // CDS info
12830 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12831 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12832 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12833
12834 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12835 CAM_INTF_META_CDS_DATA, *cdsData)) {
12836 rc = BAD_VALUE;
12837 }
12838 }
12839
Shuzhen Wang19463d72016-03-08 11:09:52 -080012840 // Hybrid AE
12841 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12842 uint8_t *hybrid_ae = (uint8_t *)
12843 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12844
12845 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12846 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12847 rc = BAD_VALUE;
12848 }
12849 }
12850
Shuzhen Wang14415f52016-11-16 18:26:18 -080012851 // Histogram
12852 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12853 uint8_t histogramMode =
12854 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12855 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12856 histogramMode)) {
12857 rc = BAD_VALUE;
12858 }
12859 }
12860
12861 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12862 int32_t histogramBins =
12863 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12864 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12865 histogramBins)) {
12866 rc = BAD_VALUE;
12867 }
12868 }
12869
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012870 // Tracking AF
12871 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12872 uint8_t trackingAfTrigger =
12873 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12874 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12875 trackingAfTrigger)) {
12876 rc = BAD_VALUE;
12877 }
12878 }
12879
Thierry Strudel3d639192016-09-09 11:52:26 -070012880 return rc;
12881}
12882
12883/*===========================================================================
12884 * FUNCTION : captureResultCb
12885 *
12886 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12887 *
12888 * PARAMETERS :
12889 * @frame : frame information from mm-camera-interface
12890 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12891 * @userdata: userdata
12892 *
12893 * RETURN : NONE
12894 *==========================================================================*/
12895void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12896 camera3_stream_buffer_t *buffer,
12897 uint32_t frame_number, bool isInputBuffer, void *userdata)
12898{
12899 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12900 if (hw == NULL) {
12901 LOGE("Invalid hw %p", hw);
12902 return;
12903 }
12904
12905 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12906 return;
12907}
12908
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012909/*===========================================================================
12910 * FUNCTION : setBufferErrorStatus
12911 *
12912 * DESCRIPTION: Callback handler for channels to report any buffer errors
12913 *
12914 * PARAMETERS :
12915 * @ch : Channel on which buffer error is reported from
12916 * @frame_number : frame number on which buffer error is reported on
12917 * @buffer_status : buffer error status
12918 * @userdata: userdata
12919 *
12920 * RETURN : NONE
12921 *==========================================================================*/
12922void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12923 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12924{
12925 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12926 if (hw == NULL) {
12927 LOGE("Invalid hw %p", hw);
12928 return;
12929 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012930
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012931 hw->setBufferErrorStatus(ch, frame_number, err);
12932 return;
12933}
12934
12935void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12936 uint32_t frameNumber, camera3_buffer_status_t err)
12937{
12938 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12939 pthread_mutex_lock(&mMutex);
12940
12941 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12942 if (req.frame_number != frameNumber)
12943 continue;
12944 for (auto& k : req.mPendingBufferList) {
12945 if(k.stream->priv == ch) {
12946 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12947 }
12948 }
12949 }
12950
12951 pthread_mutex_unlock(&mMutex);
12952 return;
12953}
Thierry Strudel3d639192016-09-09 11:52:26 -070012954/*===========================================================================
12955 * FUNCTION : initialize
12956 *
12957 * DESCRIPTION: Pass framework callback pointers to HAL
12958 *
12959 * PARAMETERS :
12960 *
12961 *
12962 * RETURN : Success : 0
12963 * Failure: -ENODEV
12964 *==========================================================================*/
12965
12966int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12967 const camera3_callback_ops_t *callback_ops)
12968{
12969 LOGD("E");
12970 QCamera3HardwareInterface *hw =
12971 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12972 if (!hw) {
12973 LOGE("NULL camera device");
12974 return -ENODEV;
12975 }
12976
12977 int rc = hw->initialize(callback_ops);
12978 LOGD("X");
12979 return rc;
12980}
12981
12982/*===========================================================================
12983 * FUNCTION : configure_streams
12984 *
 * DESCRIPTION: Set up the set of output streams requested by the framework
 *
 * PARAMETERS :
 * @device : camera3 device handle
 * @stream_list : stream configuration requested by the framework
 *
12990 * RETURN : Success: 0
12991 * Failure: -EINVAL (if stream configuration is invalid)
12992 * -ENODEV (fatal error)
12993 *==========================================================================*/
12994
12995int QCamera3HardwareInterface::configure_streams(
12996 const struct camera3_device *device,
12997 camera3_stream_configuration_t *stream_list)
12998{
12999 LOGD("E");
13000 QCamera3HardwareInterface *hw =
13001 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13002 if (!hw) {
13003 LOGE("NULL camera device");
13004 return -ENODEV;
13005 }
13006 int rc = hw->configureStreams(stream_list);
13007 LOGD("X");
13008 return rc;
13009}
13010
13011/*===========================================================================
13012 * FUNCTION : construct_default_request_settings
13013 *
13014 * DESCRIPTION: Configure a settings buffer to meet the required use case
13015 *
13016 * PARAMETERS :
13017 *
13018 *
13019 * RETURN : Success: Return valid metadata
13020 * Failure: Return NULL
13021 *==========================================================================*/
13022const camera_metadata_t* QCamera3HardwareInterface::
13023 construct_default_request_settings(const struct camera3_device *device,
13024 int type)
13025{
13026
13027 LOGD("E");
13028 camera_metadata_t* fwk_metadata = NULL;
13029 QCamera3HardwareInterface *hw =
13030 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13031 if (!hw) {
13032 LOGE("NULL camera device");
13033 return NULL;
13034 }
13035
13036 fwk_metadata = hw->translateCapabilityToMetadata(type);
13037
13038 LOGD("X");
13039 return fwk_metadata;
13040}
13041
13042/*===========================================================================
13043 * FUNCTION : process_capture_request
13044 *
 * DESCRIPTION: Submit a new capture request to the HAL for processing
 *
 * PARAMETERS :
 * @device : camera3 device handle
 * @request : capture request to be processed
 *
 * RETURN : Success: 0
 *          Failure: negative error code
13051 *==========================================================================*/
13052int QCamera3HardwareInterface::process_capture_request(
13053 const struct camera3_device *device,
13054 camera3_capture_request_t *request)
13055{
13056 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013057 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013058 QCamera3HardwareInterface *hw =
13059 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13060 if (!hw) {
13061 LOGE("NULL camera device");
13062 return -EINVAL;
13063 }
13064
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013065 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013066 LOGD("X");
13067 return rc;
13068}
13069
13070/*===========================================================================
13071 * FUNCTION : dump
13072 *
 * DESCRIPTION: Dump HAL state and debug information into the given file descriptor
 *
 * PARAMETERS :
 * @device : camera3 device handle
 * @fd : file descriptor to dump into
 *
 * RETURN : None
13079 *==========================================================================*/
13080
13081void QCamera3HardwareInterface::dump(
13082 const struct camera3_device *device, int fd)
13083{
13084 /* Log level property is read when "adb shell dumpsys media.camera" is
13085 called so that the log level can be controlled without restarting
13086 the media server */
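    // Example (debug builds): "adb shell setprop persist.camera.hal.debug 4" followed by
    // "adb shell dumpsys media.camera" applies the new log level immediately.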
13087 getLogLevel();
13088
13089 LOGD("E");
13090 QCamera3HardwareInterface *hw =
13091 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13092 if (!hw) {
13093 LOGE("NULL camera device");
13094 return;
13095 }
13096
13097 hw->dump(fd);
13098 LOGD("X");
13099 return;
13100}
13101
13102/*===========================================================================
13103 * FUNCTION : flush
13104 *
 * DESCRIPTION: Flush all in-flight captures; pending buffers and results are
 *              returned with errors before this call completes
 *
 * PARAMETERS :
 * @device : camera3 device handle
 *
 * RETURN : Success: 0
 *          Failure: -EINVAL (bad device) or -ENODEV (fatal error)
13111 *==========================================================================*/
13112
13113int QCamera3HardwareInterface::flush(
13114 const struct camera3_device *device)
13115{
13116 int rc;
13117 LOGD("E");
13118 QCamera3HardwareInterface *hw =
13119 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13120 if (!hw) {
13121 LOGE("NULL camera device");
13122 return -EINVAL;
13123 }
13124
13125 pthread_mutex_lock(&hw->mMutex);
13126 // Validate current state
13127 switch (hw->mState) {
13128 case STARTED:
13129 /* valid state */
13130 break;
13131
13132 case ERROR:
13133 pthread_mutex_unlock(&hw->mMutex);
13134 hw->handleCameraDeviceError();
13135 return -ENODEV;
13136
13137 default:
13138 LOGI("Flush returned during state %d", hw->mState);
13139 pthread_mutex_unlock(&hw->mMutex);
13140 return 0;
13141 }
13142 pthread_mutex_unlock(&hw->mMutex);
13143
13144 rc = hw->flush(true /* restart channels */ );
13145 LOGD("X");
13146 return rc;
13147}
13148
13149/*===========================================================================
13150 * FUNCTION : close_camera_device
13151 *
 * DESCRIPTION: Close the camera device and free the HAL instance
 *
 * PARAMETERS :
 * @device : hw device handle of the camera to close
 *
 * RETURN : Success: NO_ERROR
 *          Failure: BAD_VALUE (NULL device)
13158 *==========================================================================*/
13159int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13160{
13161 int ret = NO_ERROR;
13162 QCamera3HardwareInterface *hw =
13163 reinterpret_cast<QCamera3HardwareInterface *>(
13164 reinterpret_cast<camera3_device_t *>(device)->priv);
13165 if (!hw) {
13166 LOGE("NULL camera device");
13167 return BAD_VALUE;
13168 }
13169
13170 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13171 delete hw;
13172 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013173 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013174 return ret;
13175}
13176
13177/*===========================================================================
13178 * FUNCTION : getWaveletDenoiseProcessPlate
13179 *
13180 * DESCRIPTION: query wavelet denoise process plate
13181 *
13182 * PARAMETERS : None
13183 *
 * RETURN : WNR process plate value
13185 *==========================================================================*/
13186cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13187{
13188 char prop[PROPERTY_VALUE_MAX];
13189 memset(prop, 0, sizeof(prop));
13190 property_get("persist.denoise.process.plates", prop, "0");
13191 int processPlate = atoi(prop);
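    // Example (debug builds): "adb shell setprop persist.denoise.process.plates 2"
    // selects the streamlined Y/CbCr plate below; values outside 0-3 fall back to
    // the default case.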
13192 switch(processPlate) {
13193 case 0:
13194 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13195 case 1:
13196 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13197 case 2:
13198 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13199 case 3:
13200 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13201 default:
13202 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13203 }
13204}
13205
13206
13207/*===========================================================================
13208 * FUNCTION : getTemporalDenoiseProcessPlate
13209 *
13210 * DESCRIPTION: query temporal denoise process plate
13211 *
13212 * PARAMETERS : None
13213 *
 * RETURN : TNR process plate value
13215 *==========================================================================*/
13216cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13217{
13218 char prop[PROPERTY_VALUE_MAX];
13219 memset(prop, 0, sizeof(prop));
13220 property_get("persist.tnr.process.plates", prop, "0");
13221 int processPlate = atoi(prop);
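    // Same property-driven selection as the WNR plate above, but read from
    // persist.tnr.process.plates for temporal noise reduction.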
13222 switch(processPlate) {
13223 case 0:
13224 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13225 case 1:
13226 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13227 case 2:
13228 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13229 case 3:
13230 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13231 default:
13232 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13233 }
13234}
13235
13236
13237/*===========================================================================
13238 * FUNCTION : extractSceneMode
13239 *
13240 * DESCRIPTION: Extract scene mode from frameworks set metadata
13241 *
13242 * PARAMETERS :
13243 * @frame_settings: CameraMetadata reference
 * @metaMode: ANDROID_CONTROL_MODE value set by the framework
 * @hal_metadata: hal metadata structure
 *
 * RETURN : int32_t type of status
 *          NO_ERROR -- success
 *          non-zero failure code
13248 *==========================================================================*/
13249int32_t QCamera3HardwareInterface::extractSceneMode(
13250 const CameraMetadata &frame_settings, uint8_t metaMode,
13251 metadata_buffer_t *hal_metadata)
13252{
13253 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013254 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13255
13256 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13257 LOGD("Ignoring control mode OFF_KEEP_STATE");
13258 return NO_ERROR;
13259 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013260
13261 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13262 camera_metadata_ro_entry entry =
13263 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13264 if (0 == entry.count)
13265 return rc;
13266
13267 uint8_t fwk_sceneMode = entry.data.u8[0];
13268
13269 int val = lookupHalName(SCENE_MODES_MAP,
13270 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13271 fwk_sceneMode);
13272 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013273 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013274 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013275 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013276 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013277
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013278 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13279 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13280 }
13281
13282 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
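        // When sensor HDR is not active, an HDR scene request is satisfied with
        // multi-frame bracketing in post-processing instead. Note that sceneMode
        // holds the HAL enum at this point.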
        if (sceneMode == CAM_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013284 cam_hdr_param_t hdr_params;
13285 hdr_params.hdr_enable = 1;
13286 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13287 hdr_params.hdr_need_1x = false;
13288 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13289 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13290 rc = BAD_VALUE;
13291 }
13292 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013293
Thierry Strudel3d639192016-09-09 11:52:26 -070013294 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13295 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13296 rc = BAD_VALUE;
13297 }
13298 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013299
13300 if (mForceHdrSnapshot) {
13301 cam_hdr_param_t hdr_params;
13302 hdr_params.hdr_enable = 1;
13303 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13304 hdr_params.hdr_need_1x = false;
13305 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13306 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13307 rc = BAD_VALUE;
13308 }
13309 }
13310
Thierry Strudel3d639192016-09-09 11:52:26 -070013311 return rc;
13312}
13313
13314/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013315 * FUNCTION : setVideoHdrMode
13316 *
13317 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13318 *
13319 * PARAMETERS :
13320 * @hal_metadata: hal metadata structure
 * @vhdr: requested video HDR mode (cam_video_hdr_mode_t)
 *
 * RETURN : int32_t type of status
 *          NO_ERROR -- success
 *          BAD_VALUE -- invalid video HDR mode
13324 *==========================================================================*/
13325int32_t QCamera3HardwareInterface::setVideoHdrMode(
13326 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13327{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013328 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13329 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13330 }
13331
13332 LOGE("Invalid Video HDR mode %d!", vhdr);
13333 return BAD_VALUE;
13334}
13335
13336/*===========================================================================
13337 * FUNCTION : setSensorHDR
13338 *
13339 * DESCRIPTION: Enable/disable sensor HDR.
13340 *
13341 * PARAMETERS :
13342 * @hal_metadata: hal metadata structure
 * @enable: boolean whether to enable/disable sensor HDR
 * @isVideoHdrEnable: true when invoked for video HDR rather than scene mode HDR
 *
 * RETURN : int32_t type of status
 *          NO_ERROR -- success
 *          BAD_VALUE -- unsupported or invalid sensor HDR mode
13346 *==========================================================================*/
13347int32_t QCamera3HardwareInterface::setSensorHDR(
13348 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13349{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013350 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013351 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13352
13353 if (enable) {
13354 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13355 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13356 #ifdef _LE_CAMERA_
13357 //Default to staggered HDR for IOT
13358 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13359 #else
13360 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13361 #endif
13362 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
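        // The property value is interpreted as cam_sensor_hdr_type_t; judging from
        // the switch below and the IoT default above: 0 = off, 1 = in-sensor,
        // 2 = zigzag, 3 = staggered (assumption based on this file only).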
13363 }
13364
13365 bool isSupported = false;
13366 switch (sensor_hdr) {
13367 case CAM_SENSOR_HDR_IN_SENSOR:
13368 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13369 CAM_QCOM_FEATURE_SENSOR_HDR) {
13370 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013371 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013372 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013373 break;
13374 case CAM_SENSOR_HDR_ZIGZAG:
13375 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13376 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13377 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013378 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013379 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013380 break;
13381 case CAM_SENSOR_HDR_STAGGERED:
13382 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13383 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13384 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013385 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013386 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013387 break;
13388 case CAM_SENSOR_HDR_OFF:
13389 isSupported = true;
13390 LOGD("Turning off sensor HDR");
13391 break;
13392 default:
13393 LOGE("HDR mode %d not supported", sensor_hdr);
13394 rc = BAD_VALUE;
13395 break;
13396 }
13397
13398 if(isSupported) {
13399 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13400 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13401 rc = BAD_VALUE;
13402 } else {
13403 if(!isVideoHdrEnable)
13404 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013405 }
13406 }
13407 return rc;
13408}
13409
13410/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013411 * FUNCTION : needRotationReprocess
13412 *
13413 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13414 *
13415 * PARAMETERS : none
13416 *
13417 * RETURN : true: needed
13418 * false: no need
13419 *==========================================================================*/
13420bool QCamera3HardwareInterface::needRotationReprocess()
13421{
13422 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13423 // current rotation is not zero, and pp has the capability to process rotation
13424 LOGH("need do reprocess for rotation");
13425 return true;
13426 }
13427
13428 return false;
13429}
13430
13431/*===========================================================================
13432 * FUNCTION : needReprocess
13433 *
 * DESCRIPTION: if reprocess is needed
13435 *
13436 * PARAMETERS : none
13437 *
13438 * RETURN : true: needed
13439 * false: no need
13440 *==========================================================================*/
13441bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13442{
13443 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13444 // TODO: add for ZSL HDR later
13445 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13446 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13447 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13448 return true;
13449 } else {
13450 LOGH("already post processed frame");
13451 return false;
13452 }
13453 }
13454 return needRotationReprocess();
13455}
13456
13457/*===========================================================================
13458 * FUNCTION : needJpegExifRotation
13459 *
 * DESCRIPTION: if JPEG EXIF rotation is needed (used when pp cannot rotate)
13461 *
13462 * PARAMETERS : none
13463 *
13464 * RETURN : true: needed
13465 * false: no need
13466 *==========================================================================*/
13467bool QCamera3HardwareInterface::needJpegExifRotation()
13468{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013469 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013470 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13471 LOGD("Need use Jpeg EXIF Rotation");
13472 return true;
13473 }
13474 return false;
13475}
13476
13477/*===========================================================================
13478 * FUNCTION : addOfflineReprocChannel
13479 *
13480 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13481 * coming from input channel
13482 *
13483 * PARAMETERS :
13484 * @config : reprocess configuration
13485 * @inputChHandle : pointer to the input (source) channel
13486 *
13487 *
13488 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13489 *==========================================================================*/
13490QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13491 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13492{
13493 int32_t rc = NO_ERROR;
13494 QCamera3ReprocessChannel *pChannel = NULL;
13495
13496 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013497 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13498 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013499 if (NULL == pChannel) {
13500 LOGE("no mem for reprocess channel");
13501 return NULL;
13502 }
13503
13504 rc = pChannel->initialize(IS_TYPE_NONE);
13505 if (rc != NO_ERROR) {
13506 LOGE("init reprocess channel failed, ret = %d", rc);
13507 delete pChannel;
13508 return NULL;
13509 }
13510
13511 // pp feature config
13512 cam_pp_feature_config_t pp_config;
13513 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13514
13515 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13516 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13517 & CAM_QCOM_FEATURE_DSDN) {
        // Use CPP CDS in case h/w supports it.
13519 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13520 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13521 }
13522 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13523 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13524 }
13525
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013526 if (config.hdr_param.hdr_enable) {
13527 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13528 pp_config.hdr_param = config.hdr_param;
13529 }
13530
13531 if (mForceHdrSnapshot) {
13532 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13533 pp_config.hdr_param.hdr_enable = 1;
13534 pp_config.hdr_param.hdr_need_1x = 0;
13535 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13536 }
13537
Thierry Strudel3d639192016-09-09 11:52:26 -070013538 rc = pChannel->addReprocStreamsFromSource(pp_config,
13539 config,
13540 IS_TYPE_NONE,
13541 mMetadataChannel);
13542
13543 if (rc != NO_ERROR) {
13544 delete pChannel;
13545 return NULL;
13546 }
13547 return pChannel;
13548}
13549
13550/*===========================================================================
13551 * FUNCTION : getMobicatMask
13552 *
13553 * DESCRIPTION: returns mobicat mask
13554 *
13555 * PARAMETERS : none
13556 *
13557 * RETURN : mobicat mask
13558 *
13559 *==========================================================================*/
13560uint8_t QCamera3HardwareInterface::getMobicatMask()
13561{
13562 return m_MobicatMask;
13563}
13564
13565/*===========================================================================
13566 * FUNCTION : setMobicat
13567 *
13568 * DESCRIPTION: set Mobicat on/off.
13569 *
13570 * PARAMETERS :
13571 * @params : none
13572 *
13573 * RETURN : int32_t type of status
13574 * NO_ERROR -- success
 *              non-zero failure code
13576 *==========================================================================*/
13577int32_t QCamera3HardwareInterface::setMobicat()
13578{
13579 char value [PROPERTY_VALUE_MAX];
13580 property_get("persist.camera.mobicat", value, "0");
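    // Example (debug builds): "adb shell setprop persist.camera.mobicat 1" enables
    // Mobicat; the VFE/PP commands below then reload chromatix so tuning metadata
    // can be attached to captures (assumption: the payload format is defined by the
    // vendor tuning tools, not by this HAL).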
13581 int32_t ret = NO_ERROR;
13582 uint8_t enableMobi = (uint8_t)atoi(value);
13583
13584 if (enableMobi) {
13585 tune_cmd_t tune_cmd;
13586 tune_cmd.type = SET_RELOAD_CHROMATIX;
13587 tune_cmd.module = MODULE_ALL;
13588 tune_cmd.value = TRUE;
13589 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13590 CAM_INTF_PARM_SET_VFE_COMMAND,
13591 tune_cmd);
13592
13593 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13594 CAM_INTF_PARM_SET_PP_COMMAND,
13595 tune_cmd);
13596 }
13597 m_MobicatMask = enableMobi;
13598
13599 return ret;
13600}
13601
13602/*===========================================================================
13603* FUNCTION : getLogLevel
13604*
13605* DESCRIPTION: Reads the log level property into a variable
13606*
13607* PARAMETERS :
13608* None
13609*
13610* RETURN :
13611* None
13612*==========================================================================*/
13613void QCamera3HardwareInterface::getLogLevel()
13614{
13615 char prop[PROPERTY_VALUE_MAX];
13616 uint32_t globalLogLevel = 0;
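    // persist.camera.hal.debug raises verbosity for this HAL only, while
    // persist.camera.global.debug applies to all camera modules; the higher of the
    // two wins below.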
13617
13618 property_get("persist.camera.hal.debug", prop, "0");
13619 int val = atoi(prop);
13620 if (0 <= val) {
13621 gCamHal3LogLevel = (uint32_t)val;
13622 }
13623
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013624 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013625 gKpiDebugLevel = atoi(prop);
13626
13627 property_get("persist.camera.global.debug", prop, "0");
13628 val = atoi(prop);
13629 if (0 <= val) {
13630 globalLogLevel = (uint32_t)val;
13631 }
13632
13633 /* Highest log level among hal.logs and global.logs is selected */
13634 if (gCamHal3LogLevel < globalLogLevel)
13635 gCamHal3LogLevel = globalLogLevel;
13636
13637 return;
13638}
13639
13640/*===========================================================================
13641 * FUNCTION : validateStreamRotations
13642 *
13643 * DESCRIPTION: Check if the rotations requested are supported
13644 *
13645 * PARAMETERS :
13646 * @stream_list : streams to be configured
13647 *
13648 * RETURN : NO_ERROR on success
13649 * -EINVAL on failure
13650 *
13651 *==========================================================================*/
13652int QCamera3HardwareInterface::validateStreamRotations(
13653 camera3_stream_configuration_t *streamList)
13654{
13655 int rc = NO_ERROR;
13656
13657 /*
13658 * Loop through all streams requested in configuration
13659 * Check if unsupported rotations have been requested on any of them
13660 */
13661 for (size_t j = 0; j < streamList->num_streams; j++){
13662 camera3_stream_t *newStream = streamList->streams[j];
13663
13664 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13665 bool isImplDef = (newStream->format ==
13666 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13667 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13668 isImplDef);
13669
13670 if (isRotated && (!isImplDef || isZsl)) {
13671 LOGE("Error: Unsupported rotation of %d requested for stream"
13672 "type:%d and stream format:%d",
13673 newStream->rotation, newStream->stream_type,
13674 newStream->format);
13675 rc = -EINVAL;
13676 break;
13677 }
13678 }
13679
13680 return rc;
13681}
13682
13683/*===========================================================================
13684* FUNCTION : getFlashInfo
13685*
13686* DESCRIPTION: Retrieve information about whether the device has a flash.
13687*
13688* PARAMETERS :
13689* @cameraId : Camera id to query
13690* @hasFlash : Boolean indicating whether there is a flash device
13691* associated with given camera
13692* @flashNode : If a flash device exists, this will be its device node.
13693*
13694* RETURN :
13695* None
13696*==========================================================================*/
13697void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13698 bool& hasFlash,
13699 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13700{
13701 cam_capability_t* camCapability = gCamCapability[cameraId];
13702 if (NULL == camCapability) {
13703 hasFlash = false;
13704 flashNode[0] = '\0';
13705 } else {
13706 hasFlash = camCapability->flash_available;
13707 strlcpy(flashNode,
13708 (char*)camCapability->flash_dev_name,
13709 QCAMERA_MAX_FILEPATH_LENGTH);
13710 }
13711}
13712
13713/*===========================================================================
13714* FUNCTION : getEepromVersionInfo
13715*
13716* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13717*
13718* PARAMETERS : None
13719*
13720* RETURN : string describing EEPROM version
13721* "\0" if no such info available
13722*==========================================================================*/
13723const char *QCamera3HardwareInterface::getEepromVersionInfo()
13724{
13725 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13726}
13727
13728/*===========================================================================
13729* FUNCTION : getLdafCalib
13730*
13731* DESCRIPTION: Retrieve Laser AF calibration data
13732*
13733* PARAMETERS : None
13734*
13735* RETURN : Two uint32_t describing laser AF calibration data
13736* NULL if none is available.
13737*==========================================================================*/
13738const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13739{
13740 if (mLdafCalibExist) {
13741 return &mLdafCalib[0];
13742 } else {
13743 return NULL;
13744 }
13745}
13746
13747/*===========================================================================
13748 * FUNCTION : dynamicUpdateMetaStreamInfo
13749 *
13750 * DESCRIPTION: This function:
13751 * (1) stops all the channels
13752 * (2) returns error on pending requests and buffers
13753 * (3) sends metastream_info in setparams
13754 * (4) starts all channels
 * This is useful when the sensor has to be restarted to apply any
13756 * settings such as frame rate from a different sensor mode
13757 *
13758 * PARAMETERS : None
13759 *
13760 * RETURN : NO_ERROR on success
13761 * Error codes on failure
13762 *
13763 *==========================================================================*/
13764int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13765{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013766 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013767 int rc = NO_ERROR;
13768
13769 LOGD("E");
13770
13771 rc = stopAllChannels();
13772 if (rc < 0) {
13773 LOGE("stopAllChannels failed");
13774 return rc;
13775 }
13776
13777 rc = notifyErrorForPendingRequests();
13778 if (rc < 0) {
13779 LOGE("notifyErrorForPendingRequests failed");
13780 return rc;
13781 }
13782
13783 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13784 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13785 "Format:%d",
13786 mStreamConfigInfo.type[i],
13787 mStreamConfigInfo.stream_sizes[i].width,
13788 mStreamConfigInfo.stream_sizes[i].height,
13789 mStreamConfigInfo.postprocess_mask[i],
13790 mStreamConfigInfo.format[i]);
13791 }
13792
13793 /* Send meta stream info once again so that ISP can start */
13794 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13795 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13796 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13797 mParameters);
13798 if (rc < 0) {
13799 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13800 }
13801
13802 rc = startAllChannels();
13803 if (rc < 0) {
13804 LOGE("startAllChannels failed");
13805 return rc;
13806 }
13807
13808 LOGD("X");
13809 return rc;
13810}
13811
13812/*===========================================================================
13813 * FUNCTION : stopAllChannels
13814 *
13815 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13816 *
13817 * PARAMETERS : None
13818 *
13819 * RETURN : NO_ERROR on success
13820 * Error codes on failure
13821 *
13822 *==========================================================================*/
13823int32_t QCamera3HardwareInterface::stopAllChannels()
13824{
13825 int32_t rc = NO_ERROR;
13826
13827 LOGD("Stopping all channels");
13828 // Stop the Streams/Channels
13829 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13830 it != mStreamInfo.end(); it++) {
13831 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13832 if (channel) {
13833 channel->stop();
13834 }
13835 (*it)->status = INVALID;
13836 }
13837
13838 if (mSupportChannel) {
13839 mSupportChannel->stop();
13840 }
13841 if (mAnalysisChannel) {
13842 mAnalysisChannel->stop();
13843 }
13844 if (mRawDumpChannel) {
13845 mRawDumpChannel->stop();
13846 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013847 if (mHdrPlusRawSrcChannel) {
13848 mHdrPlusRawSrcChannel->stop();
13849 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013850 if (mMetadataChannel) {
13851 /* If content of mStreamInfo is not 0, there is metadata stream */
13852 mMetadataChannel->stop();
13853 }
13854
13855 LOGD("All channels stopped");
13856 return rc;
13857}
13858
13859/*===========================================================================
13860 * FUNCTION : startAllChannels
13861 *
13862 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13863 *
13864 * PARAMETERS : None
13865 *
13866 * RETURN : NO_ERROR on success
13867 * Error codes on failure
13868 *
13869 *==========================================================================*/
13870int32_t QCamera3HardwareInterface::startAllChannels()
13871{
13872 int32_t rc = NO_ERROR;
13873
13874 LOGD("Start all channels ");
13875 // Start the Streams/Channels
13876 if (mMetadataChannel) {
13877 /* If content of mStreamInfo is not 0, there is metadata stream */
13878 rc = mMetadataChannel->start();
13879 if (rc < 0) {
13880 LOGE("META channel start failed");
13881 return rc;
13882 }
13883 }
13884 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13885 it != mStreamInfo.end(); it++) {
13886 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13887 if (channel) {
13888 rc = channel->start();
13889 if (rc < 0) {
13890 LOGE("channel start failed");
13891 return rc;
13892 }
13893 }
13894 }
13895 if (mAnalysisChannel) {
13896 mAnalysisChannel->start();
13897 }
13898 if (mSupportChannel) {
13899 rc = mSupportChannel->start();
13900 if (rc < 0) {
13901 LOGE("Support channel start failed");
13902 return rc;
13903 }
13904 }
13905 if (mRawDumpChannel) {
13906 rc = mRawDumpChannel->start();
13907 if (rc < 0) {
13908 LOGE("RAW dump channel start failed");
13909 return rc;
13910 }
13911 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013912 if (mHdrPlusRawSrcChannel) {
13913 rc = mHdrPlusRawSrcChannel->start();
13914 if (rc < 0) {
13915 LOGE("HDR+ RAW channel start failed");
13916 return rc;
13917 }
13918 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013919
13920 LOGD("All channels started");
13921 return rc;
13922}
13923
13924/*===========================================================================
13925 * FUNCTION : notifyErrorForPendingRequests
13926 *
13927 * DESCRIPTION: This function sends error for all the pending requests/buffers
13928 *
13929 * PARAMETERS : None
13930 *
13931 * RETURN : Error codes
13932 * NO_ERROR on success
13933 *
13934 *==========================================================================*/
13935int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13936{
Emilian Peev7650c122017-01-19 08:24:33 -080013937 notifyErrorFoPendingDepthData(mDepthChannel);
13938
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013939 auto pendingRequest = mPendingRequestsList.begin();
13940 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070013941
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013942 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
13943 // buffers (for which buffers aren't sent yet).
13944 while (pendingRequest != mPendingRequestsList.end() ||
13945 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
13946 if (pendingRequest == mPendingRequestsList.end() ||
13947 pendingBuffer->frame_number < pendingRequest->frame_number) {
13948 // If metadata for this frame was sent, notify about a buffer error and returns buffers
13949 // with error.
13950 for (auto &info : pendingBuffer->mPendingBufferList) {
13951 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070013952 camera3_notify_msg_t notify_msg;
13953 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13954 notify_msg.type = CAMERA3_MSG_ERROR;
13955 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013956 notify_msg.message.error.error_stream = info.stream;
13957 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013958 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013959
13960 camera3_stream_buffer_t buffer = {};
13961 buffer.acquire_fence = -1;
13962 buffer.release_fence = -1;
13963 buffer.buffer = info.buffer;
13964 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
13965 buffer.stream = info.stream;
13966 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070013967 }
13968
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013969 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
13970 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
13971 pendingBuffer->frame_number > pendingRequest->frame_number) {
13972 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070013973 camera3_notify_msg_t notify_msg;
13974 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13975 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013976 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
13977 notify_msg.message.error.error_stream = nullptr;
13978 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013979 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013980
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013981 if (pendingRequest->input_buffer != nullptr) {
13982 camera3_capture_result result = {};
13983 result.frame_number = pendingRequest->frame_number;
13984 result.result = nullptr;
13985 result.input_buffer = pendingRequest->input_buffer;
13986 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013987 }
13988
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013989 mShutterDispatcher.clear(pendingRequest->frame_number);
13990 pendingRequest = mPendingRequestsList.erase(pendingRequest);
13991 } else {
13992 // If both buffers and result metadata weren't sent yet, notify about a request error
13993 // and return buffers with error.
13994 for (auto &info : pendingBuffer->mPendingBufferList) {
13995 camera3_notify_msg_t notify_msg;
13996 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13997 notify_msg.type = CAMERA3_MSG_ERROR;
13998 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13999 notify_msg.message.error.error_stream = info.stream;
14000 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14001 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014002
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014003 camera3_stream_buffer_t buffer = {};
14004 buffer.acquire_fence = -1;
14005 buffer.release_fence = -1;
14006 buffer.buffer = info.buffer;
14007 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14008 buffer.stream = info.stream;
14009 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14010 }
14011
14012 if (pendingRequest->input_buffer != nullptr) {
14013 camera3_capture_result result = {};
14014 result.frame_number = pendingRequest->frame_number;
14015 result.result = nullptr;
14016 result.input_buffer = pendingRequest->input_buffer;
14017 orchestrateResult(&result);
14018 }
14019
14020 mShutterDispatcher.clear(pendingRequest->frame_number);
14021 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14022 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014023 }
14024 }
14025
14026 /* Reset pending frame Drop list and requests list */
14027 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014028 mShutterDispatcher.clear();
14029 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014030 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014031 LOGH("Cleared all the pending buffers ");
14032
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014033 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014034}
14035
14036bool QCamera3HardwareInterface::isOnEncoder(
14037 const cam_dimension_t max_viewfinder_size,
14038 uint32_t width, uint32_t height)
14039{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014040 return ((width > (uint32_t)max_viewfinder_size.width) ||
14041 (height > (uint32_t)max_viewfinder_size.height) ||
14042 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14043 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014044}
14045
14046/*===========================================================================
14047 * FUNCTION : setBundleInfo
14048 *
14049 * DESCRIPTION: Set bundle info for all streams that are bundle.
14050 *
14051 * PARAMETERS : None
14052 *
14053 * RETURN : NO_ERROR on success
14054 * Error codes on failure
14055 *==========================================================================*/
14056int32_t QCamera3HardwareInterface::setBundleInfo()
14057{
14058 int32_t rc = NO_ERROR;
14059
14060 if (mChannelHandle) {
14061 cam_bundle_config_t bundleInfo;
14062 memset(&bundleInfo, 0, sizeof(bundleInfo));
14063 rc = mCameraHandle->ops->get_bundle_info(
14064 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14065 if (rc != NO_ERROR) {
14066 LOGE("get_bundle_info failed");
14067 return rc;
14068 }
14069 if (mAnalysisChannel) {
14070 mAnalysisChannel->setBundleInfo(bundleInfo);
14071 }
14072 if (mSupportChannel) {
14073 mSupportChannel->setBundleInfo(bundleInfo);
14074 }
14075 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14076 it != mStreamInfo.end(); it++) {
14077 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14078 channel->setBundleInfo(bundleInfo);
14079 }
14080 if (mRawDumpChannel) {
14081 mRawDumpChannel->setBundleInfo(bundleInfo);
14082 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014083 if (mHdrPlusRawSrcChannel) {
14084 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14085 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014086 }
14087
14088 return rc;
14089}
14090
14091/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014092 * FUNCTION : setInstantAEC
14093 *
14094 * DESCRIPTION: Set Instant AEC related params.
14095 *
14096 * PARAMETERS :
14097 * @meta: CameraMetadata reference
14098 *
14099 * RETURN : NO_ERROR on success
14100 * Error codes on failure
14101 *==========================================================================*/
14102int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14103{
14104 int32_t rc = NO_ERROR;
14105 uint8_t val = 0;
14106 char prop[PROPERTY_VALUE_MAX];
14107
14108 // First try to configure instant AEC from framework metadata
14109 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14110 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14111 }
14112
14113 // If framework did not set this value, try to read from set prop.
14114 if (val == 0) {
14115 memset(prop, 0, sizeof(prop));
14116 property_get("persist.camera.instant.aec", prop, "0");
14117 val = (uint8_t)atoi(prop);
14118 }
14119
14120 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14121 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14122 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14123 mInstantAEC = val;
14124 mInstantAECSettledFrameNumber = 0;
14125 mInstantAecFrameIdxCount = 0;
14126 LOGH("instantAEC value set %d",val);
14127 if (mInstantAEC) {
14128 memset(prop, 0, sizeof(prop));
14129 property_get("persist.camera.ae.instant.bound", prop, "10");
14130 int32_t aec_frame_skip_cnt = atoi(prop);
14131 if (aec_frame_skip_cnt >= 0) {
14132 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14133 } else {
14134 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14135 rc = BAD_VALUE;
14136 }
14137 }
14138 } else {
14139 LOGE("Bad instant aec value set %d", val);
14140 rc = BAD_VALUE;
14141 }
14142 return rc;
14143}
14144
14145/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014146 * FUNCTION : get_num_overall_buffers
14147 *
 * DESCRIPTION: Get the total number of pending buffers across all requests.
14149 *
14150 * PARAMETERS : None
14151 *
14152 * RETURN : Number of overall pending buffers
14153 *
14154 *==========================================================================*/
14155uint32_t PendingBuffersMap::get_num_overall_buffers()
14156{
14157 uint32_t sum_buffers = 0;
14158 for (auto &req : mPendingBuffersInRequest) {
14159 sum_buffers += req.mPendingBufferList.size();
14160 }
14161 return sum_buffers;
14162}
14163
14164/*===========================================================================
14165 * FUNCTION : removeBuf
14166 *
14167 * DESCRIPTION: Remove a matching buffer from tracker.
14168 *
14169 * PARAMETERS : @buffer: image buffer for the callback
14170 *
14171 * RETURN : None
14172 *
14173 *==========================================================================*/
14174void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14175{
14176 bool buffer_found = false;
14177 for (auto req = mPendingBuffersInRequest.begin();
14178 req != mPendingBuffersInRequest.end(); req++) {
14179 for (auto k = req->mPendingBufferList.begin();
14180 k != req->mPendingBufferList.end(); k++ ) {
14181 if (k->buffer == buffer) {
14182 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14183 req->frame_number, buffer);
14184 k = req->mPendingBufferList.erase(k);
14185 if (req->mPendingBufferList.empty()) {
14186 // Remove this request from Map
14187 req = mPendingBuffersInRequest.erase(req);
14188 }
14189 buffer_found = true;
14190 break;
14191 }
14192 }
14193 if (buffer_found) {
14194 break;
14195 }
14196 }
14197 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14198 get_num_overall_buffers());
14199}
14200
14201/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014202 * FUNCTION : getBufErrStatus
14203 *
14204 * DESCRIPTION: get buffer error status
14205 *
14206 * PARAMETERS : @buffer: buffer handle
14207 *
14208 * RETURN : Error status
14209 *
14210 *==========================================================================*/
14211int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14212{
14213 for (auto& req : mPendingBuffersInRequest) {
14214 for (auto& k : req.mPendingBufferList) {
14215 if (k.buffer == buffer)
14216 return k.bufStatus;
14217 }
14218 }
14219 return CAMERA3_BUFFER_STATUS_OK;
14220}
14221
14222/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014223 * FUNCTION : setPAAFSupport
14224 *
14225 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14226 * feature mask according to stream type and filter
14227 * arrangement
14228 *
14229 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14230 * @stream_type: stream type
14231 * @filter_arrangement: filter arrangement
14232 *
14233 * RETURN : None
14234 *==========================================================================*/
14235void QCamera3HardwareInterface::setPAAFSupport(
14236 cam_feature_mask_t& feature_mask,
14237 cam_stream_type_t stream_type,
14238 cam_color_filter_arrangement_t filter_arrangement)
14239{
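    // Preview-assisted AF runs on streams the AF algorithm can sample: for Bayer
    // sensors that is preview/analysis/video (unless PPEIS core owns the stream),
    // for mono (Y-only) sensors just the analysis stream.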
Thierry Strudel3d639192016-09-09 11:52:26 -070014240 switch (filter_arrangement) {
14241 case CAM_FILTER_ARRANGEMENT_RGGB:
14242 case CAM_FILTER_ARRANGEMENT_GRBG:
14243 case CAM_FILTER_ARRANGEMENT_GBRG:
14244 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014245 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14246 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014247 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014248 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14249 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014250 }
14251 break;
14252 case CAM_FILTER_ARRANGEMENT_Y:
14253 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14254 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14255 }
14256 break;
14257 default:
14258 break;
14259 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014260 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14261 feature_mask, stream_type, filter_arrangement);
14262
14263
Thierry Strudel3d639192016-09-09 11:52:26 -070014264}
14265
14266/*===========================================================================
14267* FUNCTION : getSensorMountAngle
14268*
14269* DESCRIPTION: Retrieve sensor mount angle
14270*
14271* PARAMETERS : None
14272*
14273* RETURN : sensor mount angle in uint32_t
14274*==========================================================================*/
14275uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14276{
14277 return gCamCapability[mCameraId]->sensor_mount_angle;
14278}
14279
14280/*===========================================================================
14281* FUNCTION : getRelatedCalibrationData
14282*
14283* DESCRIPTION: Retrieve related system calibration data
14284*
14285* PARAMETERS : None
14286*
14287* RETURN : Pointer of related system calibration data
14288*==========================================================================*/
14289const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14290{
14291 return (const cam_related_system_calibration_data_t *)
14292 &(gCamCapability[mCameraId]->related_cam_calibration);
14293}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014294
14295/*===========================================================================
14296 * FUNCTION : is60HzZone
14297 *
 * DESCRIPTION: Whether the device is in a region with 60Hz mains electricity frequency
14299 *
14300 * PARAMETERS : None
14301 *
14302 * RETURN : True if in 60Hz zone, False otherwise
14303 *==========================================================================*/
14304bool QCamera3HardwareInterface::is60HzZone()
14305{
14306 time_t t = time(NULL);
14307 struct tm lt;
14308
14309 struct tm* r = localtime_r(&t, &lt);
14310
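    // Coarse timezone-based guess at the local mains frequency: offsets at or
    // beyond UTC+8, at or below UTC-2, or an unknown local time are treated as
    // 60Hz; everything in between as 50Hz.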
14311 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14312 return true;
14313 else
14314 return false;
14315}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014316
14317/*===========================================================================
14318 * FUNCTION : adjustBlackLevelForCFA
14319 *
14320 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14321 * of bayer CFA (Color Filter Array).
14322 *
14323 * PARAMETERS : @input: black level pattern in the order of RGGB
14324 * @output: black level pattern in the order of CFA
14325 * @color_arrangement: CFA color arrangement
14326 *
14327 * RETURN : None
14328 *==========================================================================*/
14329template<typename T>
14330void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14331 T input[BLACK_LEVEL_PATTERN_CNT],
14332 T output[BLACK_LEVEL_PATTERN_CNT],
14333 cam_color_filter_arrangement_t color_arrangement)
14334{
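    // input[] is always ordered {R, Gr, Gb, B}; reorder it so output[] follows the
    // channel order of the sensor's actual CFA pattern.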
14335 switch (color_arrangement) {
14336 case CAM_FILTER_ARRANGEMENT_GRBG:
14337 output[0] = input[1];
14338 output[1] = input[0];
14339 output[2] = input[3];
14340 output[3] = input[2];
14341 break;
14342 case CAM_FILTER_ARRANGEMENT_GBRG:
14343 output[0] = input[2];
14344 output[1] = input[3];
14345 output[2] = input[0];
14346 output[3] = input[1];
14347 break;
14348 case CAM_FILTER_ARRANGEMENT_BGGR:
14349 output[0] = input[3];
14350 output[1] = input[2];
14351 output[2] = input[1];
14352 output[3] = input[0];
14353 break;
14354 case CAM_FILTER_ARRANGEMENT_RGGB:
14355 output[0] = input[0];
14356 output[1] = input[1];
14357 output[2] = input[2];
14358 output[3] = input[3];
14359 break;
14360 default:
14361 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14362 break;
14363 }
14364}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014365
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014366void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14367 CameraMetadata &resultMetadata,
14368 std::shared_ptr<metadata_buffer_t> settings)
14369{
14370 if (settings == nullptr) {
14371 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14372 return;
14373 }
14374
14375 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14376 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14377 }
14378
14379 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14380 String8 str((const char *)gps_methods);
14381 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14382 }
14383
14384 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14385 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14386 }
14387
14388 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14389 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14390 }
14391
14392 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14393 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14394 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14395 }
14396
14397 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14398 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14399 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14400 }
14401
14402 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14403 int32_t fwk_thumb_size[2];
14404 fwk_thumb_size[0] = thumb_size->width;
14405 fwk_thumb_size[1] = thumb_size->height;
14406 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14407 }
14408
14409 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14410 uint8_t fwk_intent = intent[0];
14411 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14412 }
14413}
14414
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014415bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14416 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14417 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014418{
14419 if (hdrPlusRequest == nullptr) return false;
14420
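    // A request is routed to HDR+ only when it looks like a high-quality still:
    // HQ noise reduction, HQ edge mode, and exactly one JPEG (BLOB) output.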
14421 // Check noise reduction mode is high quality.
14422 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14423 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14424 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014425 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14426 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014427 return false;
14428 }
14429
14430 // Check edge mode is high quality.
14431 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14432 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14433 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14434 return false;
14435 }
14436
14437 if (request.num_output_buffers != 1 ||
14438 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14439 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014440 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14441 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14442 request.output_buffers[0].stream->width,
14443 request.output_buffers[0].stream->height,
14444 request.output_buffers[0].stream->format);
14445 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014446 return false;
14447 }
14448
14449 // Get a YUV buffer from pic channel.
14450 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14451 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14452 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14453 if (res != OK) {
14454 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14455 __FUNCTION__, strerror(-res), res);
14456 return false;
14457 }
14458
14459 pbcamera::StreamBuffer buffer;
14460 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014461 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014462 buffer.data = yuvBuffer->buffer;
14463 buffer.dataSize = yuvBuffer->frame_len;
14464
14465 pbcamera::CaptureRequest pbRequest;
14466 pbRequest.id = request.frame_number;
14467 pbRequest.outputBuffers.push_back(buffer);
14468
14469 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014470 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014471 if (res != OK) {
14472 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14473 strerror(-res), res);
14474 return false;
14475 }
14476
14477 hdrPlusRequest->yuvBuffer = yuvBuffer;
14478 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14479
14480 return true;
14481}
14482
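// Start an asynchronous open of the HDR+ client through the Easel manager client. Completion is
// reported via onOpened()/onOpenFailed(); gHdrPlusClientOpening prevents issuing a second open
// while one is already in flight.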
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014483status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14484{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014485 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14486 return OK;
14487 }
14488
14489 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14490 if (res != OK) {
14491 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14492 strerror(-res), res);
14493 return res;
14494 }
14495 gHdrPlusClientOpening = true;
14496
14497 return OK;
14498}
14499
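// Enable HDR+ (ZSL) mode. If the HDR+ client is not available yet, this only makes sure an
// asynchronous open is in progress and returns; onOpened() will enable the mode once the client
// exists. Otherwise the HDR+ streams are configured and Easel is told to start capturing ZSL
// RAW buffers via setZslHdrPlusMode(true).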
Chien-Yu Chenee335912017-02-09 17:53:20 -080014500status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14501{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014502 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014503
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014504 if (mHdrPlusModeEnabled) {
14505 return OK;
14506 }
14507
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014508 // Check if gHdrPlusClient is opened or being opened.
14509 if (gHdrPlusClient == nullptr) {
14510 if (gHdrPlusClientOpening) {
14511 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14512 return OK;
14513 }
14514
14515 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014516 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014517 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14518 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014519 return res;
14520 }
14521
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014522 // When opening HDR+ client completes, HDR+ mode will be enabled.
14523 return OK;
14524
Chien-Yu Chenee335912017-02-09 17:53:20 -080014525 }
14526
14527 // Configure stream for HDR+.
14528 res = configureHdrPlusStreamsLocked();
14529 if (res != OK) {
14530 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014531 return res;
14532 }
14533
14534 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14535 res = gHdrPlusClient->setZslHdrPlusMode(true);
14536 if (res != OK) {
14537 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014538 return res;
14539 }
14540
14541 mHdrPlusModeEnabled = true;
14542 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14543
14544 return OK;
14545}
14546
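// Wait until a pending asynchronous HDR+ client open has finished (successfully or not). The
// caller must hold gHdrPlusClientLock through the unique_lock passed in.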
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014547void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
14548{
14549 if (gHdrPlusClientOpening) {
14550 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
14551 }
14552}
14553
Chien-Yu Chenee335912017-02-09 17:53:20 -080014554void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14555{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014556 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014557 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014558 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14559 if (res != OK) {
14560 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14561 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014562
14563 // Close HDR+ client so Easel can enter low power mode.
14564 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14565 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014566 }
14567
14568 mHdrPlusModeEnabled = false;
14569 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14570}
14571
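// Build the HDR+ client stream configuration. The input is either the HAL-provided RAW10 stream
// (when mHdrPlusRawSrcChannel exists) or the sensor streaming directly to Easel over MIPI; the
// output is the YUV stream backing the pic channel used for JPEG encoding. RAW16 output is not
// handled yet.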
14572status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014573{
14574 pbcamera::InputConfiguration inputConfig;
14575 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14576 status_t res = OK;
14577
14578 // Configure HDR+ client streams.
14579 // Get input config.
14580 if (mHdrPlusRawSrcChannel) {
14581 // HDR+ input buffers will be provided by HAL.
14582 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14583 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14584 if (res != OK) {
14585            LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
14586 __FUNCTION__, strerror(-res), res);
14587 return res;
14588 }
14589
14590 inputConfig.isSensorInput = false;
14591 } else {
14592 // Sensor MIPI will send data to Easel.
14593 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014594 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014595 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14596 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14597 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14598 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14599 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
Yin-Chia Yeheeb10422017-05-23 11:37:46 -070014600 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014601 if (mSensorModeInfo.num_raw_bits != 10) {
14602 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14603 mSensorModeInfo.num_raw_bits);
14604 return BAD_VALUE;
14605 }
14606
14607 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014608 }
14609
14610 // Get output configurations.
14611 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014612 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014613
14614 // Easel may need to output YUV output buffers if mPictureChannel was created.
14615 pbcamera::StreamConfiguration yuvOutputConfig;
14616 if (mPictureChannel != nullptr) {
14617 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14618 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14619 if (res != OK) {
14620            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14621 __FUNCTION__, strerror(-res), res);
14622
14623 return res;
14624 }
14625
14626 outputStreamConfigs.push_back(yuvOutputConfig);
14627 }
14628
14629 // TODO: consider other channels for YUV output buffers.
14630
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014631 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014632 if (res != OK) {
14633        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14634 strerror(-res), res);
14635 return res;
14636 }
14637
14638 return OK;
14639}
14640
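// Callback from the Easel manager client: the asynchronous HDR+ client open completed. Take
// ownership of the client, wake any waiter in finishHdrPlusClientOpeningLocked(), push the
// camera's static metadata to the client, and enable HDR+ mode.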
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014641void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
14642{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014643 if (client == nullptr) {
14644 ALOGE("%s: Opened client is null.", __FUNCTION__);
14645 return;
14646 }
14647
Chien-Yu Chene96475e2017-04-11 11:53:26 -070014648 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014649 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14650
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014651 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014652 if (!gHdrPlusClientOpening) {
14653 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
14654 return;
14655 }
14656
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014657 gHdrPlusClient = std::move(client);
14658 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014659 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014660
14661 // Set static metadata.
14662 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14663 if (res != OK) {
14664 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14665 __FUNCTION__, strerror(-res), res);
14666 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14667 gHdrPlusClient = nullptr;
14668 return;
14669 }
14670
14671 // Enable HDR+ mode.
14672 res = enableHdrPlusModeLocked();
14673 if (res != OK) {
14674 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
14675        LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14676}
14677
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014678void QCamera3HardwareInterface::onOpenFailed(status_t err)
14679{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014680 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014681 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014682 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014683 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014684}
14685
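// Callback for an unrecoverable HDR+ client error: put the HAL into the ERROR state and report
// the device error to the framework.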
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014686void QCamera3HardwareInterface::onFatalError()
14687{
14688 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
14689
14690 // Set HAL state to error.
14691 pthread_mutex_lock(&mMutex);
14692 mState = ERROR;
14693 pthread_mutex_unlock(&mMutex);
14694
14695 handleCameraDeviceError();
14696}
14697
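// Callback delivering a completed HDR+ capture. The single YUV output buffer is returned to the
// pic channel for JPEG encoding, the result metadata (which describes a ZSL frame) is updated
// with the settings of the original still-capture request, the shutter is dispatched using the
// sensor timestamp, and the pending HDR+ request bookkeeping is removed.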
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014698void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014699 const camera_metadata_t &resultMetadata)
14700{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014701 if (result != nullptr) {
14702 if (result->outputBuffers.size() != 1) {
14703            ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
14704 result->outputBuffers.size());
14705 return;
14706 }
14707
14708 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14709 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14710 result->outputBuffers[0].streamId);
14711 return;
14712 }
14713
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014714 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014715 HdrPlusPendingRequest pendingRequest;
14716 {
14717 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14718 auto req = mHdrPlusPendingRequests.find(result->requestId);
14719 pendingRequest = req->second;
14720 }
14721
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014722 // Update the result metadata with the settings of the HDR+ still capture request because
14723 // the result metadata belongs to a ZSL buffer.
14724 CameraMetadata metadata;
14725 metadata = &resultMetadata;
14726 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14727 camera_metadata_t* updatedResultMetadata = metadata.release();
14728
14729 QCamera3PicChannel *picChannel =
14730 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14731
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014732 // Check if dumping HDR+ YUV output is enabled.
14733 char prop[PROPERTY_VALUE_MAX];
14734 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14735 bool dumpYuvOutput = atoi(prop);
14736
14737 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014738 // Dump yuv buffer to a ppm file.
14739 pbcamera::StreamConfiguration outputConfig;
14740 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14741 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14742 if (rc == OK) {
14743 char buf[FILENAME_MAX] = {};
14744 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14745 result->requestId, result->outputBuffers[0].streamId,
14746 outputConfig.image.width, outputConfig.image.height);
14747
14748 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14749 } else {
14750 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14751 __FUNCTION__, strerror(-rc), rc);
14752 }
14753 }
14754
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014755 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14756 auto halMetadata = std::make_shared<metadata_buffer_t>();
14757 clear_metadata_buffer(halMetadata.get());
14758
14759 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14760 // encoding.
14761 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14762 halStreamId, /*minFrameDuration*/0);
14763 if (res == OK) {
14764 // Return the buffer to pic channel for encoding.
14765 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14766 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14767 halMetadata);
14768 } else {
14769 // Return the buffer without encoding.
14770 // TODO: This should not happen but we may want to report an error buffer to camera
14771 // service.
14772 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14773 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14774 strerror(-res), res);
14775 }
14776
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014777        // Find the sensor timestamp so the shutter can be dispatched for this frame.
14778 camera_metadata_ro_entry_t entry;
14779 res = find_camera_metadata_ro_entry(updatedResultMetadata,
14780 ANDROID_SENSOR_TIMESTAMP, &entry);
14781 if (res != OK) {
14782 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
14783 __FUNCTION__, result->requestId, strerror(-res), res);
14784 } else {
14785 mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
14786 }
14787
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014788 // Send HDR+ metadata to framework.
14789 {
14790 pthread_mutex_lock(&mMutex);
14791
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014792 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
14793 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014794 pthread_mutex_unlock(&mMutex);
14795 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014796
14797 // Remove the HDR+ pending request.
14798 {
14799 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14800 auto req = mHdrPlusPendingRequests.find(result->requestId);
14801 mHdrPlusPendingRequests.erase(req);
14802 }
14803 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014804}
14805
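// Callback for a failed HDR+ capture. The borrowed YUV buffer is returned to the pic channel,
// every framework buffer pending for that frame number is reported back with
// CAMERA3_BUFFER_STATUS_ERROR, and the pending request is removed.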
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014806void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
14807{
14808 if (failedResult == nullptr) {
14809 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
14810 return;
14811 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014812
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014813 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014814
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014815 // Remove the pending HDR+ request.
14816 {
14817 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14818 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14819
14820 // Return the buffer to pic channel.
14821 QCamera3PicChannel *picChannel =
14822 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14823 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14824
14825 mHdrPlusPendingRequests.erase(pendingRequest);
14826 }
14827
14828 pthread_mutex_lock(&mMutex);
14829
14830 // Find the pending buffers.
14831 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
14832 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14833 if (pendingBuffers->frame_number == failedResult->requestId) {
14834 break;
14835 }
14836 pendingBuffers++;
14837 }
14838
14839 // Send out buffer errors for the pending buffers.
14840 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14841 std::vector<camera3_stream_buffer_t> streamBuffers;
14842 for (auto &buffer : pendingBuffers->mPendingBufferList) {
14843 // Prepare a stream buffer.
14844 camera3_stream_buffer_t streamBuffer = {};
14845 streamBuffer.stream = buffer.stream;
14846 streamBuffer.buffer = buffer.buffer;
14847 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14848 streamBuffer.acquire_fence = -1;
14849 streamBuffer.release_fence = -1;
14850
14851 streamBuffers.push_back(streamBuffer);
14852
14853 // Send out error buffer event.
14854 camera3_notify_msg_t notify_msg = {};
14855 notify_msg.type = CAMERA3_MSG_ERROR;
14856 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
14857 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
14858 notify_msg.message.error.error_stream = buffer.stream;
14859
14860 orchestrateNotify(&notify_msg);
14861 }
14862
14863 camera3_capture_result_t result = {};
14864 result.frame_number = pendingBuffers->frame_number;
14865 result.num_output_buffers = streamBuffers.size();
14866 result.output_buffers = &streamBuffers[0];
14867
14868 // Send out result with buffer errors.
14869 orchestrateResult(&result);
14870
14871 // Remove pending buffers.
14872 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
14873 }
14874
14875 // Remove pending request.
14876 auto halRequest = mPendingRequestsList.begin();
14877 while (halRequest != mPendingRequestsList.end()) {
14878 if (halRequest->frame_number == failedResult->requestId) {
14879 mPendingRequestsList.erase(halRequest);
14880 break;
14881 }
14882 halRequest++;
14883 }
14884
14885 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014886}
14887
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014888
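// ShutterDispatcher delivers shutter notifications to the framework in frame-number order: a
// ready shutter is held back until the shutters of all earlier frames have been sent.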
14889ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
14890 mParent(parent) {}
14891
14892void ShutterDispatcher::expectShutter(uint32_t frameNumber)
14893{
14894 std::lock_guard<std::mutex> lock(mLock);
14895 mShutters.emplace(frameNumber, Shutter());
14896}
14897
14898void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
14899{
14900 std::lock_guard<std::mutex> lock(mLock);
14901
14902 // Make this frame's shutter ready.
14903 auto shutter = mShutters.find(frameNumber);
14904 if (shutter == mShutters.end()) {
14905 // Shutter was already sent.
14906 return;
14907 }
14908
14909 shutter->second.ready = true;
14910 shutter->second.timestamp = timestamp;
14911
14912    // Iterate through the shutters and send them out until we reach one that's not ready yet.
14913 shutter = mShutters.begin();
14914 while (shutter != mShutters.end()) {
14915 if (!shutter->second.ready) {
14916 // If this shutter is not ready, the following shutters can't be sent.
14917 break;
14918 }
14919
14920 camera3_notify_msg_t msg = {};
14921 msg.type = CAMERA3_MSG_SHUTTER;
14922 msg.message.shutter.frame_number = shutter->first;
14923 msg.message.shutter.timestamp = shutter->second.timestamp;
14924 mParent->orchestrateNotify(&msg);
14925
14926 shutter = mShutters.erase(shutter);
14927 }
14928}
14929
14930void ShutterDispatcher::clear(uint32_t frameNumber)
14931{
14932 std::lock_guard<std::mutex> lock(mLock);
14933 mShutters.erase(frameNumber);
14934}
14935
14936void ShutterDispatcher::clear()
14937{
14938 std::lock_guard<std::mutex> lock(mLock);
14939
14940 // Log errors for stale shutters.
14941 for (auto &shutter : mShutters) {
14942 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
14943 __FUNCTION__, shutter.first, shutter.second.ready,
14944 shutter.second.timestamp);
14945 }
14946 mShutters.clear();
14947}
14948
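// OutputBufferDispatcher does the same for output buffers, per stream: a ready buffer is held
// back until the buffers of all earlier frames on that stream have been returned.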
14949OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
14950 mParent(parent) {}
14951
14952status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
14953{
14954 std::lock_guard<std::mutex> lock(mLock);
14955 mStreamBuffers.clear();
14956 if (!streamList) {
14957 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
14958 return -EINVAL;
14959 }
14960
14961 // Create a "frame-number -> buffer" map for each stream.
14962 for (uint32_t i = 0; i < streamList->num_streams; i++) {
14963 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
14964 }
14965
14966 return OK;
14967}
14968
14969status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
14970{
14971 std::lock_guard<std::mutex> lock(mLock);
14972
14973 // Find the "frame-number -> buffer" map for the stream.
14974 auto buffers = mStreamBuffers.find(stream);
14975 if (buffers == mStreamBuffers.end()) {
14976 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
14977 return -EINVAL;
14978 }
14979
14980 // Create an unready buffer for this frame number.
14981 buffers->second.emplace(frameNumber, Buffer());
14982 return OK;
14983}
14984
14985void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
14986 const camera3_stream_buffer_t &buffer)
14987{
14988 std::lock_guard<std::mutex> lock(mLock);
14989
14990 // Find the frame number -> buffer map for the stream.
14991 auto buffers = mStreamBuffers.find(buffer.stream);
14992 if (buffers == mStreamBuffers.end()) {
14993 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
14994 return;
14995 }
14996
14997    // Find the unready buffer for this frame number and mark it ready.
14998 auto pendingBuffer = buffers->second.find(frameNumber);
14999 if (pendingBuffer == buffers->second.end()) {
15000 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15001 return;
15002 }
15003
15004 pendingBuffer->second.ready = true;
15005 pendingBuffer->second.buffer = buffer;
15006
15007    // Iterate through the buffers and send them out until we reach one that's not ready yet.
15008 pendingBuffer = buffers->second.begin();
15009 while (pendingBuffer != buffers->second.end()) {
15010 if (!pendingBuffer->second.ready) {
15011 // If this buffer is not ready, the following buffers can't be sent.
15012 break;
15013 }
15014
15015 camera3_capture_result_t result = {};
15016 result.frame_number = pendingBuffer->first;
15017 result.num_output_buffers = 1;
15018 result.output_buffers = &pendingBuffer->second.buffer;
15019
15020        // Send out the result containing this ready output buffer.
15021 mParent->orchestrateResult(&result);
15022
15023 pendingBuffer = buffers->second.erase(pendingBuffer);
15024 }
15025}
15026
15027void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15028{
15029 std::lock_guard<std::mutex> lock(mLock);
15030
15031 // Log errors for stale buffers.
15032 for (auto &buffers : mStreamBuffers) {
15033 for (auto &buffer : buffers.second) {
15034 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15035 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15036 }
15037 buffers.second.clear();
15038 }
15039
15040 if (clearConfiguredStreams) {
15041 mStreamBuffers.clear();
15042 }
15043}
15044
Thierry Strudel3d639192016-09-09 11:52:26 -070015045}; //end namespace qcamera