/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
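// Metadata for each capture is delivered in PARTIAL_RESULT_COUNT partial results
// (typically an early 3A/urgent update followed by the final metadata).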
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
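// Informational sketch of the batching arithmetic: in HFR batch mode the per-request
// batch size is nominally the capture FPS divided by PREVIEW_FPS_FOR_HFR
// (e.g. 240 / 30 = 8) and is bounded by MAX_HFR_BATCH_SIZE.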
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold (in seconds) for detecting missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
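// Example: METADATA_MAP_SIZE(EFFECT_MODES_MAP) yields the number of entries in that lookup table.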

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT   0
#define FACE_TOP    1
#define FACE_RIGHT  2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4
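// Five values are packed per detected face rectangle: the four edges above plus a weight.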

/* Face landmarks indices */
#define LEFT_EYE_X  0
#define LEFT_EYE_Y  1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X     4
#define MOUTH_Y     5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.
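// Locking convention used below (see openCamera()/closeCamera()): take
// std::unique_lock<std::mutex> l(gHdrPlusClientLock) before reading or writing
// gEaselManagerClient, gHdrPlusClient or the flags above.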

const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS,      CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS,         CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,                     CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,             CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,              CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,                     CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,                 CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android
 * the code traverses from lower to higher index, which means that for HAL values that
 * map to different Android values, the traversal logic will select the first one found.
 */
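/* For example, CAM_AWB_D50 appears below for D50, DAYLIGHT and FINE_WEATHER;
 * when mapping back from HAL to Android, CAM_AWB_D50 therefore resolves to
 * ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, the first matching entry.
 */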
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
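// 0xDEADBEEF also serves as the "no active session" sentinel; closeCamera()
// resets this camera's entry back to it.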

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
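// Example: logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open") prints a
// CLOCK_BOOTTIME timestamp in milliseconds, but only when gEaselProfilingEnabled is set.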

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check that the requested stream configuration uses only
 *              advertised sizes
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find the input stream, if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in the configuration and
     * check if unsupported sizes have been requested on any of them.
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes differ for each stream format; check against the
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec, the depth point cloud size should be sample count / 16
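                //(illustrative numbers only: a 640x480 PD stats block would give
                //(640 * 480 * 2) / 16 = 38400 samples, exposed as a 38400x1 blob)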
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from the framework is always the full active array size,
                 * but it is not clear from the spec whether the framework will
                 * always follow that. We also have logic to override to the full
                 * array size, so keep the check lenient for now.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has an unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1322
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001323/*===========================================================================
1324 * FUNCTION : validateUsageFlags
1325 *
1326 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
 1327 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1328 * PARAMETERS :
1329 * @stream_list : streams to be configured
1330 *
1331 * RETURN :
1332 * NO_ERROR if the usage flags are supported
1333 * error code if usage flags are not supported
1334 *
1335 *==========================================================================*/
1336int QCamera3HardwareInterface::validateUsageFlags(
1337 const camera3_stream_configuration_t* streamList)
1338{
1339 for (size_t j = 0; j < streamList->num_streams; j++) {
1340 const camera3_stream_t *newStream = streamList->streams[j];
1341
1342 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1343 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1344 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1345 continue;
1346 }
1347
Jason Leec4cf5032017-05-24 18:31:41 -07001348 // Here we only care whether it's EIS3 or not
1349 char is_type_value[PROPERTY_VALUE_MAX];
1350 property_get("persist.camera.is_type", is_type_value, "4");
1351 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1352 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1353 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1354 isType = IS_TYPE_NONE;
1355
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001356 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1357 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1358 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1359 bool forcePreviewUBWC = true;
1360 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1361 forcePreviewUBWC = false;
1362 }
1363 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001364 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001365 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001366 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001367 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001368 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001369
1370 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1371 // So color spaces will always match.
1372
1373 // Check whether underlying formats of shared streams match.
1374 if (isVideo && isPreview && videoFormat != previewFormat) {
1375 LOGE("Combined video and preview usage flag is not supported");
1376 return -EINVAL;
1377 }
1378 if (isPreview && isZSL && previewFormat != zslFormat) {
1379 LOGE("Combined preview and zsl usage flag is not supported");
1380 return -EINVAL;
1381 }
1382 if (isVideo && isZSL && videoFormat != zslFormat) {
1383 LOGE("Combined video and zsl usage flag is not supported");
1384 return -EINVAL;
1385 }
1386 }
1387 return NO_ERROR;
1388}
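/* Illustrative note (an assumption-based sketch, not part of the original code): a
 * configuration fails here when one IMPLEMENTATION_DEFINED output stream satisfies both
 * IS_USAGE_VIDEO() and IS_USAGE_PREVIEW() while QCamera3Channel::getStreamDefaultFormat()
 * returns different formats for CAM_STREAM_TYPE_VIDEO and CAM_STREAM_TYPE_PREVIEW at that
 * resolution, e.g. (hypothetically) when UBWC is enabled for video but disabled for
 * preview on a given target; the shared stream is then rejected with -EINVAL above.
 */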
1389
1390/*===========================================================================
1391 * FUNCTION : validateUsageFlagsForEis
1392 *
 1393 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1394 *
1395 * PARAMETERS :
1396 * @stream_list : streams to be configured
1397 *
1398 * RETURN :
1399 * NO_ERROR if the usage flags are supported
1400 * error code if usage flags are not supported
1401 *
1402 *==========================================================================*/
1403int QCamera3HardwareInterface::validateUsageFlagsForEis(
1404 const camera3_stream_configuration_t* streamList)
1405{
1406 for (size_t j = 0; j < streamList->num_streams; j++) {
1407 const camera3_stream_t *newStream = streamList->streams[j];
1408
1409 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1410 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1411
 1412         // Because EIS is "hard-coded" for certain use cases, and the current
 1413         // implementation doesn't support shared preview and video on the same
 1414         // stream, return failure if EIS is forced on.
1415 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1416 LOGE("Combined video and preview usage flag is not supported due to EIS");
1417 return -EINVAL;
1418 }
1419 }
1420 return NO_ERROR;
1421}
1422
Thierry Strudel3d639192016-09-09 11:52:26 -07001423/*==============================================================================
1424 * FUNCTION : isSupportChannelNeeded
1425 *
 1426 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1427 *
1428 * PARAMETERS :
1429 * @stream_list : streams to be configured
1430 * @stream_config_info : the config info for streams to be configured
1431 *
 1432 * RETURN     : Boolean true/false decision
1433 *
1434 *==========================================================================*/
1435bool QCamera3HardwareInterface::isSupportChannelNeeded(
1436 camera3_stream_configuration_t *streamList,
1437 cam_stream_size_info_t stream_config_info)
1438{
1439 uint32_t i;
1440 bool pprocRequested = false;
 1441     /* Check for conditions where PProc pipeline does not have any streams */
1442 for (i = 0; i < stream_config_info.num_streams; i++) {
1443 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1444 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1445 pprocRequested = true;
1446 break;
1447 }
1448 }
1449
1450 if (pprocRequested == false )
1451 return true;
1452
1453 /* Dummy stream needed if only raw or jpeg streams present */
1454 for (i = 0; i < streamList->num_streams; i++) {
1455 switch(streamList->streams[i]->format) {
1456 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1457 case HAL_PIXEL_FORMAT_RAW10:
1458 case HAL_PIXEL_FORMAT_RAW16:
1459 case HAL_PIXEL_FORMAT_BLOB:
1460 break;
1461 default:
1462 return false;
1463 }
1464 }
1465 return true;
1466}
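/* Illustrative note (a sketch, not authoritative): for a configuration containing only a
 * RAW16 stream and a BLOB (JPEG) stream, no stream hits the default case above, so this
 * returns true and the caller is expected to add a dummy support channel so the
 * post-processing pipeline still has a stream to run on.
 */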
1467
1468/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001469 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001470 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001471 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001472 *
1473 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001474 *   @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001475 *
1476 * RETURN : int32_t type of status
1477 * NO_ERROR -- success
 1478 *              non-zero failure code
1479 *
1480 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001481int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001482{
1483 int32_t rc = NO_ERROR;
1484
1485 cam_dimension_t max_dim = {0, 0};
1486 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1487 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1488 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1489 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1490 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1491 }
1492
1493 clear_metadata_buffer(mParameters);
1494
1495 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1496 max_dim);
1497 if (rc != NO_ERROR) {
1498 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1499 return rc;
1500 }
1501
1502 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1503 if (rc != NO_ERROR) {
1504 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1505 return rc;
1506 }
1507
1508 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001509 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001510
1511 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1512 mParameters);
1513 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001514 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001515 return rc;
1516 }
1517
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001518 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001519 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1520 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1521 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1522 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1523 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001524
1525 return rc;
1526}
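/* Illustrative note (a summary of the query sequence implemented above, not additional
 * behavior): the maximum stream dimension is pushed via CAM_INTF_PARM_MAX_DIMENSION first
 * so the backend can select a sensor mode, and only then is CAM_INTF_PARM_SENSOR_MODE_INFO
 * queried; this ordering is what ties the reported mode to the current stream
 * configuration.
 */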
1527
1528/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001529 * FUNCTION : getCurrentSensorModeInfo
1530 *
1531 * DESCRIPTION: Get sensor mode information that is currently selected.
1532 *
1533 * PARAMETERS :
1534 * @sensorModeInfo : sensor mode information (output)
1535 *
1536 * RETURN : int32_t type of status
1537 * NO_ERROR -- success
 1538 *              non-zero failure code
1539 *
1540 *==========================================================================*/
1541int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1542{
1543 int32_t rc = NO_ERROR;
1544
1545 clear_metadata_buffer(mParameters);
1546 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1547
1548 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1549 mParameters);
1550 if (rc != NO_ERROR) {
 1551         LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1552 return rc;
1553 }
1554
1555 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1556 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1557 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1558 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1559 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1560 sensorModeInfo.num_raw_bits);
1561
1562 return rc;
1563}
1564
1565/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001566 * FUNCTION : addToPPFeatureMask
1567 *
1568 * DESCRIPTION: add additional features to pp feature mask based on
1569 * stream type and usecase
1570 *
1571 * PARAMETERS :
1572 * @stream_format : stream type for feature mask
1573 * @stream_idx : stream idx within postprocess_mask list to change
1574 *
1575 * RETURN : NULL
1576 *
1577 *==========================================================================*/
1578void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1579 uint32_t stream_idx)
1580{
1581 char feature_mask_value[PROPERTY_VALUE_MAX];
1582 cam_feature_mask_t feature_mask;
1583 int args_converted;
1584 int property_len;
1585
1586 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001587#ifdef _LE_CAMERA_
1588 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1589 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1590 property_len = property_get("persist.camera.hal3.feature",
1591 feature_mask_value, swtnr_feature_mask_value);
1592#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001593 property_len = property_get("persist.camera.hal3.feature",
1594 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001595#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001596 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1597 (feature_mask_value[1] == 'x')) {
1598 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1599 } else {
1600 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1601 }
1602 if (1 != args_converted) {
1603 feature_mask = 0;
1604 LOGE("Wrong feature mask %s", feature_mask_value);
1605 return;
1606 }
1607
1608 switch (stream_format) {
1609 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1610 /* Add LLVD to pp feature mask only if video hint is enabled */
1611 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1612 mStreamConfigInfo.postprocess_mask[stream_idx]
1613 |= CAM_QTI_FEATURE_SW_TNR;
1614 LOGH("Added SW TNR to pp feature mask");
1615 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1616 mStreamConfigInfo.postprocess_mask[stream_idx]
1617 |= CAM_QCOM_FEATURE_LLVD;
1618 LOGH("Added LLVD SeeMore to pp feature mask");
1619 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001620 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1621 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1622 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1623 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001624 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1625 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1626 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1627 CAM_QTI_FEATURE_BINNING_CORRECTION;
1628 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001629 break;
1630 }
1631 default:
1632 break;
1633 }
1634 LOGD("PP feature mask %llx",
1635 mStreamConfigInfo.postprocess_mask[stream_idx]);
1636}
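/* Usage note (an illustrative sketch, not from the original sources): the property read
 * above accepts either a hex value ("0x...") or a decimal value, for example
 *     adb shell setprop persist.camera.hal3.feature 0x2000
 * would set some bit in the override mask; the code only acts on the property bits it
 * explicitly tests for (CAM_QTI_FEATURE_SW_TNR and CAM_QCOM_FEATURE_LLVD), and only when
 * a video stream is configured. 0x2000 here is a placeholder, not a documented bit.
 */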
1637
1638/*==============================================================================
1639 * FUNCTION : updateFpsInPreviewBuffer
1640 *
1641 * DESCRIPTION: update FPS information in preview buffer.
1642 *
1643 * PARAMETERS :
1644 * @metadata : pointer to metadata buffer
1645 * @frame_number: frame_number to look for in pending buffer list
1646 *
1647 * RETURN : None
1648 *
1649 *==========================================================================*/
1650void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1651 uint32_t frame_number)
1652{
1653 // Mark all pending buffers for this particular request
1654 // with corresponding framerate information
1655 for (List<PendingBuffersInRequest>::iterator req =
1656 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1657 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1658 for(List<PendingBufferInfo>::iterator j =
1659 req->mPendingBufferList.begin();
1660 j != req->mPendingBufferList.end(); j++) {
1661 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1662 if ((req->frame_number == frame_number) &&
1663 (channel->getStreamTypeMask() &
1664 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1665 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1666 CAM_INTF_PARM_FPS_RANGE, metadata) {
1667 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1668 struct private_handle_t *priv_handle =
1669 (struct private_handle_t *)(*(j->buffer));
1670 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1671 }
1672 }
1673 }
1674 }
1675}
1676
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001677/*==============================================================================
1678 * FUNCTION : updateTimeStampInPendingBuffers
1679 *
1680 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1681 * of a frame number
1682 *
1683 * PARAMETERS :
1684 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1685 * @timestamp : timestamp to be set
1686 *
1687 * RETURN : None
1688 *
1689 *==========================================================================*/
1690void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1691 uint32_t frameNumber, nsecs_t timestamp)
1692{
1693 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1694 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1695 if (req->frame_number != frameNumber)
1696 continue;
1697
1698 for (auto k = req->mPendingBufferList.begin();
1699 k != req->mPendingBufferList.end(); k++ ) {
1700 struct private_handle_t *priv_handle =
1701 (struct private_handle_t *) (*(k->buffer));
1702 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1703 }
1704 }
1705 return;
1706}
1707
Thierry Strudel3d639192016-09-09 11:52:26 -07001708/*===========================================================================
1709 * FUNCTION : configureStreams
1710 *
1711 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1712 * and output streams.
1713 *
1714 * PARAMETERS :
1715 * @stream_list : streams to be configured
1716 *
1717 * RETURN :
1718 *
1719 *==========================================================================*/
1720int QCamera3HardwareInterface::configureStreams(
1721 camera3_stream_configuration_t *streamList)
1722{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001723 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001724 int rc = 0;
1725
1726 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001727 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001728 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001729 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001730
1731 return rc;
1732}
1733
1734/*===========================================================================
1735 * FUNCTION : configureStreamsPerfLocked
1736 *
1737 * DESCRIPTION: configureStreams while perfLock is held.
1738 *
1739 * PARAMETERS :
1740 * @stream_list : streams to be configured
1741 *
1742 * RETURN : int32_t type of status
1743 * NO_ERROR -- success
1744 * none-zero failure code
1745 *==========================================================================*/
1746int QCamera3HardwareInterface::configureStreamsPerfLocked(
1747 camera3_stream_configuration_t *streamList)
1748{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001749 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001750 int rc = 0;
1751
1752 // Sanity check stream_list
1753 if (streamList == NULL) {
1754 LOGE("NULL stream configuration");
1755 return BAD_VALUE;
1756 }
1757 if (streamList->streams == NULL) {
1758 LOGE("NULL stream list");
1759 return BAD_VALUE;
1760 }
1761
1762 if (streamList->num_streams < 1) {
1763 LOGE("Bad number of streams requested: %d",
1764 streamList->num_streams);
1765 return BAD_VALUE;
1766 }
1767
1768 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1769 LOGE("Maximum number of streams %d exceeded: %d",
1770 MAX_NUM_STREAMS, streamList->num_streams);
1771 return BAD_VALUE;
1772 }
1773
Jason Leec4cf5032017-05-24 18:31:41 -07001774 mOpMode = streamList->operation_mode;
1775 LOGD("mOpMode: %d", mOpMode);
1776
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001777 rc = validateUsageFlags(streamList);
1778 if (rc != NO_ERROR) {
1779 return rc;
1780 }
1781
Thierry Strudel3d639192016-09-09 11:52:26 -07001782     /* first invalidate all the streams in mStreamInfo
1783 * if they appear again, they will be validated */
1784 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1785 it != mStreamInfo.end(); it++) {
1786 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1787 if (channel) {
1788 channel->stop();
1789 }
1790 (*it)->status = INVALID;
1791 }
1792
1793 if (mRawDumpChannel) {
1794 mRawDumpChannel->stop();
1795 delete mRawDumpChannel;
1796 mRawDumpChannel = NULL;
1797 }
1798
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001799 if (mHdrPlusRawSrcChannel) {
1800 mHdrPlusRawSrcChannel->stop();
1801 delete mHdrPlusRawSrcChannel;
1802 mHdrPlusRawSrcChannel = NULL;
1803 }
1804
Thierry Strudel3d639192016-09-09 11:52:26 -07001805 if (mSupportChannel)
1806 mSupportChannel->stop();
1807
1808 if (mAnalysisChannel) {
1809 mAnalysisChannel->stop();
1810 }
1811 if (mMetadataChannel) {
1812 /* If content of mStreamInfo is not 0, there is metadata stream */
 1813         /* If mStreamInfo is not empty, there is a metadata stream */
1814 }
1815 if (mChannelHandle) {
1816 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1817 mChannelHandle);
1818 LOGD("stopping channel %d", mChannelHandle);
1819 }
1820
1821 pthread_mutex_lock(&mMutex);
1822
1823 // Check state
1824 switch (mState) {
1825 case INITIALIZED:
1826 case CONFIGURED:
1827 case STARTED:
1828 /* valid state */
1829 break;
1830 default:
1831 LOGE("Invalid state %d", mState);
1832 pthread_mutex_unlock(&mMutex);
1833 return -ENODEV;
1834 }
1835
1836 /* Check whether we have video stream */
1837 m_bIs4KVideo = false;
1838 m_bIsVideo = false;
1839 m_bEisSupportedSize = false;
1840 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001841 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001842 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001843 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001844 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001845 uint32_t videoWidth = 0U;
1846 uint32_t videoHeight = 0U;
1847 size_t rawStreamCnt = 0;
1848 size_t stallStreamCnt = 0;
1849 size_t processedStreamCnt = 0;
1850 // Number of streams on ISP encoder path
1851 size_t numStreamsOnEncoder = 0;
1852 size_t numYuv888OnEncoder = 0;
1853 bool bYuv888OverrideJpeg = false;
1854 cam_dimension_t largeYuv888Size = {0, 0};
1855 cam_dimension_t maxViewfinderSize = {0, 0};
1856 bool bJpegExceeds4K = false;
1857 bool bJpegOnEncoder = false;
1858 bool bUseCommonFeatureMask = false;
1859 cam_feature_mask_t commonFeatureMask = 0;
1860 bool bSmallJpegSize = false;
1861 uint32_t width_ratio;
1862 uint32_t height_ratio;
1863 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1864 camera3_stream_t *inputStream = NULL;
1865 bool isJpeg = false;
1866 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001867 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001868 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001869
1870 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1871
1872 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001873 uint8_t eis_prop_set;
1874 uint32_t maxEisWidth = 0;
1875 uint32_t maxEisHeight = 0;
1876
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001877 // Initialize all instant AEC related variables
1878 mInstantAEC = false;
1879 mResetInstantAEC = false;
1880 mInstantAECSettledFrameNumber = 0;
1881 mAecSkipDisplayFrameBound = 0;
1882 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001883 mCurrFeatureState = 0;
1884 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001885
Thierry Strudel3d639192016-09-09 11:52:26 -07001886 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1887
1888 size_t count = IS_TYPE_MAX;
1889 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1890 for (size_t i = 0; i < count; i++) {
1891 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001892 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1893 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001894 break;
1895 }
1896 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001897
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001898 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001899 maxEisWidth = MAX_EIS_WIDTH;
1900 maxEisHeight = MAX_EIS_HEIGHT;
1901 }
1902
1903 /* EIS setprop control */
1904 char eis_prop[PROPERTY_VALUE_MAX];
1905 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001906 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001907 eis_prop_set = (uint8_t)atoi(eis_prop);
1908
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001909 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001910 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1911
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001912 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1913 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001914
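    /* Illustrative note (hedged, not authoritative): EIS ends up enabled only when the
     * sensor advertises IS_TYPE_EIS_2_0 or IS_TYPE_EIS_3_0, the property above is set
     * (it defaults to "1"), and the session is not constrained high speed. For bring-up
     * it can typically be forced off with
     *     adb shell setprop persist.camera.eis.enable 0
     * before the next stream configuration.
     */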
Thierry Strudel3d639192016-09-09 11:52:26 -07001915 /* stream configurations */
1916 for (size_t i = 0; i < streamList->num_streams; i++) {
1917 camera3_stream_t *newStream = streamList->streams[i];
1918 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1919 "height = %d, rotation = %d, usage = 0x%x",
1920 i, newStream->stream_type, newStream->format,
1921 newStream->width, newStream->height, newStream->rotation,
1922 newStream->usage);
1923 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1924 newStream->stream_type == CAMERA3_STREAM_INPUT){
1925 isZsl = true;
1926 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001927 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1928 IS_USAGE_PREVIEW(newStream->usage)) {
1929 isPreview = true;
1930 }
1931
Thierry Strudel3d639192016-09-09 11:52:26 -07001932 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1933 inputStream = newStream;
1934 }
1935
Emilian Peev7650c122017-01-19 08:24:33 -08001936 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1937 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001938 isJpeg = true;
1939 jpegSize.width = newStream->width;
1940 jpegSize.height = newStream->height;
1941 if (newStream->width > VIDEO_4K_WIDTH ||
1942 newStream->height > VIDEO_4K_HEIGHT)
1943 bJpegExceeds4K = true;
1944 }
1945
1946 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1947 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1948 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001949 // In HAL3 we can have multiple different video streams.
1950 // The variables video width and height are used below as
1951 // dimensions of the biggest of them
1952 if (videoWidth < newStream->width ||
1953 videoHeight < newStream->height) {
1954 videoWidth = newStream->width;
1955 videoHeight = newStream->height;
1956 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001957 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1958 (VIDEO_4K_HEIGHT <= newStream->height)) {
1959 m_bIs4KVideo = true;
1960 }
1961 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1962 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001963
Thierry Strudel3d639192016-09-09 11:52:26 -07001964 }
1965 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1966 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1967 switch (newStream->format) {
1968 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001969 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1970 depthPresent = true;
1971 break;
1972 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001973 stallStreamCnt++;
1974 if (isOnEncoder(maxViewfinderSize, newStream->width,
1975 newStream->height)) {
1976 numStreamsOnEncoder++;
1977 bJpegOnEncoder = true;
1978 }
1979 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1980 newStream->width);
1981 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
 1982                         newStream->height);
1983 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1984 "FATAL: max_downscale_factor cannot be zero and so assert");
1985 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1986 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1987 LOGH("Setting small jpeg size flag to true");
1988 bSmallJpegSize = true;
1989 }
1990 break;
1991 case HAL_PIXEL_FORMAT_RAW10:
1992 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1993 case HAL_PIXEL_FORMAT_RAW16:
1994 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001995 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1996 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1997 pdStatCount++;
1998 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001999 break;
2000 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2001 processedStreamCnt++;
2002 if (isOnEncoder(maxViewfinderSize, newStream->width,
2003 newStream->height)) {
2004 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2005 !IS_USAGE_ZSL(newStream->usage)) {
2006 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2007 }
2008 numStreamsOnEncoder++;
2009 }
2010 break;
2011 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2012 processedStreamCnt++;
2013 if (isOnEncoder(maxViewfinderSize, newStream->width,
2014 newStream->height)) {
2015 // If Yuv888 size is not greater than 4K, set feature mask
 2016                     // to SUPERSET so that it supports concurrent requests on
2017 // YUV and JPEG.
2018 if (newStream->width <= VIDEO_4K_WIDTH &&
2019 newStream->height <= VIDEO_4K_HEIGHT) {
2020 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2021 }
2022 numStreamsOnEncoder++;
2023 numYuv888OnEncoder++;
2024 largeYuv888Size.width = newStream->width;
2025 largeYuv888Size.height = newStream->height;
2026 }
2027 break;
2028 default:
2029 processedStreamCnt++;
2030 if (isOnEncoder(maxViewfinderSize, newStream->width,
2031 newStream->height)) {
2032 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2033 numStreamsOnEncoder++;
2034 }
2035 break;
2036 }
2037
2038 }
2039 }
2040
2041 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2042 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2043 !m_bIsVideo) {
2044 m_bEisEnable = false;
2045 }
2046
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002047 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2048 pthread_mutex_unlock(&mMutex);
2049 return -EINVAL;
2050 }
2051
Thierry Strudel54dc9782017-02-15 12:12:10 -08002052 uint8_t forceEnableTnr = 0;
2053 char tnr_prop[PROPERTY_VALUE_MAX];
2054 memset(tnr_prop, 0, sizeof(tnr_prop));
2055 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2056 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2057
Thierry Strudel3d639192016-09-09 11:52:26 -07002058 /* Logic to enable/disable TNR based on specific config size/etc.*/
2059 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002060 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2061 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002062 else if (forceEnableTnr)
2063 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002064
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002065 char videoHdrProp[PROPERTY_VALUE_MAX];
2066 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2067 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2068 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2069
2070 if (hdr_mode_prop == 1 && m_bIsVideo &&
2071 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2072 m_bVideoHdrEnabled = true;
2073 else
2074 m_bVideoHdrEnabled = false;
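    /* Debug note (illustrative, not authoritative): the two properties read above act as
     * overrides, e.g.
     *     adb shell setprop debug.camera.tnr.forceenable 1   # force TNR on
     *     adb shell setprop persist.camera.hdr.video 1       # request video HDR
     * Video HDR still requires a video stream and a non-HFR session, as checked above.
     */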
2075
2076
Thierry Strudel3d639192016-09-09 11:52:26 -07002077 /* Check if num_streams is sane */
2078 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2079 rawStreamCnt > MAX_RAW_STREAMS ||
2080 processedStreamCnt > MAX_PROCESSED_STREAMS) {
 2081         LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2082 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2083 pthread_mutex_unlock(&mMutex);
2084 return -EINVAL;
2085 }
2086 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002087 if (isZsl && m_bIs4KVideo) {
2088 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002089 pthread_mutex_unlock(&mMutex);
2090 return -EINVAL;
2091 }
2092 /* Check if stream sizes are sane */
2093 if (numStreamsOnEncoder > 2) {
2094 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2095 pthread_mutex_unlock(&mMutex);
2096 return -EINVAL;
2097 } else if (1 < numStreamsOnEncoder){
2098 bUseCommonFeatureMask = true;
2099 LOGH("Multiple streams above max viewfinder size, common mask needed");
2100 }
2101
2102 /* Check if BLOB size is greater than 4k in 4k recording case */
2103 if (m_bIs4KVideo && bJpegExceeds4K) {
2104 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2105 pthread_mutex_unlock(&mMutex);
2106 return -EINVAL;
2107 }
2108
Emilian Peev7650c122017-01-19 08:24:33 -08002109 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2110 depthPresent) {
2111 LOGE("HAL doesn't support depth streams in HFR mode!");
2112 pthread_mutex_unlock(&mMutex);
2113 return -EINVAL;
2114 }
2115
Thierry Strudel3d639192016-09-09 11:52:26 -07002116 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2117 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2118 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2119 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2120 // configurations:
2121 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2122 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2123 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2124 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2125 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2126 __func__);
2127 pthread_mutex_unlock(&mMutex);
2128 return -EINVAL;
2129 }
2130
2131 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2132 // the YUV stream's size is greater or equal to the JPEG size, set common
2133 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2134 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2135 jpegSize.width, jpegSize.height) &&
2136 largeYuv888Size.width > jpegSize.width &&
2137 largeYuv888Size.height > jpegSize.height) {
2138 bYuv888OverrideJpeg = true;
2139 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2140 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2141 }
2142
2143 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2144 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2145 commonFeatureMask);
2146 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2147 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2148
2149 rc = validateStreamDimensions(streamList);
2150 if (rc == NO_ERROR) {
2151 rc = validateStreamRotations(streamList);
2152 }
2153 if (rc != NO_ERROR) {
2154 LOGE("Invalid stream configuration requested!");
2155 pthread_mutex_unlock(&mMutex);
2156 return rc;
2157 }
2158
Emilian Peev0f3c3162017-03-15 12:57:46 +00002159 if (1 < pdStatCount) {
2160 LOGE("HAL doesn't support multiple PD streams");
2161 pthread_mutex_unlock(&mMutex);
2162 return -EINVAL;
2163 }
2164
2165 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2166 (1 == pdStatCount)) {
2167 LOGE("HAL doesn't support PD streams in HFR mode!");
2168 pthread_mutex_unlock(&mMutex);
2169 return -EINVAL;
2170 }
2171
Thierry Strudel3d639192016-09-09 11:52:26 -07002172 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2173 for (size_t i = 0; i < streamList->num_streams; i++) {
2174 camera3_stream_t *newStream = streamList->streams[i];
2175 LOGH("newStream type = %d, stream format = %d "
2176 "stream size : %d x %d, stream rotation = %d",
2177 newStream->stream_type, newStream->format,
2178 newStream->width, newStream->height, newStream->rotation);
 2179         //if the stream is already in mStreamInfo, validate it
2180 bool stream_exists = false;
2181 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2182 it != mStreamInfo.end(); it++) {
2183 if ((*it)->stream == newStream) {
2184 QCamera3ProcessingChannel *channel =
2185 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2186 stream_exists = true;
2187 if (channel)
2188 delete channel;
2189 (*it)->status = VALID;
2190 (*it)->stream->priv = NULL;
2191 (*it)->channel = NULL;
2192 }
2193 }
2194 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2195 //new stream
2196 stream_info_t* stream_info;
2197 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2198 if (!stream_info) {
2199 LOGE("Could not allocate stream info");
2200 rc = -ENOMEM;
2201 pthread_mutex_unlock(&mMutex);
2202 return rc;
2203 }
2204 stream_info->stream = newStream;
2205 stream_info->status = VALID;
2206 stream_info->channel = NULL;
2207 mStreamInfo.push_back(stream_info);
2208 }
2209 /* Covers Opaque ZSL and API1 F/W ZSL */
2210 if (IS_USAGE_ZSL(newStream->usage)
2211 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2212 if (zslStream != NULL) {
2213 LOGE("Multiple input/reprocess streams requested!");
2214 pthread_mutex_unlock(&mMutex);
2215 return BAD_VALUE;
2216 }
2217 zslStream = newStream;
2218 }
2219 /* Covers YUV reprocess */
2220 if (inputStream != NULL) {
2221 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2222 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2223 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2224 && inputStream->width == newStream->width
2225 && inputStream->height == newStream->height) {
2226 if (zslStream != NULL) {
 2227                 /* This scenario indicates that multiple YUV streams with the same
 2228                  * size as the input stream have been requested. Since the zsl stream
 2229                  * handle is solely used to override the size of streams that share
 2230                  * h/w streams, we just make a guess here as to which stream is the
 2231                  * ZSL stream. This will be refactored once we have generic logic for
 2232                  * streams sharing encoder output.
 2233                  */
2234 LOGH("Warning, Multiple ip/reprocess streams requested!");
2235 }
2236 zslStream = newStream;
2237 }
2238 }
2239 }
2240
2241 /* If a zsl stream is set, we know that we have configured at least one input or
2242 bidirectional stream */
2243 if (NULL != zslStream) {
2244 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2245 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2246 mInputStreamInfo.format = zslStream->format;
2247 mInputStreamInfo.usage = zslStream->usage;
2248 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2249 mInputStreamInfo.dim.width,
2250 mInputStreamInfo.dim.height,
2251 mInputStreamInfo.format, mInputStreamInfo.usage);
2252 }
2253
2254 cleanAndSortStreamInfo();
2255 if (mMetadataChannel) {
2256 delete mMetadataChannel;
2257 mMetadataChannel = NULL;
2258 }
2259 if (mSupportChannel) {
2260 delete mSupportChannel;
2261 mSupportChannel = NULL;
2262 }
2263
2264 if (mAnalysisChannel) {
2265 delete mAnalysisChannel;
2266 mAnalysisChannel = NULL;
2267 }
2268
2269 if (mDummyBatchChannel) {
2270 delete mDummyBatchChannel;
2271 mDummyBatchChannel = NULL;
2272 }
2273
Emilian Peev7650c122017-01-19 08:24:33 -08002274 if (mDepthChannel) {
2275 mDepthChannel = NULL;
2276 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002277 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002278
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002279 mShutterDispatcher.clear();
2280 mOutputBufferDispatcher.clear();
2281
Thierry Strudel2896d122017-02-23 19:18:03 -08002282 char is_type_value[PROPERTY_VALUE_MAX];
2283 property_get("persist.camera.is_type", is_type_value, "4");
2284 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2285
Binhao Line406f062017-05-03 14:39:44 -07002286 char property_value[PROPERTY_VALUE_MAX];
2287 property_get("persist.camera.gzoom.at", property_value, "0");
2288 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002289 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2290 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2291 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2292 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002293
2294 property_get("persist.camera.gzoom.4k", property_value, "0");
2295 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2296
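    /* Illustrative note (hedged): persist.camera.gzoom.at is treated as a bitmask above,
     * bit 0 enabling Google zoom on the video stream and bit 1 on the preview stream
     * (both only for the back camera), so e.g. a value of 3 enables it on both. The
     * separate persist.camera.gzoom.4k property gates its use for 4K video.
     */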
Thierry Strudel3d639192016-09-09 11:52:26 -07002297 //Create metadata channel and initialize it
2298 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2299 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2300 gCamCapability[mCameraId]->color_arrangement);
2301 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2302 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002303 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002304 if (mMetadataChannel == NULL) {
2305 LOGE("failed to allocate metadata channel");
2306 rc = -ENOMEM;
2307 pthread_mutex_unlock(&mMutex);
2308 return rc;
2309 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002310 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002311 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2312 if (rc < 0) {
2313 LOGE("metadata channel initialization failed");
2314 delete mMetadataChannel;
2315 mMetadataChannel = NULL;
2316 pthread_mutex_unlock(&mMutex);
2317 return rc;
2318 }
2319
Thierry Strudel2896d122017-02-23 19:18:03 -08002320 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002321 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002322 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002323 // Keep track of preview/video streams indices.
 2324     // There could be more than one preview stream, but only one video stream.
2325 int32_t video_stream_idx = -1;
2326 int32_t preview_stream_idx[streamList->num_streams];
2327 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002328 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2329 /* Allocate channel objects for the requested streams */
2330 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002331
Thierry Strudel3d639192016-09-09 11:52:26 -07002332 camera3_stream_t *newStream = streamList->streams[i];
2333 uint32_t stream_usage = newStream->usage;
2334 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2335 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2336 struct camera_info *p_info = NULL;
2337 pthread_mutex_lock(&gCamLock);
2338 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2339 pthread_mutex_unlock(&gCamLock);
2340 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2341 || IS_USAGE_ZSL(newStream->usage)) &&
2342 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002343 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002344 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002345 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2346 if (bUseCommonFeatureMask)
2347 zsl_ppmask = commonFeatureMask;
2348 else
2349 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002350 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002351 if (numStreamsOnEncoder > 0)
2352 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2353 else
2354 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002355 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002356 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002357 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002358 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002359 LOGH("Input stream configured, reprocess config");
2360 } else {
2361 //for non zsl streams find out the format
2362 switch (newStream->format) {
2363 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2364 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002365 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002366 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2367 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2368 /* add additional features to pp feature mask */
2369 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2370 mStreamConfigInfo.num_streams);
2371
2372 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2373 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2374 CAM_STREAM_TYPE_VIDEO;
2375 if (m_bTnrEnabled && m_bTnrVideo) {
2376 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2377 CAM_QCOM_FEATURE_CPP_TNR;
2378 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2379 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2380 ~CAM_QCOM_FEATURE_CDS;
2381 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002382 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2383 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2384 CAM_QTI_FEATURE_PPEISCORE;
2385 }
Binhao Line406f062017-05-03 14:39:44 -07002386 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2387 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2388 CAM_QCOM_FEATURE_GOOG_ZOOM;
2389 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002390 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002391 } else {
2392 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2393 CAM_STREAM_TYPE_PREVIEW;
2394 if (m_bTnrEnabled && m_bTnrPreview) {
2395 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2396 CAM_QCOM_FEATURE_CPP_TNR;
2397 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2398 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2399 ~CAM_QCOM_FEATURE_CDS;
2400 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002401 if(!m_bSwTnrPreview) {
2402 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2403 ~CAM_QTI_FEATURE_SW_TNR;
2404 }
Binhao Line406f062017-05-03 14:39:44 -07002405 if (is_goog_zoom_preview_enabled) {
2406 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2407 CAM_QCOM_FEATURE_GOOG_ZOOM;
2408 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002409 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002410 padding_info.width_padding = mSurfaceStridePadding;
2411 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002412 previewSize.width = (int32_t)newStream->width;
2413 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002414 }
2415 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2416 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2417 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2418 newStream->height;
2419 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2420 newStream->width;
2421 }
2422 }
2423 break;
2424 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002425 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002426 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2427 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2428 if (bUseCommonFeatureMask)
2429 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2430 commonFeatureMask;
2431 else
2432 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2433 CAM_QCOM_FEATURE_NONE;
2434 } else {
2435 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2436 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2437 }
2438 break;
2439 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002440 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002441 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2442 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2443 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2444 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2445 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002446 /* Remove rotation if it is not supported
2447 for 4K LiveVideo snapshot case (online processing) */
2448 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2449 CAM_QCOM_FEATURE_ROTATION)) {
2450 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2451 &= ~CAM_QCOM_FEATURE_ROTATION;
2452 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002453 } else {
2454 if (bUseCommonFeatureMask &&
2455 isOnEncoder(maxViewfinderSize, newStream->width,
2456 newStream->height)) {
2457 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2458 } else {
2459 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2460 }
2461 }
2462 if (isZsl) {
2463 if (zslStream) {
2464 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2465 (int32_t)zslStream->width;
2466 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2467 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002468 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2469 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002470 } else {
2471 LOGE("Error, No ZSL stream identified");
2472 pthread_mutex_unlock(&mMutex);
2473 return -EINVAL;
2474 }
2475 } else if (m_bIs4KVideo) {
2476 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2477 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2478 } else if (bYuv888OverrideJpeg) {
2479 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2480 (int32_t)largeYuv888Size.width;
2481 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2482 (int32_t)largeYuv888Size.height;
2483 }
2484 break;
2485 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2486 case HAL_PIXEL_FORMAT_RAW16:
2487 case HAL_PIXEL_FORMAT_RAW10:
2488 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2489 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2490 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002491 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2492 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2493 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2494 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2495 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2496 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2497 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2498 gCamCapability[mCameraId]->dt[mPDIndex];
2499 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2500 gCamCapability[mCameraId]->vc[mPDIndex];
2501 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002502 break;
2503 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002504 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002505 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2506 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2507 break;
2508 }
2509 }
2510
2511 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2512 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2513 gCamCapability[mCameraId]->color_arrangement);
2514
2515 if (newStream->priv == NULL) {
2516 //New stream, construct channel
2517 switch (newStream->stream_type) {
2518 case CAMERA3_STREAM_INPUT:
2519 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2520 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2521 break;
2522 case CAMERA3_STREAM_BIDIRECTIONAL:
2523 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2524 GRALLOC_USAGE_HW_CAMERA_WRITE;
2525 break;
2526 case CAMERA3_STREAM_OUTPUT:
2527 /* For video encoding stream, set read/write rarely
2528 * flag so that they may be set to un-cached */
2529 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2530 newStream->usage |=
2531 (GRALLOC_USAGE_SW_READ_RARELY |
2532 GRALLOC_USAGE_SW_WRITE_RARELY |
2533 GRALLOC_USAGE_HW_CAMERA_WRITE);
2534 else if (IS_USAGE_ZSL(newStream->usage))
2535 {
2536 LOGD("ZSL usage flag skipping");
2537 }
2538 else if (newStream == zslStream
2539 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2540 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2541 } else
2542 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2543 break;
2544 default:
2545 LOGE("Invalid stream_type %d", newStream->stream_type);
2546 break;
2547 }
2548
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002549 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002550 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2551 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2552 QCamera3ProcessingChannel *channel = NULL;
2553 switch (newStream->format) {
2554 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2555 if ((newStream->usage &
2556 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2557 (streamList->operation_mode ==
2558 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2559 ) {
2560 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2561 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002562 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002563 this,
2564 newStream,
2565 (cam_stream_type_t)
2566 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2567 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2568 mMetadataChannel,
2569 0); //heap buffers are not required for HFR video channel
2570 if (channel == NULL) {
2571 LOGE("allocation of channel failed");
2572 pthread_mutex_unlock(&mMutex);
2573 return -ENOMEM;
2574 }
2575 //channel->getNumBuffers() will return 0 here so use
 2576                         //MAX_INFLIGHT_HFR_REQUESTS
2577 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2578 newStream->priv = channel;
2579 LOGI("num video buffers in HFR mode: %d",
2580 MAX_INFLIGHT_HFR_REQUESTS);
2581 } else {
2582 /* Copy stream contents in HFR preview only case to create
2583 * dummy batch channel so that sensor streaming is in
2584 * HFR mode */
2585 if (!m_bIsVideo && (streamList->operation_mode ==
2586 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2587 mDummyBatchStream = *newStream;
2588 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002589 int bufferCount = MAX_INFLIGHT_REQUESTS;
2590 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2591 CAM_STREAM_TYPE_VIDEO) {
2592 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2593 bufferCount = MAX_VIDEO_BUFFERS;
2594 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002595 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2596 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002597 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002598 this,
2599 newStream,
2600 (cam_stream_type_t)
2601 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2602 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2603 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002604 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002605 if (channel == NULL) {
2606 LOGE("allocation of channel failed");
2607 pthread_mutex_unlock(&mMutex);
2608 return -ENOMEM;
2609 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002610 /* disable UBWC for preview, though supported,
2611 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002612 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002613 (previewSize.width == (int32_t)videoWidth)&&
2614 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002615 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002616 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002617 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002618 /* When goog_zoom is linked to the preview or video stream,
2619                  * disable UBWC for the linked stream */
2620 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2621 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2622 channel->setUBWCEnabled(false);
2623 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002624 newStream->max_buffers = channel->getNumBuffers();
2625 newStream->priv = channel;
2626 }
2627 break;
2628 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2629 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2630 mChannelHandle,
2631 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002632 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002633 this,
2634 newStream,
2635 (cam_stream_type_t)
2636 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2637 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2638 mMetadataChannel);
2639 if (channel == NULL) {
2640 LOGE("allocation of YUV channel failed");
2641 pthread_mutex_unlock(&mMutex);
2642 return -ENOMEM;
2643 }
2644 newStream->max_buffers = channel->getNumBuffers();
2645 newStream->priv = channel;
2646 break;
2647 }
2648 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2649 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002650 case HAL_PIXEL_FORMAT_RAW10: {
2651 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2652 (HAL_DATASPACE_DEPTH != newStream->data_space))
2653 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002654 mRawChannel = new QCamera3RawChannel(
2655 mCameraHandle->camera_handle, mChannelHandle,
2656 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002657 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002658 this, newStream,
2659 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002660 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002661 if (mRawChannel == NULL) {
2662 LOGE("allocation of raw channel failed");
2663 pthread_mutex_unlock(&mMutex);
2664 return -ENOMEM;
2665 }
2666 newStream->max_buffers = mRawChannel->getNumBuffers();
2667 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2668 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002669 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002670 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002671 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2672 mDepthChannel = new QCamera3DepthChannel(
2673 mCameraHandle->camera_handle, mChannelHandle,
2674 mCameraHandle->ops, NULL, NULL, &padding_info,
2675 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2676 mMetadataChannel);
2677 if (NULL == mDepthChannel) {
2678 LOGE("Allocation of depth channel failed");
2679 pthread_mutex_unlock(&mMutex);
2680 return NO_MEMORY;
2681 }
2682 newStream->priv = mDepthChannel;
2683 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2684 } else {
2685 // Max live snapshot inflight buffer is 1. This is to mitigate
2686 // frame drop issues for video snapshot. The more buffers being
2687 // allocated, the more frame drops there are.
2688 mPictureChannel = new QCamera3PicChannel(
2689 mCameraHandle->camera_handle, mChannelHandle,
2690 mCameraHandle->ops, captureResultCb,
2691 setBufferErrorStatus, &padding_info, this, newStream,
2692 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2693 m_bIs4KVideo, isZsl, mMetadataChannel,
2694 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2695 if (mPictureChannel == NULL) {
2696 LOGE("allocation of channel failed");
2697 pthread_mutex_unlock(&mMutex);
2698 return -ENOMEM;
2699 }
2700 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2701 newStream->max_buffers = mPictureChannel->getNumBuffers();
2702 mPictureChannel->overrideYuvSize(
2703 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2704 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002705 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 break;
2707
2708 default:
2709 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002710 pthread_mutex_unlock(&mMutex);
2711 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002712 }
2713 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2714 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2715 } else {
2716 LOGE("Error, Unknown stream type");
2717 pthread_mutex_unlock(&mMutex);
2718 return -EINVAL;
2719 }
2720
2721 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002722 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002723 // Here we only care whether it's EIS3 or not
2724 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2725 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2726 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2727 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002728 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002729 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002730 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002731 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2732 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2733 }
2734 }
2735
2736 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2737 it != mStreamInfo.end(); it++) {
2738 if ((*it)->stream == newStream) {
2739 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2740 break;
2741 }
2742 }
2743 } else {
2744 // Channel already exists for this stream
2745 // Do nothing for now
2746 }
2747 padding_info = gCamCapability[mCameraId]->padding_info;
2748
Emilian Peev7650c122017-01-19 08:24:33 -08002749         /* Do not add entries for the input and depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002750          * since there is no real stream associated with them
2751          */
Emilian Peev7650c122017-01-19 08:24:33 -08002752 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002753 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2754 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002755 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002756 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002757 }
2758
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002759 // Let buffer dispatcher know the configured streams.
2760 mOutputBufferDispatcher.configureStreams(streamList);
2761
Binhao Lincdb362a2017-04-20 13:31:54 -07002762     // By default, preview stream TNR is disabled.
2763     // Enable TNR for the preview stream if all conditions below are satisfied:
2764     // 1. resolution <= 1080p.
2765     // 2. preview resolution == video resolution.
2766     // 3. video stream TNR is enabled.
2767     // 4. EIS 2.0 is in use.
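    // Illustrative example (hypothetical stream sizes): a 1920x1080 video stream with
    // TNR enabled under EIS 2.0, paired with a 1920x1080 preview stream, gets
    // CAM_QCOM_FEATURE_CPP_TNR added to the preview postprocess mask below (with CDS
    // cleared, since the two are mutually exclusive); a 3840x2160 video stream would not.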
2768 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2769 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2770 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2771 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2772 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2773 video_stream->width == preview_stream->width &&
2774 video_stream->height == preview_stream->height) {
2775 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2776 CAM_QCOM_FEATURE_CPP_TNR;
2777 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2778 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2779 ~CAM_QCOM_FEATURE_CDS;
2780 }
2781 }
2782
Thierry Strudel2896d122017-02-23 19:18:03 -08002783 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2784 onlyRaw = false;
2785 }
2786
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002787 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002788 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002789 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002790 cam_analysis_info_t analysisInfo;
2791 int32_t ret = NO_ERROR;
2792 ret = mCommon.getAnalysisInfo(
2793 FALSE,
2794 analysisFeatureMask,
2795 &analysisInfo);
2796 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002797 cam_color_filter_arrangement_t analysis_color_arrangement =
2798 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2799 CAM_FILTER_ARRANGEMENT_Y :
2800 gCamCapability[mCameraId]->color_arrangement);
2801 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2802 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002803 cam_dimension_t analysisDim;
2804 analysisDim = mCommon.getMatchingDimension(previewSize,
2805 analysisInfo.analysis_recommended_res);
2806
2807 mAnalysisChannel = new QCamera3SupportChannel(
2808 mCameraHandle->camera_handle,
2809 mChannelHandle,
2810 mCameraHandle->ops,
2811 &analysisInfo.analysis_padding_info,
2812 analysisFeatureMask,
2813 CAM_STREAM_TYPE_ANALYSIS,
2814 &analysisDim,
2815 (analysisInfo.analysis_format
2816 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2817 : CAM_FORMAT_YUV_420_NV21),
2818 analysisInfo.hw_analysis_supported,
2819 gCamCapability[mCameraId]->color_arrangement,
2820 this,
2821 0); // force buffer count to 0
2822 } else {
2823 LOGW("getAnalysisInfo failed, ret = %d", ret);
2824 }
2825 if (!mAnalysisChannel) {
2826 LOGW("Analysis channel cannot be created");
2827 }
2828 }
2829
Thierry Strudel3d639192016-09-09 11:52:26 -07002830 //RAW DUMP channel
2831 if (mEnableRawDump && isRawStreamRequested == false){
2832 cam_dimension_t rawDumpSize;
2833 rawDumpSize = getMaxRawSize(mCameraId);
2834 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2835 setPAAFSupport(rawDumpFeatureMask,
2836 CAM_STREAM_TYPE_RAW,
2837 gCamCapability[mCameraId]->color_arrangement);
2838 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2839 mChannelHandle,
2840 mCameraHandle->ops,
2841 rawDumpSize,
2842 &padding_info,
2843 this, rawDumpFeatureMask);
2844 if (!mRawDumpChannel) {
2845 LOGE("Raw Dump channel cannot be created");
2846 pthread_mutex_unlock(&mMutex);
2847 return -ENOMEM;
2848 }
2849 }
2850
Thierry Strudel3d639192016-09-09 11:52:26 -07002851 if (mAnalysisChannel) {
2852 cam_analysis_info_t analysisInfo;
2853 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2854 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2855 CAM_STREAM_TYPE_ANALYSIS;
2856 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2857 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002858 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002859 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2860 &analysisInfo);
2861 if (rc != NO_ERROR) {
2862 LOGE("getAnalysisInfo failed, ret = %d", rc);
2863 pthread_mutex_unlock(&mMutex);
2864 return rc;
2865 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002866 cam_color_filter_arrangement_t analysis_color_arrangement =
2867 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2868 CAM_FILTER_ARRANGEMENT_Y :
2869 gCamCapability[mCameraId]->color_arrangement);
2870 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2871 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2872 analysis_color_arrangement);
2873
Thierry Strudel3d639192016-09-09 11:52:26 -07002874 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002875 mCommon.getMatchingDimension(previewSize,
2876 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002877 mStreamConfigInfo.num_streams++;
2878 }
2879
Thierry Strudel2896d122017-02-23 19:18:03 -08002880 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002881 cam_analysis_info_t supportInfo;
2882 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2883 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2884 setPAAFSupport(callbackFeatureMask,
2885 CAM_STREAM_TYPE_CALLBACK,
2886 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002887 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002888 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002889 if (ret != NO_ERROR) {
2890 /* Ignore the error for Mono camera
2891 * because the PAAF bit mask is only set
2892 * for CAM_STREAM_TYPE_ANALYSIS stream type
2893 */
2894 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2895 LOGW("getAnalysisInfo failed, ret = %d", ret);
2896 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002897 }
2898 mSupportChannel = new QCamera3SupportChannel(
2899 mCameraHandle->camera_handle,
2900 mChannelHandle,
2901 mCameraHandle->ops,
2902 &gCamCapability[mCameraId]->padding_info,
2903 callbackFeatureMask,
2904 CAM_STREAM_TYPE_CALLBACK,
2905 &QCamera3SupportChannel::kDim,
2906 CAM_FORMAT_YUV_420_NV21,
2907 supportInfo.hw_analysis_supported,
2908 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002909 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002910 if (!mSupportChannel) {
2911 LOGE("dummy channel cannot be created");
2912 pthread_mutex_unlock(&mMutex);
2913 return -ENOMEM;
2914 }
2915 }
2916
2917 if (mSupportChannel) {
2918 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2919 QCamera3SupportChannel::kDim;
2920 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2921 CAM_STREAM_TYPE_CALLBACK;
2922 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2923 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2924 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2925 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2926 gCamCapability[mCameraId]->color_arrangement);
2927 mStreamConfigInfo.num_streams++;
2928 }
2929
2930 if (mRawDumpChannel) {
2931 cam_dimension_t rawSize;
2932 rawSize = getMaxRawSize(mCameraId);
2933 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2934 rawSize;
2935 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2936 CAM_STREAM_TYPE_RAW;
2937 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2938 CAM_QCOM_FEATURE_NONE;
2939 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2940 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2941 gCamCapability[mCameraId]->color_arrangement);
2942 mStreamConfigInfo.num_streams++;
2943 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002944
2945 if (mHdrPlusRawSrcChannel) {
2946 cam_dimension_t rawSize;
2947 rawSize = getMaxRawSize(mCameraId);
2948 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2949 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2950 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2951 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2952 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2953 gCamCapability[mCameraId]->color_arrangement);
2954 mStreamConfigInfo.num_streams++;
2955 }
2956
Thierry Strudel3d639192016-09-09 11:52:26 -07002957     /* In HFR mode, if no video stream is added, create a dummy channel so that
2958      * the ISP can run in batch mode even for the preview-only case. This channel is
2959      * never 'start'ed (no stream-on), it is only 'initialized' */
2960 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2961 !m_bIsVideo) {
2962 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2963 setPAAFSupport(dummyFeatureMask,
2964 CAM_STREAM_TYPE_VIDEO,
2965 gCamCapability[mCameraId]->color_arrangement);
2966 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2967 mChannelHandle,
2968 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002969 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002970 this,
2971 &mDummyBatchStream,
2972 CAM_STREAM_TYPE_VIDEO,
2973 dummyFeatureMask,
2974 mMetadataChannel);
2975 if (NULL == mDummyBatchChannel) {
2976             LOGE("creation of mDummyBatchChannel failed. "
2977                     "Preview will use non-HFR sensor mode");
2978 }
2979 }
2980 if (mDummyBatchChannel) {
2981 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2982 mDummyBatchStream.width;
2983 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2984 mDummyBatchStream.height;
2985 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2986 CAM_STREAM_TYPE_VIDEO;
2987 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2988 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2989 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2990 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2991 gCamCapability[mCameraId]->color_arrangement);
2992 mStreamConfigInfo.num_streams++;
2993 }
2994
2995 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2996 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002997 m_bIs4KVideo ? 0 :
2998 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002999
3000 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3001 for (pendingRequestIterator i = mPendingRequestsList.begin();
3002 i != mPendingRequestsList.end();) {
3003 i = erasePendingRequest(i);
3004 }
3005 mPendingFrameDropList.clear();
3006 // Initialize/Reset the pending buffers list
3007 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3008 req.mPendingBufferList.clear();
3009 }
3010 mPendingBuffersMap.mPendingBuffersInRequest.clear();
3011
Thierry Strudel3d639192016-09-09 11:52:26 -07003012 mCurJpegMeta.clear();
3013 //Get min frame duration for this streams configuration
3014 deriveMinFrameDuration();
3015
Chien-Yu Chenee335912017-02-09 17:53:20 -08003016 mFirstPreviewIntentSeen = false;
3017
3018     // Disable HDR+ if it is enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003019 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003020 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
3021 finishHdrPlusClientOpeningLocked(l);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003022 disableHdrPlusModeLocked();
3023 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003024
Thierry Strudel3d639192016-09-09 11:52:26 -07003025 // Update state
3026 mState = CONFIGURED;
3027
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003028 mFirstMetadataCallback = true;
3029
Thierry Strudel3d639192016-09-09 11:52:26 -07003030 pthread_mutex_unlock(&mMutex);
3031
3032 return rc;
3033}
3034
3035/*===========================================================================
3036 * FUNCTION : validateCaptureRequest
3037 *
3038 * DESCRIPTION: validate a capture request from camera service
3039 *
3040 * PARAMETERS :
3041 * @request : request from framework to process
3042 *
3043  * RETURN     : NO_ERROR on success; BAD_VALUE if the request is malformed
3044 *
3045 *==========================================================================*/
3046int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003047 camera3_capture_request_t *request,
3048 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003049{
3050 ssize_t idx = 0;
3051 const camera3_stream_buffer_t *b;
3052 CameraMetadata meta;
3053
3054 /* Sanity check the request */
3055 if (request == NULL) {
3056 LOGE("NULL capture request");
3057 return BAD_VALUE;
3058 }
3059
3060 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3061 /*settings cannot be null for the first request*/
3062 return BAD_VALUE;
3063 }
3064
3065 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003066 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3067 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003068         LOGE("Request %d: No output buffers provided!",
3069                 frameNumber);
3070 return BAD_VALUE;
3071 }
3072 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3073         LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
3074 request->num_output_buffers, MAX_NUM_STREAMS);
3075 return BAD_VALUE;
3076 }
3077 if (request->input_buffer != NULL) {
3078 b = request->input_buffer;
3079 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3080 LOGE("Request %d: Buffer %ld: Status not OK!",
3081 frameNumber, (long)idx);
3082 return BAD_VALUE;
3083 }
3084 if (b->release_fence != -1) {
3085 LOGE("Request %d: Buffer %ld: Has a release fence!",
3086 frameNumber, (long)idx);
3087 return BAD_VALUE;
3088 }
3089 if (b->buffer == NULL) {
3090 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3091 frameNumber, (long)idx);
3092 return BAD_VALUE;
3093 }
3094 }
3095
3096 // Validate all buffers
3097 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003098 if (b == NULL) {
3099 return BAD_VALUE;
3100 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003101 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003102 QCamera3ProcessingChannel *channel =
3103 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3104 if (channel == NULL) {
3105 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3106 frameNumber, (long)idx);
3107 return BAD_VALUE;
3108 }
3109 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3110 LOGE("Request %d: Buffer %ld: Status not OK!",
3111 frameNumber, (long)idx);
3112 return BAD_VALUE;
3113 }
3114 if (b->release_fence != -1) {
3115 LOGE("Request %d: Buffer %ld: Has a release fence!",
3116 frameNumber, (long)idx);
3117 return BAD_VALUE;
3118 }
3119 if (b->buffer == NULL) {
3120 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3121 frameNumber, (long)idx);
3122 return BAD_VALUE;
3123 }
3124 if (*(b->buffer) == NULL) {
3125 LOGE("Request %d: Buffer %ld: NULL private handle!",
3126 frameNumber, (long)idx);
3127 return BAD_VALUE;
3128 }
3129 idx++;
3130 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003131 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003132 return NO_ERROR;
3133}
3134
3135/*===========================================================================
3136 * FUNCTION : deriveMinFrameDuration
3137 *
3138  * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3139 * on currently configured streams.
3140 *
3141 * PARAMETERS : NONE
3142 *
3143 * RETURN : NONE
3144 *
3145 *==========================================================================*/
3146void QCamera3HardwareInterface::deriveMinFrameDuration()
3147{
3148 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003149 bool hasRaw = false;
3150
3151 mMinRawFrameDuration = 0;
3152 mMinJpegFrameDuration = 0;
3153 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003154
3155 maxJpegDim = 0;
3156 maxProcessedDim = 0;
3157 maxRawDim = 0;
3158
3159 // Figure out maximum jpeg, processed, and raw dimensions
3160 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3161 it != mStreamInfo.end(); it++) {
3162
3163 // Input stream doesn't have valid stream_type
3164 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3165 continue;
3166
3167 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3168 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3169 if (dimension > maxJpegDim)
3170 maxJpegDim = dimension;
3171 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3172 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3173 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003174 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003175 if (dimension > maxRawDim)
3176 maxRawDim = dimension;
3177 } else {
3178 if (dimension > maxProcessedDim)
3179 maxProcessedDim = dimension;
3180 }
3181 }
3182
3183 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3184 MAX_SIZES_CNT);
3185
3186 //Assume all jpeg dimensions are in processed dimensions.
3187 if (maxJpegDim > maxProcessedDim)
3188 maxProcessedDim = maxJpegDim;
3189 //Find the smallest raw dimension that is greater or equal to jpeg dimension
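    // e.g. (hypothetical sizes): a 4032x3024 JPEG stream with only a smaller RAW stream
    // configured would pick the smallest capability raw size that is >= 4032x3024 here.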
Jason Lee2d0ab112017-06-21 18:03:05 -07003190 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003191 maxRawDim = INT32_MAX;
3192
3193 for (size_t i = 0; i < count; i++) {
3194 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3195 gCamCapability[mCameraId]->raw_dim[i].height;
3196 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3197 maxRawDim = dimension;
3198 }
3199 }
3200
3201 //Find minimum durations for processed, jpeg, and raw
3202 for (size_t i = 0; i < count; i++) {
3203 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3204 gCamCapability[mCameraId]->raw_dim[i].height) {
3205 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3206 break;
3207 }
3208 }
3209 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3210 for (size_t i = 0; i < count; i++) {
3211 if (maxProcessedDim ==
3212 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3213 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3214 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3215 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3216 break;
3217 }
3218 }
3219}
3220
3221/*===========================================================================
3222 * FUNCTION : getMinFrameDuration
3223 *
3224  * DESCRIPTION: get minimum frame duration based on the minimum frame durations
3225  *              derived for the configured streams and the current request configuration.
3226  *
3227  * PARAMETERS : @request: request sent by the frameworks
3228  *
3229  * RETURN     : min frame duration for a particular request
3230 *
3231 *==========================================================================*/
3232int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3233{
3234 bool hasJpegStream = false;
3235 bool hasRawStream = false;
3236 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3237 const camera3_stream_t *stream = request->output_buffers[i].stream;
3238 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3239 hasJpegStream = true;
3240 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3241 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3242 stream->format == HAL_PIXEL_FORMAT_RAW16)
3243 hasRawStream = true;
3244 }
3245
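    // e.g. (hypothetical request): a request with only preview buffers returns
    // MAX(mMinRawFrameDuration, mMinProcessedFrameDuration); adding a BLOB buffer
    // also folds in mMinJpegFrameDuration.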
3246 if (!hasJpegStream)
3247 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3248 else
3249 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3250}
3251
3252/*===========================================================================
3253 * FUNCTION : handleBuffersDuringFlushLock
3254 *
3255 * DESCRIPTION: Account for buffers returned from back-end during flush
3256 * This function is executed while mMutex is held by the caller.
3257 *
3258 * PARAMETERS :
3259 * @buffer: image buffer for the callback
3260 *
3261 * RETURN :
3262 *==========================================================================*/
3263void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3264{
3265 bool buffer_found = false;
3266 for (List<PendingBuffersInRequest>::iterator req =
3267 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3268 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3269 for (List<PendingBufferInfo>::iterator i =
3270 req->mPendingBufferList.begin();
3271 i != req->mPendingBufferList.end(); i++) {
3272 if (i->buffer == buffer->buffer) {
3273 mPendingBuffersMap.numPendingBufsAtFlush--;
3274 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3275 buffer->buffer, req->frame_number,
3276 mPendingBuffersMap.numPendingBufsAtFlush);
3277 buffer_found = true;
3278 break;
3279 }
3280 }
3281 if (buffer_found) {
3282 break;
3283 }
3284 }
3285 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3286 //signal the flush()
3287 LOGD("All buffers returned to HAL. Continue flush");
3288 pthread_cond_signal(&mBuffersCond);
3289 }
3290}
3291
Thierry Strudel3d639192016-09-09 11:52:26 -07003292/*===========================================================================
3293 * FUNCTION : handleBatchMetadata
3294 *
3295 * DESCRIPTION: Handles metadata buffer callback in batch mode
3296 *
3297 * PARAMETERS : @metadata_buf: metadata buffer
3298 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3299 * the meta buf in this method
3300 *
3301 * RETURN :
3302 *
3303 *==========================================================================*/
3304void QCamera3HardwareInterface::handleBatchMetadata(
3305 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3306{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003307 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003308
3309 if (NULL == metadata_buf) {
3310 LOGE("metadata_buf is NULL");
3311 return;
3312 }
3313     /* In batch mode, the metadata will contain the frame number and timestamp of
3314      * the last frame in the batch. Eg: a batch containing buffers from requests
3315      * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3316      * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3317      * multiple process_capture_results */
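    // Worked example (following the note above): for a batch covering requests 5..8 the
    // backend reports last_frame_number = 8; first_frame_number is looked up as 5, so
    // frameNumDiff = 4 and the loop below replays handleMetadataWithLock() four times with
    // inferred frame numbers 5,6,7,8 and timestamps spaced by NSEC_PER_SEC / mHFRVideoFps.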
3318 metadata_buffer_t *metadata =
3319 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3320 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3321 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3322 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3323 uint32_t frame_number = 0, urgent_frame_number = 0;
3324 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3325 bool invalid_metadata = false;
3326 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3327 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003328 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003329
3330 int32_t *p_frame_number_valid =
3331 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3332 uint32_t *p_frame_number =
3333 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3334 int64_t *p_capture_time =
3335 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3336 int32_t *p_urgent_frame_number_valid =
3337 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3338 uint32_t *p_urgent_frame_number =
3339 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3340
3341 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3342 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3343 (NULL == p_urgent_frame_number)) {
3344 LOGE("Invalid metadata");
3345 invalid_metadata = true;
3346 } else {
3347 frame_number_valid = *p_frame_number_valid;
3348 last_frame_number = *p_frame_number;
3349 last_frame_capture_time = *p_capture_time;
3350 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3351 last_urgent_frame_number = *p_urgent_frame_number;
3352 }
3353
3354     /* In batch mode, when no video buffers are requested, set_parms are sent
3355 * for every capture_request. The difference between consecutive urgent
3356 * frame numbers and frame numbers should be used to interpolate the
3357 * corresponding frame numbers and time stamps */
3358 pthread_mutex_lock(&mMutex);
3359 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003360 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3361 if(idx < 0) {
3362 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3363 last_urgent_frame_number);
3364 mState = ERROR;
3365 pthread_mutex_unlock(&mMutex);
3366 return;
3367 }
3368 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003369 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3370 first_urgent_frame_number;
3371
3372 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3373 urgent_frame_number_valid,
3374 first_urgent_frame_number, last_urgent_frame_number);
3375 }
3376
3377 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003378 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3379 if(idx < 0) {
3380 LOGE("Invalid frame number received: %d. Irrecoverable error",
3381 last_frame_number);
3382 mState = ERROR;
3383 pthread_mutex_unlock(&mMutex);
3384 return;
3385 }
3386 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003387 frameNumDiff = last_frame_number + 1 -
3388 first_frame_number;
3389 mPendingBatchMap.removeItem(last_frame_number);
3390
3391 LOGD("frm: valid: %d frm_num: %d - %d",
3392 frame_number_valid,
3393 first_frame_number, last_frame_number);
3394
3395 }
3396 pthread_mutex_unlock(&mMutex);
3397
3398 if (urgent_frame_number_valid || frame_number_valid) {
3399 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3400 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3401 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3402 urgentFrameNumDiff, last_urgent_frame_number);
3403 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3404 LOGE("frameNumDiff: %d frameNum: %d",
3405 frameNumDiff, last_frame_number);
3406 }
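    // loopCount is the larger of the two gaps so a single pass below can emit both the
    // inferred urgent frame numbers and the inferred regular frame numbers, marking
    // whichever one runs out first as invalid for the remaining iterations.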
3407
3408 for (size_t i = 0; i < loopCount; i++) {
3409 /* handleMetadataWithLock is called even for invalid_metadata for
3410 * pipeline depth calculation */
3411 if (!invalid_metadata) {
3412 /* Infer frame number. Batch metadata contains frame number of the
3413 * last frame */
3414 if (urgent_frame_number_valid) {
3415 if (i < urgentFrameNumDiff) {
3416 urgent_frame_number =
3417 first_urgent_frame_number + i;
3418 LOGD("inferred urgent frame_number: %d",
3419 urgent_frame_number);
3420 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3421 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3422 } else {
3423 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3424 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3425 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3426 }
3427 }
3428
3429 /* Infer frame number. Batch metadata contains frame number of the
3430 * last frame */
3431 if (frame_number_valid) {
3432 if (i < frameNumDiff) {
3433 frame_number = first_frame_number + i;
3434 LOGD("inferred frame_number: %d", frame_number);
3435 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3436 CAM_INTF_META_FRAME_NUMBER, frame_number);
3437 } else {
3438 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3439 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3440 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3441 }
3442 }
3443
3444 if (last_frame_capture_time) {
3445 //Infer timestamp
3446 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003447 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003448 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003449 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003450 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3451 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3452 LOGD("batch capture_time: %lld, capture_time: %lld",
3453 last_frame_capture_time, capture_time);
3454 }
3455 }
3456 pthread_mutex_lock(&mMutex);
3457 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003458 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003459 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3460 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003461                 &is_metabuf_queued /* whether metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003462 pthread_mutex_unlock(&mMutex);
3463 }
3464
3465 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003466 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003467 mMetadataChannel->bufDone(metadata_buf);
3468 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003469 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003470 }
3471}
3472
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003473void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3474 camera3_error_msg_code_t errorCode)
3475{
3476 camera3_notify_msg_t notify_msg;
3477 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3478 notify_msg.type = CAMERA3_MSG_ERROR;
3479 notify_msg.message.error.error_code = errorCode;
3480 notify_msg.message.error.error_stream = NULL;
3481 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003482 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003483
3484 return;
3485}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003486
3487/*===========================================================================
3488 * FUNCTION : sendPartialMetadataWithLock
3489 *
3490 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3491 *
3492 * PARAMETERS : @metadata: metadata buffer
3493 * @requestIter: The iterator for the pending capture request for
3494  *                           which the partial result is being sent
3495 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3496 * last urgent metadata in a batch. Always true for non-batch mode
3497 *
3498 * RETURN :
3499 *
3500 *==========================================================================*/
3501
3502void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3503 metadata_buffer_t *metadata,
3504 const pendingRequestIterator requestIter,
3505 bool lastUrgentMetadataInBatch)
3506{
3507 camera3_capture_result_t result;
3508 memset(&result, 0, sizeof(camera3_capture_result_t));
3509
3510 requestIter->partial_result_cnt++;
3511
3512 // Extract 3A metadata
3513 result.result = translateCbUrgentMetadataToResultMetadata(
3514 metadata, lastUrgentMetadataInBatch);
3515 // Populate metadata result
3516 result.frame_number = requestIter->frame_number;
3517 result.num_output_buffers = 0;
3518 result.output_buffers = NULL;
3519 result.partial_result = requestIter->partial_result_cnt;
3520
3521 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003522 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003523 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3524 // Notify HDR+ client about the partial metadata.
3525 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3526 result.partial_result == PARTIAL_RESULT_COUNT);
3527 }
3528 }
3529
3530 orchestrateResult(&result);
3531 LOGD("urgent frame_number = %u", result.frame_number);
3532 free_camera_metadata((camera_metadata_t *)result.result);
3533}
3534
Thierry Strudel3d639192016-09-09 11:52:26 -07003535/*===========================================================================
3536 * FUNCTION : handleMetadataWithLock
3537 *
3538 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3539 *
3540 * PARAMETERS : @metadata_buf: metadata buffer
3541 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3542 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003543 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3544 * last urgent metadata in a batch. Always true for non-batch mode
3545 * @lastMetadataInBatch: Boolean to indicate whether this is the
3546 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003547 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3548 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003549 *
3550 * RETURN :
3551 *
3552 *==========================================================================*/
3553void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003554 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003555 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3556 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003557{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003558 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003559 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3560 //during flush do not send metadata from this thread
3561 LOGD("not sending metadata during flush or when mState is error");
3562 if (free_and_bufdone_meta_buf) {
3563 mMetadataChannel->bufDone(metadata_buf);
3564 free(metadata_buf);
3565 }
3566 return;
3567 }
3568
3569 //not in flush
3570 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3571 int32_t frame_number_valid, urgent_frame_number_valid;
3572 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003573 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003574 nsecs_t currentSysTime;
3575
3576 int32_t *p_frame_number_valid =
3577 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3578 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3579 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003580 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003581 int32_t *p_urgent_frame_number_valid =
3582 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3583 uint32_t *p_urgent_frame_number =
3584 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3585 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3586 metadata) {
3587 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3588 *p_frame_number_valid, *p_frame_number);
3589 }
3590
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003591 camera_metadata_t *resultMetadata = nullptr;
3592
Thierry Strudel3d639192016-09-09 11:52:26 -07003593 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3594 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3595 LOGE("Invalid metadata");
3596 if (free_and_bufdone_meta_buf) {
3597 mMetadataChannel->bufDone(metadata_buf);
3598 free(metadata_buf);
3599 }
3600 goto done_metadata;
3601 }
3602 frame_number_valid = *p_frame_number_valid;
3603 frame_number = *p_frame_number;
3604 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003605 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003606 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3607 urgent_frame_number = *p_urgent_frame_number;
3608 currentSysTime = systemTime(CLOCK_MONOTONIC);
3609
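    // If the sensor timestamps are not calibrated to the boottime base, estimate the
    // current CLOCK_BOOTTIME-to-CLOCK_MONOTONIC offset: sample both clocks a few times,
    // keep the sample with the smallest monotonic gap (least scheduling noise), and
    // adjust capture_time by that offset.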
Jason Lee603176d2017-05-31 11:43:27 -07003610 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3611 const int tries = 3;
3612         nsecs_t bestGap = 0, measured = 0;
3613 for (int i = 0; i < tries; ++i) {
3614 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3615 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3616 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3617 const nsecs_t gap = tmono2 - tmono;
3618 if (i == 0 || gap < bestGap) {
3619 bestGap = gap;
3620 measured = tbase - ((tmono + tmono2) >> 1);
3621 }
3622 }
3623 capture_time -= measured;
3624 }
3625
Thierry Strudel3d639192016-09-09 11:52:26 -07003626 // Detect if buffers from any requests are overdue
3627 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003628 int64_t timeout;
3629 {
3630 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3631 // If there is a pending HDR+ request, the following requests may be blocked until the
3632 // HDR+ request is done. So allow a longer timeout.
3633 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3634 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3635 }
3636
3637 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003638 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003639 assert(missed.stream->priv);
3640 if (missed.stream->priv) {
3641 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3642 assert(ch->mStreams[0]);
3643 if (ch->mStreams[0]) {
3644 LOGE("Cancel missing frame = %d, buffer = %p,"
3645 "stream type = %d, stream format = %d",
3646 req.frame_number, missed.buffer,
3647 ch->mStreams[0]->getMyType(), missed.stream->format);
3648 ch->timeoutFrame(req.frame_number);
3649 }
3650 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003651 }
3652 }
3653 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003654     //For the very first metadata callback, regardless of whether it contains a valid
3655     //frame number, send the partial metadata for the requests that need a jump-start.
3656     //Note that this has to be done even if the metadata doesn't contain a valid
3657     //urgent frame number, because when only 1 request is ever submitted to the HAL,
3658     //there won't be a subsequent valid urgent frame number.
3659 if (mFirstMetadataCallback) {
3660 for (pendingRequestIterator i =
3661 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3662 if (i->bUseFirstPartial) {
3663 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3664 }
3665 }
3666 mFirstMetadataCallback = false;
3667 }
3668
Thierry Strudel3d639192016-09-09 11:52:26 -07003669 //Partial result on process_capture_result for timestamp
3670 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003671 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003672
3673             //Received an urgent frame number, handle it
3674 //using partial results
3675 for (pendingRequestIterator i =
3676 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3677 LOGD("Iterator Frame = %d urgent frame = %d",
3678 i->frame_number, urgent_frame_number);
3679
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003680 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003681 (i->partial_result_cnt == 0)) {
3682 LOGE("Error: HAL missed urgent metadata for frame number %d",
3683 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003684 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003685 }
3686
3687 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003688 i->partial_result_cnt == 0) {
3689 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003690 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3691 // Instant AEC settled for this frame.
3692 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3693 mInstantAECSettledFrameNumber = urgent_frame_number;
3694 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003695 break;
3696 }
3697 }
3698 }
3699
3700 if (!frame_number_valid) {
3701 LOGD("Not a valid normal frame number, used as SOF only");
3702 if (free_and_bufdone_meta_buf) {
3703 mMetadataChannel->bufDone(metadata_buf);
3704 free(metadata_buf);
3705 }
3706 goto done_metadata;
3707 }
3708 LOGH("valid frame_number = %u, capture_time = %lld",
3709 frame_number, capture_time);
3710
Emilian Peev7650c122017-01-19 08:24:33 -08003711 if (metadata->is_depth_data_valid) {
3712 handleDepthDataLocked(metadata->depth_data, frame_number);
3713 }
3714
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003715     // Check whether any stream buffer corresponding to this frame is dropped or not.
3716     // If dropped, then send the ERROR_BUFFER for the corresponding stream.
3717     // OR, if instant AEC is enabled, drop frames until AEC is settled.
3718 for (auto & pendingRequest : mPendingRequestsList) {
3719 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3720 mInstantAECSettledFrameNumber)) {
3721 camera3_notify_msg_t notify_msg = {};
3722 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003723 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003724 QCamera3ProcessingChannel *channel =
3725 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003726 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003727 if (p_cam_frame_drop) {
3728 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003729 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003730 // Got the stream ID for drop frame.
3731 dropFrame = true;
3732 break;
3733 }
3734 }
3735 } else {
3736 // This is instant AEC case.
3737                     // For instant AEC, drop the stream until AEC is settled.
3738 dropFrame = true;
3739 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003740
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003741 if (dropFrame) {
3742 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3743 if (p_cam_frame_drop) {
3744 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003745 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003746 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003747 } else {
3748 // For instant AEC, inform frame drop and frame number
3749 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3750 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003751 pendingRequest.frame_number, streamID,
3752 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003753 }
3754 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003755 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003756 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003757 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003758 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003759 if (p_cam_frame_drop) {
3760 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003761 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003762 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003763 } else {
3764 // For instant AEC, inform frame drop and frame number
3765 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3766 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003767 pendingRequest.frame_number, streamID,
3768 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003769 }
3770 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003771 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003772 PendingFrameDrop.stream_ID = streamID;
3773 // Add the Frame drop info to mPendingFrameDropList
3774 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003775 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003776 }
3777 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003778 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003779
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003780 for (auto & pendingRequest : mPendingRequestsList) {
3781 // Find the pending request with the frame number.
3782 if (pendingRequest.frame_number == frame_number) {
3783 // Update the sensor timestamp.
3784 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003785
Thierry Strudel3d639192016-09-09 11:52:26 -07003786
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003787 /* Set the timestamp in display metadata so that clients aware of
3788                private_handle such as VT can use these unmodified timestamps.
3789 Camera framework is unaware of this timestamp and cannot change this */
Jason Lee603176d2017-05-31 11:43:27 -07003790 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003791
Thierry Strudel3d639192016-09-09 11:52:26 -07003792 // Find channel requiring metadata, meaning internal offline postprocess
3793 // is needed.
3794 //TODO: for now, we don't support two streams requiring metadata at the same time.
3795             // (because we are not making copies, and the metadata buffer is not reference counted.)
3796 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003797 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3798 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003799 if (iter->need_metadata) {
3800 internalPproc = true;
3801 QCamera3ProcessingChannel *channel =
3802 (QCamera3ProcessingChannel *)iter->stream->priv;
3803 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003804 if(p_is_metabuf_queued != NULL) {
3805 *p_is_metabuf_queued = true;
3806 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003807 break;
3808 }
3809 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003810 for (auto itr = pendingRequest.internalRequestList.begin();
3811 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003812 if (itr->need_metadata) {
3813 internalPproc = true;
3814 QCamera3ProcessingChannel *channel =
3815 (QCamera3ProcessingChannel *)itr->stream->priv;
3816 channel->queueReprocMetadata(metadata_buf);
3817 break;
3818 }
3819 }
3820
Thierry Strudel54dc9782017-02-15 12:12:10 -08003821 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003822
3823 bool *enableZsl = nullptr;
3824 if (gExposeEnableZslKey) {
3825 enableZsl = &pendingRequest.enableZsl;
3826 }
3827
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003828 resultMetadata = translateFromHalMetadata(metadata,
3829 pendingRequest.timestamp, pendingRequest.request_id,
3830 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3831 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003832 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003833 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003834 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003835 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003836 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003837 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003838
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003839 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003840
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003841 if (pendingRequest.blob_request) {
3842 //Dump tuning metadata if enabled and available
3843 char prop[PROPERTY_VALUE_MAX];
3844 memset(prop, 0, sizeof(prop));
3845 property_get("persist.camera.dumpmetadata", prop, "0");
3846 int32_t enabled = atoi(prop);
3847 if (enabled && metadata->is_tuning_params_valid) {
3848 dumpMetadataToFile(metadata->tuning_params,
3849 mMetaFrameCount,
3850 enabled,
3851 "Snapshot",
3852 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003853 }
3854 }
3855
3856 if (!internalPproc) {
3857 LOGD("couldn't find need_metadata for this metadata");
3858 // Return metadata buffer
3859 if (free_and_bufdone_meta_buf) {
3860 mMetadataChannel->bufDone(metadata_buf);
3861 free(metadata_buf);
3862 }
3863 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003864
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003865 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003866 }
3867 }
3868
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003869 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3870
3871 // Try to send out capture result metadata.
3872 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003873 return;
3874
Thierry Strudel3d639192016-09-09 11:52:26 -07003875done_metadata:
3876 for (pendingRequestIterator i = mPendingRequestsList.begin();
3877 i != mPendingRequestsList.end() ;i++) {
3878 i->pipeline_depth++;
3879 }
3880 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3881 unblockRequestIfNecessary();
3882}
3883
3884/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003885 * FUNCTION   : handleDepthDataLocked
3886 *
3887 * DESCRIPTION: Handles incoming depth data
3888 *
3889 * PARAMETERS : @depthData : Depth data
3890 * @frameNumber: Frame number of the incoming depth data
3891 *
3892 * RETURN :
3893 *
3894 *==========================================================================*/
3895void QCamera3HardwareInterface::handleDepthDataLocked(
3896 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3897 uint32_t currentFrameNumber;
3898 buffer_handle_t *depthBuffer;
3899
3900 if (nullptr == mDepthChannel) {
3901 LOGE("Depth channel not present!");
3902 return;
3903 }
3904
3905 camera3_stream_buffer_t resultBuffer =
3906 {.acquire_fence = -1,
3907 .release_fence = -1,
3908 .status = CAMERA3_BUFFER_STATUS_OK,
3909 .buffer = nullptr,
3910 .stream = mDepthChannel->getStream()};
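    // Drain the depth channel oldest-first: frames older than the one this depth
    // data belongs to are returned with a buffer error, the matching frame gets the
    // depth payload populated, and newer frames are left pending for later data.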
Emilian Peev7650c122017-01-19 08:24:33 -08003911 do {
3912 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3913 if (nullptr == depthBuffer) {
3914 break;
3915 }
3916
Emilian Peev7650c122017-01-19 08:24:33 -08003917 resultBuffer.buffer = depthBuffer;
3918 if (currentFrameNumber == frameNumber) {
3919 int32_t rc = mDepthChannel->populateDepthData(depthData,
3920 frameNumber);
3921 if (NO_ERROR != rc) {
3922 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3923 } else {
3924 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3925 }
3926 } else if (currentFrameNumber > frameNumber) {
3927 break;
3928 } else {
3929 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3930 {{currentFrameNumber, mDepthChannel->getStream(),
3931 CAMERA3_MSG_ERROR_BUFFER}}};
3932 orchestrateNotify(&notify_msg);
3933
3934            LOGE("Depth data for frame number: %d is missing, "
3935                    "returning buffer with error status!", currentFrameNumber);
3936 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3937 }
3938 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003939 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003940 } while (currentFrameNumber < frameNumber);
3941}
3942
3943/*===========================================================================
3944 * FUNCTION : notifyErrorFoPendingDepthData
3945 *
3946 * DESCRIPTION: Returns error for any pending depth buffers
3947 *
3948 * PARAMETERS : depthCh - depth channel that needs to get flushed
3949 *
3950 * RETURN :
3951 *
3952 *==========================================================================*/
3953void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3954 QCamera3DepthChannel *depthCh) {
3955 uint32_t currentFrameNumber;
3956 buffer_handle_t *depthBuffer;
3957
3958 if (nullptr == depthCh) {
3959 return;
3960 }
3961
3962 camera3_notify_msg_t notify_msg =
3963 {.type = CAMERA3_MSG_ERROR,
3964 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3965 camera3_stream_buffer_t resultBuffer =
3966 {.acquire_fence = -1,
3967 .release_fence = -1,
3968 .buffer = nullptr,
3969 .stream = depthCh->getStream(),
3970 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08003971
3972 while (nullptr !=
3973 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3974 depthCh->unmapBuffer(currentFrameNumber);
3975
3976 notify_msg.message.error.frame_number = currentFrameNumber;
3977 orchestrateNotify(&notify_msg);
3978
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003979 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003980 };
3981}
3982
3983/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003984 * FUNCTION : hdrPlusPerfLock
3985 *
3986 * DESCRIPTION: perf lock for HDR+ using custom intent
3987 *
3988 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3989 *
3990 * RETURN : None
3991 *
3992 *==========================================================================*/
3993void QCamera3HardwareInterface::hdrPlusPerfLock(
3994 mm_camera_super_buf_t *metadata_buf)
3995{
3996 if (NULL == metadata_buf) {
3997 LOGE("metadata_buf is NULL");
3998 return;
3999 }
4000 metadata_buffer_t *metadata =
4001 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4002 int32_t *p_frame_number_valid =
4003 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4004 uint32_t *p_frame_number =
4005 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4006
4007 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4008 LOGE("%s: Invalid metadata", __func__);
4009 return;
4010 }
4011
4012    //acquire perf lock for 5 sec after the last HDR frame is captured
4013    //(both pointers were already NULL-checked above, so only the values are tested here)
4014    if (*p_frame_number_valid &&
4015            (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004016        mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004017    }
Thierry Strudel3d639192016-09-09 11:52:26 -07004019}
4020
4021/*===========================================================================
4022 * FUNCTION : handleInputBufferWithLock
4023 *
4024 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4025 *
4026 * PARAMETERS : @frame_number: frame number of the input buffer
4027 *
4028 * RETURN :
4029 *
4030 *==========================================================================*/
4031void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4032{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004033 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004034 pendingRequestIterator i = mPendingRequestsList.begin();
4035 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4036 i++;
4037 }
4038 if (i != mPendingRequestsList.end() && i->input_buffer) {
4039 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004040 CameraMetadata settings;
4041 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4042 if(i->settings) {
4043 settings = i->settings;
4044 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4045 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004046 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004047 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004048 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004049 } else {
4050 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004051 }
4052
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004053 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4054 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4055 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004056
4057 camera3_capture_result result;
4058 memset(&result, 0, sizeof(camera3_capture_result));
4059 result.frame_number = frame_number;
4060 result.result = i->settings;
4061 result.input_buffer = i->input_buffer;
4062 result.partial_result = PARTIAL_RESULT_COUNT;
4063
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004064 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004065 LOGD("Input request metadata and input buffer frame_number = %u",
4066 i->frame_number);
4067 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004068
4069 // Dispatch result metadata that may be just unblocked by this reprocess result.
4070 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004071 } else {
4072 LOGE("Could not find input request for frame number %d", frame_number);
4073 }
4074}
4075
4076/*===========================================================================
4077 * FUNCTION : handleBufferWithLock
4078 *
4079 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4080 *
4081 * PARAMETERS : @buffer: image buffer for the callback
4082 * @frame_number: frame number of the image buffer
4083 *
4084 * RETURN :
4085 *
4086 *==========================================================================*/
4087void QCamera3HardwareInterface::handleBufferWithLock(
4088 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4089{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004090 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004091
4092 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4093 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4094 }
4095
Thierry Strudel3d639192016-09-09 11:52:26 -07004096 /* Nothing to be done during error state */
4097 if ((ERROR == mState) || (DEINIT == mState)) {
4098 return;
4099 }
4100 if (mFlushPerf) {
4101 handleBuffersDuringFlushLock(buffer);
4102 return;
4103 }
4104 //not in flush
4105 // If the frame number doesn't exist in the pending request list,
4106 // directly send the buffer to the frameworks, and update pending buffers map
4107    // directly send the buffer to the framework and update the pending buffers map.
4108 pendingRequestIterator i = mPendingRequestsList.begin();
4109 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4110 i++;
4111 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004112
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004113 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004114 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004115 // For a reprocessing request, try to send out result metadata.
4116 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004117 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004118 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004119
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004120 // Check if this frame was dropped.
4121 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4122 m != mPendingFrameDropList.end(); m++) {
4123 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4124 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4125 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4126 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4127 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4128 frame_number, streamID);
4129 m = mPendingFrameDropList.erase(m);
4130 break;
4131 }
4132 }
4133
4134 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4135 LOGH("result frame_number = %d, buffer = %p",
4136 frame_number, buffer->buffer);
4137
4138 mPendingBuffersMap.removeBuf(buffer->buffer);
4139 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4140
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004141 if (mPreviewStarted == false) {
4142 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4143 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004144 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4145
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004146 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4147 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4148 mPreviewStarted = true;
4149
4150 // Set power hint for preview
4151 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4152 }
4153 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004154}
4155
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004156void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004157 const camera_metadata_t *resultMetadata)
4158{
4159 // Find the pending request for this result metadata.
4160 auto requestIter = mPendingRequestsList.begin();
4161 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4162 requestIter++;
4163 }
4164
4165 if (requestIter == mPendingRequestsList.end()) {
4166 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4167 return;
4168 }
4169
4170 // Update the result metadata
4171 requestIter->resultMetadata = resultMetadata;
4172
4173 // Check what type of request this is.
4174 bool liveRequest = false;
4175 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004176 // HDR+ request doesn't have partial results.
4177 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004178 } else if (requestIter->input_buffer != nullptr) {
4179 // Reprocessing request result is the same as settings.
4180 requestIter->resultMetadata = requestIter->settings;
4181 // Reprocessing request doesn't have partial results.
4182 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4183 } else {
4184 liveRequest = true;
4185 requestIter->partial_result_cnt++;
4186 mPendingLiveRequest--;
4187
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004188 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004189 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004190 // For a live request, send the metadata to HDR+ client.
4191 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4192 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4193 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4194 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004195 }
4196 }
4197
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004198 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4199}
4200
4201void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4202 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004203 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4204 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004205 bool readyToSend = true;
4206
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004207 // Iterate through the pending requests to send out result metadata that are ready. Also if
4208 // this result metadata belongs to a live request, notify errors for previous live requests
4209 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004210 auto iter = mPendingRequestsList.begin();
4211 while (iter != mPendingRequestsList.end()) {
4212 // Check if current pending request is ready. If it's not ready, the following pending
4213 // requests are also not ready.
4214 if (readyToSend && iter->resultMetadata == nullptr) {
4215 readyToSend = false;
4216 }
4217
4218 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4219
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004220 camera3_capture_result_t result = {};
4221 result.frame_number = iter->frame_number;
4222 result.result = iter->resultMetadata;
4223 result.partial_result = iter->partial_result_cnt;
4224
4225 // If this pending buffer has result metadata, we may be able to send out shutter callback
4226 // and result metadata.
4227 if (iter->resultMetadata != nullptr) {
4228 if (!readyToSend) {
4229 // If any of the previous pending request is not ready, this pending request is
4230 // also not ready to send in order to keep shutter callbacks and result metadata
4231 // in order.
4232 iter++;
4233 continue;
4234 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004235 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004236 // If the result metadata belongs to a live request, notify errors for previous pending
4237 // live requests.
4238 mPendingLiveRequest--;
4239
4240 CameraMetadata dummyMetadata;
4241 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4242 result.result = dummyMetadata.release();
4243
4244 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004245
4246 // partial_result should be PARTIAL_RESULT_CNT in case of
4247 // ERROR_RESULT.
4248 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4249 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004250 } else {
4251 iter++;
4252 continue;
4253 }
4254
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004255 result.output_buffers = nullptr;
4256 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004257 orchestrateResult(&result);
4258
4259 // For reprocessing, result metadata is the same as settings so do not free it here to
4260 // avoid double free.
4261 if (result.result != iter->settings) {
4262 free_camera_metadata((camera_metadata_t *)result.result);
4263 }
4264 iter->resultMetadata = nullptr;
4265 iter = erasePendingRequest(iter);
4266 }
4267
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004268 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004269 for (auto &iter : mPendingRequestsList) {
4270 // Increment pipeline depth for the following pending requests.
4271 if (iter.frame_number > frameNumber) {
4272 iter.pipeline_depth++;
4273 }
4274 }
4275 }
4276
4277 unblockRequestIfNecessary();
4278}
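/* Note on ordering (descriptive comment only): results are delivered strictly in
 * frame-number order. A later entry whose metadata is ready is held back while any
 * earlier entry is still waiting, with one exception: an earlier live entry that is
 * overtaken by a later live result is completed immediately with
 * CAMERA3_MSG_ERROR_RESULT so ordering is preserved without stalling the pipeline. */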
4279
Thierry Strudel3d639192016-09-09 11:52:26 -07004280/*===========================================================================
4281 * FUNCTION : unblockRequestIfNecessary
4282 *
4283 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4284 * that mMutex is held when this function is called.
4285 *
4286 * PARAMETERS :
4287 *
4288 * RETURN :
4289 *
4290 *==========================================================================*/
4291void QCamera3HardwareInterface::unblockRequestIfNecessary()
4292{
4293 // Unblock process_capture_request
4294 pthread_cond_signal(&mRequestCond);
4295}
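/* For reference, a minimal sketch of the waiting side this signal pairs with
 * (illustrative only; the actual wait loop elsewhere in this file may use a timed
 * wait and different predicates):
 *
 *   pthread_mutex_lock(&mMutex);
 *   while (tooManyRequestsInFlight()) {            // hypothetical predicate
 *       pthread_cond_wait(&mRequestCond, &mMutex); // atomically releases mMutex
 *   }
 *   // ...queue the capture request...
 *   pthread_mutex_unlock(&mMutex);
 */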
4296
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004297/*===========================================================================
4298 * FUNCTION : isHdrSnapshotRequest
4299 *
4300 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4301 *
4302 * PARAMETERS : @request : camera3 capture request structure
4303 *
4304 * RETURN     : true if the request is an HDR snapshot request, false otherwise
4305 *
4306 *==========================================================================*/
4307bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4308{
4309 if (request == NULL) {
4310 LOGE("Invalid request handle");
4311 assert(0);
4312 return false;
4313 }
4314
4315 if (!mForceHdrSnapshot) {
4316 CameraMetadata frame_settings;
4317 frame_settings = request->settings;
4318
4319 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4320 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4321 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4322 return false;
4323 }
4324 } else {
4325 return false;
4326 }
4327
4328 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4329 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4330 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4331 return false;
4332 }
4333 } else {
4334 return false;
4335 }
4336 }
4337
4338 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4339 if (request->output_buffers[i].stream->format
4340 == HAL_PIXEL_FORMAT_BLOB) {
4341 return true;
4342 }
4343 }
4344
4345 return false;
4346}
4347/*===========================================================================
4348 * FUNCTION : orchestrateRequest
4349 *
4350 * DESCRIPTION: Orchestrates a capture request from camera service
4351 *
4352 * PARAMETERS :
4353 * @request : request from framework to process
4354 *
4355 * RETURN : Error status codes
4356 *
4357 *==========================================================================*/
4358int32_t QCamera3HardwareInterface::orchestrateRequest(
4359 camera3_capture_request_t *request)
4360{
4361
4362 uint32_t originalFrameNumber = request->frame_number;
4363 uint32_t originalOutputCount = request->num_output_buffers;
4364 const camera_metadata_t *original_settings = request->settings;
4365 List<InternalRequest> internallyRequestedStreams;
4366 List<InternalRequest> emptyInternalList;
4367
4368 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4369 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
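        /* The block below expands this single framework request into a bracketed
         * sequence of internal requests with AE locked, each exposure preceded by a
         * metering-only "settling" frame. Only the request mapped to the original
         * framework frame number carries the framework's output buffers; the other
         * internal frames map to EMPTY_FRAMEWORK_FRAME_NUMBER, so their results and
         * notifies are dropped in orchestrateResult()/orchestrateNotify(). */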
4370 uint32_t internalFrameNumber;
4371 CameraMetadata modified_meta;
4372
4373
4374 /* Add Blob channel to list of internally requested streams */
4375 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4376 if (request->output_buffers[i].stream->format
4377 == HAL_PIXEL_FORMAT_BLOB) {
4378 InternalRequest streamRequested;
4379 streamRequested.meteringOnly = 1;
4380 streamRequested.need_metadata = 0;
4381 streamRequested.stream = request->output_buffers[i].stream;
4382 internallyRequestedStreams.push_back(streamRequested);
4383 }
4384 }
4385 request->num_output_buffers = 0;
4386 auto itr = internallyRequestedStreams.begin();
4387
4388 /* Modify setting to set compensation */
4389 modified_meta = request->settings;
4390 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4391 uint8_t aeLock = 1;
4392 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4393 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4394 camera_metadata_t *modified_settings = modified_meta.release();
4395 request->settings = modified_settings;
4396
4397 /* Capture Settling & -2x frame */
4398 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4399 request->frame_number = internalFrameNumber;
4400 processCaptureRequest(request, internallyRequestedStreams);
4401
4402 request->num_output_buffers = originalOutputCount;
4403 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4404 request->frame_number = internalFrameNumber;
4405 processCaptureRequest(request, emptyInternalList);
4406 request->num_output_buffers = 0;
4407
4408 modified_meta = modified_settings;
4409 expCompensation = 0;
4410 aeLock = 1;
4411 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4412 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4413 modified_settings = modified_meta.release();
4414 request->settings = modified_settings;
4415
4416 /* Capture Settling & 0X frame */
4417
4418 itr = internallyRequestedStreams.begin();
4419 if (itr == internallyRequestedStreams.end()) {
4420 LOGE("Error Internally Requested Stream list is empty");
4421 assert(0);
4422 } else {
4423 itr->need_metadata = 0;
4424 itr->meteringOnly = 1;
4425 }
4426
4427 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4428 request->frame_number = internalFrameNumber;
4429 processCaptureRequest(request, internallyRequestedStreams);
4430
4431 itr = internallyRequestedStreams.begin();
4432 if (itr == internallyRequestedStreams.end()) {
4433 ALOGE("Error Internally Requested Stream list is empty");
4434 assert(0);
4435 } else {
4436 itr->need_metadata = 1;
4437 itr->meteringOnly = 0;
4438 }
4439
4440 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4441 request->frame_number = internalFrameNumber;
4442 processCaptureRequest(request, internallyRequestedStreams);
4443
4444 /* Capture 2X frame*/
4445 modified_meta = modified_settings;
4446 expCompensation = GB_HDR_2X_STEP_EV;
4447 aeLock = 1;
4448 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4449 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4450 modified_settings = modified_meta.release();
4451 request->settings = modified_settings;
4452
4453 itr = internallyRequestedStreams.begin();
4454 if (itr == internallyRequestedStreams.end()) {
4455 ALOGE("Error Internally Requested Stream list is empty");
4456 assert(0);
4457 } else {
4458 itr->need_metadata = 0;
4459 itr->meteringOnly = 1;
4460 }
4461 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4462 request->frame_number = internalFrameNumber;
4463 processCaptureRequest(request, internallyRequestedStreams);
4464
4465 itr = internallyRequestedStreams.begin();
4466 if (itr == internallyRequestedStreams.end()) {
4467 ALOGE("Error Internally Requested Stream list is empty");
4468 assert(0);
4469 } else {
4470 itr->need_metadata = 1;
4471 itr->meteringOnly = 0;
4472 }
4473
4474 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4475 request->frame_number = internalFrameNumber;
4476 processCaptureRequest(request, internallyRequestedStreams);
4477
4478
4479 /* Capture 2X on original streaming config*/
4480 internallyRequestedStreams.clear();
4481
4482 /* Restore original settings pointer */
4483 request->settings = original_settings;
4484 } else {
4485 uint32_t internalFrameNumber;
4486 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4487 request->frame_number = internalFrameNumber;
4488 return processCaptureRequest(request, internallyRequestedStreams);
4489 }
4490
4491 return NO_ERROR;
4492}
4493
4494/*===========================================================================
4495 * FUNCTION : orchestrateResult
4496 *
4497 * DESCRIPTION: Orchestrates a capture result to camera service
4498 *
4499 * PARAMETERS :
4500 *   @result : capture result to be sent to the camera service
4501 *
4502 * RETURN :
4503 *
4504 *==========================================================================*/
4505void QCamera3HardwareInterface::orchestrateResult(
4506 camera3_capture_result_t *result)
4507{
4508 uint32_t frameworkFrameNumber;
4509 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4510 frameworkFrameNumber);
4511 if (rc != NO_ERROR) {
4512 LOGE("Cannot find translated frameworkFrameNumber");
4513 assert(0);
4514 } else {
4515 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004516 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004517 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004518 if (result->result != NULL) {
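                // If the result carries ANDROID_SYNC_FRAME_NUMBER, overwrite it in
                // place with the framework frame number so the value matches the
                // numbering the framework sees.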
Binhao Lin299ffc92017-04-27 11:22:47 -07004519 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4520 camera_metadata_entry_t entry;
4521 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4522 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004523 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004524 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4525 if (ret != OK)
4526 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004527 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004528 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004529 result->frame_number = frameworkFrameNumber;
4530 mCallbackOps->process_capture_result(mCallbackOps, result);
4531 }
4532 }
4533}
4534
4535/*===========================================================================
4536 * FUNCTION : orchestrateNotify
4537 *
4538 * DESCRIPTION: Orchestrates a notify to camera service
4539 *
4540 * PARAMETERS :
4541 *   @notify_msg : notify message to be sent to the camera service
4542 *
4543 * RETURN :
4544 *
4545 *==========================================================================*/
4546void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4547{
4548 uint32_t frameworkFrameNumber;
4549 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004550 int32_t rc = NO_ERROR;
4551
4552 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004553 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004554
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004555 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004556 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4557 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4558 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004559 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004560 LOGE("Cannot find translated frameworkFrameNumber");
4561 assert(0);
4562 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004563 }
4564 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004565
4566 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4567 LOGD("Internal Request drop the notifyCb");
4568 } else {
4569 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4570 mCallbackOps->notify(mCallbackOps, notify_msg);
4571 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004572}
4573
4574/*===========================================================================
4575 * FUNCTION : FrameNumberRegistry
4576 *
4577 * DESCRIPTION: Constructor
4578 *
4579 * PARAMETERS :
4580 *
4581 * RETURN :
4582 *
4583 *==========================================================================*/
4584FrameNumberRegistry::FrameNumberRegistry()
4585{
4586 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4587}
4588
4589/*===========================================================================
4590 * FUNCTION : ~FrameNumberRegistry
4591 *
4592 * DESCRIPTION: Destructor
4593 *
4594 * PARAMETERS :
4595 *
4596 * RETURN :
4597 *
4598 *==========================================================================*/
4599FrameNumberRegistry::~FrameNumberRegistry()
4600{
4601}
4602
4603/*===========================================================================
4604 * FUNCTION : PurgeOldEntriesLocked
4605 *
4606 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4607 *
4608 * PARAMETERS :
4609 *
4610 * RETURN : NONE
4611 *
4612 *==========================================================================*/
4613void FrameNumberRegistry::purgeOldEntriesLocked()
4614{
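    // Walk from the oldest entry and drop everything that has fallen out of the LRU
    // window, i.e. entries older than (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE).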
4615 while (_register.begin() != _register.end()) {
4616 auto itr = _register.begin();
4617 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4618 _register.erase(itr);
4619 } else {
4620 return;
4621 }
4622 }
4623}
4624
4625/*===========================================================================
4626 * FUNCTION : allocStoreInternalFrameNumber
4627 *
4628 * DESCRIPTION: Method to note down a framework request and associate a new
4629 * internal request number against it
4630 *
4631 * PARAMETERS :
4632 *   @frameworkFrameNumber: Identifier given by the framework
4633 *   @internalFrameNumber : Output parameter which will hold the newly generated
4634 *                          internal frame number
4635 *
4636 * RETURN : Error code
4637 *
4638 *==========================================================================*/
4639int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4640 uint32_t &internalFrameNumber)
4641{
4642 Mutex::Autolock lock(mRegistryLock);
4643 internalFrameNumber = _nextFreeInternalNumber++;
4644 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4645 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4646 purgeOldEntriesLocked();
4647 return NO_ERROR;
4648}
4649
4650/*===========================================================================
4651 * FUNCTION : generateStoreInternalFrameNumber
4652 *
4653 * DESCRIPTION: Method to generate a new internal request number that is not
4654 *              associated with any framework request
4655 *
4656 * PARAMETERS :
4657 *   @internalFrameNumber: Output parameter holding the newly generated internal frame number
4658 *
4659 *
4660 * RETURN : Error code
4661 *
4662 *==========================================================================*/
4663int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4664{
4665 Mutex::Autolock lock(mRegistryLock);
4666 internalFrameNumber = _nextFreeInternalNumber++;
4667 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4668 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4669 purgeOldEntriesLocked();
4670 return NO_ERROR;
4671}
4672
4673/*===========================================================================
4674 * FUNCTION : getFrameworkFrameNumber
4675 *
4676 * DESCRIPTION: Method to query the framework framenumber given an internal #
4677 *
4678 * PARAMETERS :
4679 * @internalFrame#: Internal reference
4680 * @frameworkframenumber: Output parameter holding framework frame entry
4681 *
4682 * RETURN : Error code
4683 *
4684 *==========================================================================*/
4685int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4686 uint32_t &frameworkFrameNumber)
4687{
4688 Mutex::Autolock lock(mRegistryLock);
4689 auto itr = _register.find(internalFrameNumber);
4690 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004691 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004692 return -ENOENT;
4693 }
4694
4695 frameworkFrameNumber = itr->second;
4696 purgeOldEntriesLocked();
4697 return NO_ERROR;
4698}
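/* Usage sketch (illustrative comment only; frame numbers are made up):
 *
 *   FrameNumberRegistry db;
 *   uint32_t internalFn = 0, halOnlyFn = 0, fwkFn = 0;
 *   db.allocStoreInternalFrameNumber(42, internalFn);  // framework #42 -> internal #N
 *   db.generateStoreInternalFrameNumber(halOnlyFn);    // HAL-only frame, maps to
 *                                                      // EMPTY_FRAMEWORK_FRAME_NUMBER
 *   if (db.getFrameworkFrameNumber(internalFn, fwkFn) == NO_ERROR &&
 *           fwkFn != EMPTY_FRAMEWORK_FRAME_NUMBER) {
 *       // deliver the result/notify under fwkFn (42 in this example)
 *   }
 */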
Thierry Strudel3d639192016-09-09 11:52:26 -07004699
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004700status_t QCamera3HardwareInterface::fillPbStreamConfig(
4701 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4702 QCamera3Channel *channel, uint32_t streamIndex) {
4703 if (config == nullptr) {
4704 LOGE("%s: config is null", __FUNCTION__);
4705 return BAD_VALUE;
4706 }
4707
4708 if (channel == nullptr) {
4709 LOGE("%s: channel is null", __FUNCTION__);
4710 return BAD_VALUE;
4711 }
4712
4713 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4714 if (stream == nullptr) {
4715 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4716 return NAME_NOT_FOUND;
4717 }
4718
4719 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4720 if (streamInfo == nullptr) {
4721 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4722 return NAME_NOT_FOUND;
4723 }
4724
4725 config->id = pbStreamId;
4726 config->image.width = streamInfo->dim.width;
4727 config->image.height = streamInfo->dim.height;
4728 config->image.padding = 0;
4729 config->image.format = pbStreamFormat;
4730
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004731 uint32_t totalPlaneSize = 0;
4732
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004733 // Fill plane information.
4734 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4735 pbcamera::PlaneConfiguration plane;
4736 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4737 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4738 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004739
4740 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004741 }
4742
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004743 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004744 return OK;
4745}
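/* Example of the padding computation above (illustrative numbers only): for a
 * two-plane stream with plane sizes 1920x1088 and 1920x544 and frame_len 3133440,
 * padding = 3133440 - (1920*1088 + 1920*544) = 0; a non-zero value captures any
 * extra bytes the backend appends after the last plane. */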
4746
Thierry Strudel3d639192016-09-09 11:52:26 -07004747/*===========================================================================
4748 * FUNCTION : processCaptureRequest
4749 *
4750 * DESCRIPTION: process a capture request from camera service
4751 *
4752 * PARAMETERS :
4753 * @request : request from framework to process
4754 *
4755 * RETURN :
4756 *
4757 *==========================================================================*/
4758int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004759 camera3_capture_request_t *request,
4760 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004761{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004762 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004763 int rc = NO_ERROR;
4764 int32_t request_id;
4765 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004766 bool isVidBufRequested = false;
4767 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004768 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004769
4770 pthread_mutex_lock(&mMutex);
4771
4772 // Validate current state
4773 switch (mState) {
4774 case CONFIGURED:
4775 case STARTED:
4776 /* valid state */
4777 break;
4778
4779 case ERROR:
4780 pthread_mutex_unlock(&mMutex);
4781 handleCameraDeviceError();
4782 return -ENODEV;
4783
4784 default:
4785 LOGE("Invalid state %d", mState);
4786 pthread_mutex_unlock(&mMutex);
4787 return -ENODEV;
4788 }
4789
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004790 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004791 if (rc != NO_ERROR) {
4792 LOGE("incoming request is not valid");
4793 pthread_mutex_unlock(&mMutex);
4794 return rc;
4795 }
4796
4797 meta = request->settings;
4798
4799 // For first capture request, send capture intent, and
4800 // stream on all streams
4801 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004802 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
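        // First request after configure_streams: tear down any previous ISP session,
        // push the session parameters (capture intent, IS type, tintless, CDS, AV timer,
        // fps range, stream info), query the sensor mode, and initialize all channels.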
Thierry Strudel3d639192016-09-09 11:52:26 -07004803 // send an unconfigure to the backend so that the isp
4804 // resources are deallocated
4805 if (!mFirstConfiguration) {
4806 cam_stream_size_info_t stream_config_info;
4807 int32_t hal_version = CAM_HAL_V3;
4808 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4809 stream_config_info.buffer_info.min_buffers =
4810 MIN_INFLIGHT_REQUESTS;
4811 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004812 m_bIs4KVideo ? 0 :
4813 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004814 clear_metadata_buffer(mParameters);
4815 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4816 CAM_INTF_PARM_HAL_VERSION, hal_version);
4817 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4818 CAM_INTF_META_STREAM_INFO, stream_config_info);
4819 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4820 mParameters);
4821 if (rc < 0) {
4822 LOGE("set_parms for unconfigure failed");
4823 pthread_mutex_unlock(&mMutex);
4824 return rc;
4825 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004826
Thierry Strudel3d639192016-09-09 11:52:26 -07004827 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004828 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004829 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004830 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004831 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004832 property_get("persist.camera.is_type", is_type_value, "4");
4833 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4834 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4835 property_get("persist.camera.is_type_preview", is_type_value, "4");
4836 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4837 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004838
4839 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4840 int32_t hal_version = CAM_HAL_V3;
4841 uint8_t captureIntent =
4842 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4843 mCaptureIntent = captureIntent;
4844 clear_metadata_buffer(mParameters);
4845 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4846 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4847 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004848 if (mFirstConfiguration) {
4849 // configure instant AEC
4850 // Instant AEC is a session based parameter and it is needed only
4851 // once per complete session after open camera.
4852 // i.e. This is set only once for the first capture request, after open camera.
4853 setInstantAEC(meta);
4854 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004855 uint8_t fwkVideoStabMode=0;
4856 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4857 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4858 }
4859
Xue Tuecac74e2017-04-17 13:58:15 -07004860        // Turn EIS on for video/preview streams only if the EIS setprop is enabled
4861 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004862 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004863 int32_t vsMode;
4864 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4865 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4866 rc = BAD_VALUE;
4867 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004868 LOGD("setEis %d", setEis);
4869 bool eis3Supported = false;
4870 size_t count = IS_TYPE_MAX;
4871 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4872 for (size_t i = 0; i < count; i++) {
4873 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4874 eis3Supported = true;
4875 break;
4876 }
4877 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004878
4879        // IS type will be IS_TYPE_NONE unless EIS is supported. If EIS is supported,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004880        // it could be either EIS 2.0 or EIS 3.0, depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004881 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4882 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004883 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4884 is_type = isTypePreview;
4885 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4886 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4887 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004888 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004889 } else {
4890 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004891 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004892 } else {
4893 is_type = IS_TYPE_NONE;
4894 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004895 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004896 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004897 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4898 }
4899 }
4900
4901 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4902 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4903
Thierry Strudel54dc9782017-02-15 12:12:10 -08004904 //Disable tintless only if the property is set to 0
4905 memset(prop, 0, sizeof(prop));
4906 property_get("persist.camera.tintless.enable", prop, "1");
4907 int32_t tintless_value = atoi(prop);
4908
Thierry Strudel3d639192016-09-09 11:52:26 -07004909 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4910 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004911
Thierry Strudel3d639192016-09-09 11:52:26 -07004912 //Disable CDS for HFR mode or if DIS/EIS is on.
4913 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4914 //after every configure_stream
4915 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4916 (m_bIsVideo)) {
4917 int32_t cds = CAM_CDS_MODE_OFF;
4918 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4919 CAM_INTF_PARM_CDS_MODE, cds))
4920 LOGE("Failed to disable CDS for HFR mode");
4921
4922 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004923
4924 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4925 uint8_t* use_av_timer = NULL;
4926
4927 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004928 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004929 use_av_timer = &m_debug_avtimer;
4930 }
4931 else{
4932 use_av_timer =
4933 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004934 if (use_av_timer) {
4935 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4936 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004937 }
4938
4939 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4940 rc = BAD_VALUE;
4941 }
4942 }
4943
Thierry Strudel3d639192016-09-09 11:52:26 -07004944 setMobicat();
4945
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004946 uint8_t nrMode = 0;
4947 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4948 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4949 }
4950
Thierry Strudel3d639192016-09-09 11:52:26 -07004951 /* Set fps and hfr mode while sending meta stream info so that sensor
4952 * can configure appropriate streaming mode */
4953 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004954 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4955 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004956 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4957 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004958 if (rc == NO_ERROR) {
4959 int32_t max_fps =
4960 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004961 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004962 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4963 }
4964 /* For HFR, more buffers are dequeued upfront to improve the performance */
4965 if (mBatchSize) {
4966 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4967 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4968 }
4969 }
4970 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004971 LOGE("setHalFpsRange failed");
4972 }
4973 }
4974 if (meta.exists(ANDROID_CONTROL_MODE)) {
4975 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4976 rc = extractSceneMode(meta, metaMode, mParameters);
4977 if (rc != NO_ERROR) {
4978 LOGE("extractSceneMode failed");
4979 }
4980 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004981 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004982
Thierry Strudel04e026f2016-10-10 11:27:36 -07004983 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4984 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4985 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4986 rc = setVideoHdrMode(mParameters, vhdr);
4987 if (rc != NO_ERROR) {
4988 LOGE("setVideoHDR is failed");
4989 }
4990 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004991
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004992 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004993 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004994 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004995 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4996 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
4997 sensorModeFullFov)) {
4998 rc = BAD_VALUE;
4999 }
5000 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005001 //TODO: validate the arguments, HSV scenemode should have only the
5002 //advertised fps ranges
5003
5004        /* Set the capture intent, HAL version, tintless, stream info,
5005         * and DIS enable parameters to the backend */
5006 LOGD("set_parms META_STREAM_INFO " );
5007 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005008 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5009 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005010 mStreamConfigInfo.type[i],
5011 mStreamConfigInfo.stream_sizes[i].width,
5012 mStreamConfigInfo.stream_sizes[i].height,
5013 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005014 mStreamConfigInfo.format[i],
5015 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005016 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005017
Thierry Strudel3d639192016-09-09 11:52:26 -07005018 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5019 mParameters);
5020 if (rc < 0) {
5021 LOGE("set_parms failed for hal version, stream info");
5022 }
5023
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005024 cam_sensor_mode_info_t sensorModeInfo = {};
5025 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005026 if (rc != NO_ERROR) {
5027 LOGE("Failed to get sensor output size");
5028 pthread_mutex_unlock(&mMutex);
5029 goto error_exit;
5030 }
5031
5032 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5033 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005034 sensorModeInfo.active_array_size.width,
5035 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005036
5037    /* Set batch mode before initializing channels. Since registerBuffer
5038     * internally initializes some of the channels, it is better to set batch mode
5039     * even before the first registerBuffer call. */
5040 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5041 it != mStreamInfo.end(); it++) {
5042 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5043 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5044 && mBatchSize) {
5045 rc = channel->setBatchSize(mBatchSize);
5046 //Disable per frame map unmap for HFR/batchmode case
5047 rc |= channel->setPerFrameMapUnmap(false);
5048 if (NO_ERROR != rc) {
5049 LOGE("Channel init failed %d", rc);
5050 pthread_mutex_unlock(&mMutex);
5051 goto error_exit;
5052 }
5053 }
5054 }
5055
5056 //First initialize all streams
5057 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5058 it != mStreamInfo.end(); it++) {
5059 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005060
5061 /* Initial value of NR mode is needed before stream on */
5062 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005063 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5064 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005065 setEis) {
5066 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5067 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5068 is_type = mStreamConfigInfo.is_type[i];
5069 break;
5070 }
5071 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005072 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005073 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005074 rc = channel->initialize(IS_TYPE_NONE);
5075 }
5076 if (NO_ERROR != rc) {
5077 LOGE("Channel initialization failed %d", rc);
5078 pthread_mutex_unlock(&mMutex);
5079 goto error_exit;
5080 }
5081 }
5082
5083 if (mRawDumpChannel) {
5084 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5085 if (rc != NO_ERROR) {
5086 LOGE("Error: Raw Dump Channel init failed");
5087 pthread_mutex_unlock(&mMutex);
5088 goto error_exit;
5089 }
5090 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005091 if (mHdrPlusRawSrcChannel) {
5092 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5093 if (rc != NO_ERROR) {
5094 LOGE("Error: HDR+ RAW Source Channel init failed");
5095 pthread_mutex_unlock(&mMutex);
5096 goto error_exit;
5097 }
5098 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005099 if (mSupportChannel) {
5100 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5101 if (rc < 0) {
5102 LOGE("Support channel initialization failed");
5103 pthread_mutex_unlock(&mMutex);
5104 goto error_exit;
5105 }
5106 }
5107 if (mAnalysisChannel) {
5108 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5109 if (rc < 0) {
5110 LOGE("Analysis channel initialization failed");
5111 pthread_mutex_unlock(&mMutex);
5112 goto error_exit;
5113 }
5114 }
5115 if (mDummyBatchChannel) {
5116 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5117 if (rc < 0) {
5118 LOGE("mDummyBatchChannel setBatchSize failed");
5119 pthread_mutex_unlock(&mMutex);
5120 goto error_exit;
5121 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005122 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005123 if (rc < 0) {
5124 LOGE("mDummyBatchChannel initialization failed");
5125 pthread_mutex_unlock(&mMutex);
5126 goto error_exit;
5127 }
5128 }
5129
5130 // Set bundle info
5131 rc = setBundleInfo();
5132 if (rc < 0) {
5133 LOGE("setBundleInfo failed %d", rc);
5134 pthread_mutex_unlock(&mMutex);
5135 goto error_exit;
5136 }
5137
5138 //update settings from app here
5139 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5140 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5141 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5142 }
5143 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5144 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5145 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5146 }
5147 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5148 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5149 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5150
5151 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5152 (mLinkedCameraId != mCameraId) ) {
5153 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5154 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005155 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005156 goto error_exit;
5157 }
5158 }
5159
5160 // add bundle related cameras
5161 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5162 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005163 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5164 &m_pDualCamCmdPtr->bundle_info;
5165 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005166 if (mIsDeviceLinked)
5167 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5168 else
5169 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5170
5171 pthread_mutex_lock(&gCamLock);
5172
5173 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5174 LOGE("Dualcam: Invalid Session Id ");
5175 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005176 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005177 goto error_exit;
5178 }
5179
5180 if (mIsMainCamera == 1) {
5181 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5182 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005183 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005184 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005185 // related session id should be session id of linked session
5186 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5187 } else {
5188 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5189 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005190 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005191 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005192 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5193 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005194 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005195 pthread_mutex_unlock(&gCamLock);
5196
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005197 rc = mCameraHandle->ops->set_dual_cam_cmd(
5198 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005199 if (rc < 0) {
5200 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005201 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005202 goto error_exit;
5203 }
5204 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005205 goto no_error;
5206error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005207 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005208 return rc;
5209no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005210 mWokenUpByDaemon = false;
5211 mPendingLiveRequest = 0;
5212 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005213 }
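    // From here on, handle the individual capture request; this path runs for every
    // request, including the first one after stream configuration.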
5214
5215 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005216 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005217
5218 if (mFlushPerf) {
5219 //we cannot accept any requests during flush
5220 LOGE("process_capture_request cannot proceed during flush");
5221 pthread_mutex_unlock(&mMutex);
5222 return NO_ERROR; //should return an error
5223 }
5224
5225 if (meta.exists(ANDROID_REQUEST_ID)) {
5226 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5227 mCurrentRequestId = request_id;
5228 LOGD("Received request with id: %d", request_id);
5229 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5230         LOGE("Unable to find request id field, "
5231                 "& no previous id available");
5232 pthread_mutex_unlock(&mMutex);
5233 return NAME_NOT_FOUND;
5234 } else {
5235 LOGD("Re-using old request id");
5236 request_id = mCurrentRequestId;
5237 }
5238
5239 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5240 request->num_output_buffers,
5241 request->input_buffer,
5242 frameNumber);
5243     // Acquire all request buffers (wait on their acquire fences) and build the stream list
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005244 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005245 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005246 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005247 uint32_t snapshotStreamId = 0;
5248 for (size_t i = 0; i < request->num_output_buffers; i++) {
5249 const camera3_stream_buffer_t& output = request->output_buffers[i];
5250 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5251
Emilian Peev7650c122017-01-19 08:24:33 -08005252 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5253 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005254            //FIXME: Call a function to store a local copy of JPEG data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005255 blob_request = 1;
5256 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5257 }
5258
5259 if (output.acquire_fence != -1) {
5260 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5261 close(output.acquire_fence);
5262 if (rc != OK) {
5263 LOGE("sync wait failed %d", rc);
5264 pthread_mutex_unlock(&mMutex);
5265 return rc;
5266 }
5267 }
5268
Emilian Peev0f3c3162017-03-15 12:57:46 +00005269 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5270 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005271 depthRequestPresent = true;
5272 continue;
5273 }
5274
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005275 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005276 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005277
5278 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5279 isVidBufRequested = true;
5280 }
5281 }
5282
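    // Also include any streams requested internally by the HAL (not part of the
    // framework request) so the backend services them in this frame as well.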
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005283    //FIXME: Add checks in validateCaptureRequest to ensure there are no duplicate streams
5284 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5285 itr++) {
5286 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5287 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5288 channel->getStreamID(channel->getStreamTypeMask());
5289
5290 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5291 isVidBufRequested = true;
5292 }
5293 }
5294
Thierry Strudel3d639192016-09-09 11:52:26 -07005295 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005296 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005297 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005298 }
5299 if (blob_request && mRawDumpChannel) {
5300 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005301 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005302 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005303 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005304 }
5305
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005306 {
5307 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5308 // Request a RAW buffer if
5309 // 1. mHdrPlusRawSrcChannel is valid.
5310         // 2. frameNumber is a multiple of kHdrPlusRawPeriod (to limit the RAW capture rate).
5311 // 3. There is no pending HDR+ request.
5312 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5313 mHdrPlusPendingRequests.size() == 0) {
5314 streamsArray.stream_request[streamsArray.num_streams].streamID =
5315 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5316 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5317 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005318 }
5319
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005320 //extract capture intent
5321 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5322 mCaptureIntent =
5323 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5324 }
5325
5326 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5327 mCacMode =
5328 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5329 }
5330
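    // Decide whether this capture should be served by HDR+: only still-capture
    // intents are considered, and only while an HDR+ client is connected and HDR+
    // mode is enabled.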
5331 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005332 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005333
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005334 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005335 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005336 // If this request has a still capture intent, try to submit an HDR+ request.
5337 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5338 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5339 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5340 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005341 }
5342
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005343 if (hdrPlusRequest) {
5344 // For a HDR+ request, just set the frame parameters.
5345 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5346 if (rc < 0) {
5347 LOGE("fail to set frame parameters");
5348 pthread_mutex_unlock(&mMutex);
5349 return rc;
5350 }
5351 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005352 /* Parse the settings:
5353 * - For every request in NORMAL MODE
5354 * - For every request in HFR mode during preview only case
5355 * - For first request of every batch in HFR mode during video
5356         *   recording. In batch mode the same settings, except the frame number,
5357         *   are repeated in each request of the batch.
5358 */
5359 if (!mBatchSize ||
5360 (mBatchSize && !isVidBufRequested) ||
5361 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005362 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005363 if (rc < 0) {
5364 LOGE("fail to set frame parameters");
5365 pthread_mutex_unlock(&mMutex);
5366 return rc;
5367 }
5368 }
5369         /* For batch-mode HFR, setFrameParameters is not called for every
5370          * request; only the frame number of the latest request is parsed.
5371          * Keep track of the first and last frame numbers in a batch so that
5372          * metadata for all frame numbers of the batch can be duplicated in
5373          * handleBatchMetadata */
5374 if (mBatchSize) {
5375 if (!mToBeQueuedVidBufs) {
5376 //start of the batch
5377 mFirstFrameNumberInBatch = request->frame_number;
5378 }
5379 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5380 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5381 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005382 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005383 return BAD_VALUE;
5384 }
5385 }
5386 if (mNeedSensorRestart) {
5387 /* Unlock the mutex as restartSensor waits on the channels to be
5388 * stopped, which in turn calls stream callback functions -
5389 * handleBufferWithLock and handleMetadataWithLock */
5390 pthread_mutex_unlock(&mMutex);
5391 rc = dynamicUpdateMetaStreamInfo();
5392 if (rc != NO_ERROR) {
5393 LOGE("Restarting the sensor failed");
5394 return BAD_VALUE;
5395 }
5396 mNeedSensorRestart = false;
5397 pthread_mutex_lock(&mMutex);
5398 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005399 if(mResetInstantAEC) {
5400 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5401 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5402 mResetInstantAEC = false;
5403 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005404 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005405 if (request->input_buffer->acquire_fence != -1) {
5406 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5407 close(request->input_buffer->acquire_fence);
5408 if (rc != OK) {
5409 LOGE("input buffer sync wait failed %d", rc);
5410 pthread_mutex_unlock(&mMutex);
5411 return rc;
5412 }
5413 }
5414 }
5415
5416 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5417 mLastCustIntentFrmNum = frameNumber;
5418 }
5419 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005420 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005421 pendingRequestIterator latestRequest;
5422 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005423 pendingRequest.num_buffers = depthRequestPresent ?
5424 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005425 pendingRequest.request_id = request_id;
5426 pendingRequest.blob_request = blob_request;
5427 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005428 if (request->input_buffer) {
5429 pendingRequest.input_buffer =
5430 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5431 *(pendingRequest.input_buffer) = *(request->input_buffer);
5432 pInputBuffer = pendingRequest.input_buffer;
5433 } else {
5434 pendingRequest.input_buffer = NULL;
5435 pInputBuffer = NULL;
5436 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005437 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005438
5439 pendingRequest.pipeline_depth = 0;
5440 pendingRequest.partial_result_cnt = 0;
5441 extractJpegMetadata(mCurJpegMeta, request);
5442 pendingRequest.jpegMetadata = mCurJpegMeta;
5443 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005444 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005445 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5446 mHybridAeEnable =
5447 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5448 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005449
5450 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5451 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005452 /* DevCamDebug metadata processCaptureRequest */
5453 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5454 mDevCamDebugMetaEnable =
5455 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5456 }
5457 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5458 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005459
5460 //extract CAC info
5461 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5462 mCacMode =
5463 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5464 }
5465 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005466 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005467
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005468 // extract enableZsl info
5469 if (gExposeEnableZslKey) {
5470 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5471 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5472 mZslEnabled = pendingRequest.enableZsl;
5473 } else {
5474 pendingRequest.enableZsl = mZslEnabled;
5475 }
5476 }
5477
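    // Track the output buffers belonging to this request so they can be returned,
    // or reported as errors (e.g. on flush), later.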
Thierry Strudel3d639192016-09-09 11:52:26 -07005478 PendingBuffersInRequest bufsForCurRequest;
5479 bufsForCurRequest.frame_number = frameNumber;
5480 // Mark current timestamp for the new request
5481 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005482 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005483
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005484 if (hdrPlusRequest) {
5485 // Save settings for this request.
5486 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5487 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5488
5489 // Add to pending HDR+ request queue.
5490 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5491 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5492
5493 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5494 }
5495
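    // Record each requested output buffer (skipping depth blobs, which are handled
    // through the depth channel) in both the pending request and the per-request
    // pending buffer list.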
Thierry Strudel3d639192016-09-09 11:52:26 -07005496 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005497 if ((request->output_buffers[i].stream->data_space ==
5498 HAL_DATASPACE_DEPTH) &&
5499 (HAL_PIXEL_FORMAT_BLOB ==
5500 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005501 continue;
5502 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005503 RequestedBufferInfo requestedBuf;
5504 memset(&requestedBuf, 0, sizeof(requestedBuf));
5505 requestedBuf.stream = request->output_buffers[i].stream;
5506 requestedBuf.buffer = NULL;
5507 pendingRequest.buffers.push_back(requestedBuf);
5508
5509 // Add to buffer handle the pending buffers list
5510 PendingBufferInfo bufferInfo;
5511 bufferInfo.buffer = request->output_buffers[i].buffer;
5512 bufferInfo.stream = request->output_buffers[i].stream;
5513 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5514 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5515 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5516 frameNumber, bufferInfo.buffer,
5517 channel->getStreamTypeMask(), bufferInfo.stream->format);
5518 }
5519 // Add this request packet into mPendingBuffersMap
5520 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5521 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5522 mPendingBuffersMap.get_num_overall_buffers());
5523
5524 latestRequest = mPendingRequestsList.insert(
5525 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005526
5527 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5528 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005529 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005530 for (size_t i = 0; i < request->num_output_buffers; i++) {
5531 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5532 }
5533
Thierry Strudel3d639192016-09-09 11:52:26 -07005534 if(mFlush) {
5535 LOGI("mFlush is true");
5536 pthread_mutex_unlock(&mMutex);
5537 return NO_ERROR;
5538 }
5539
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005540 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5541 // channel.
5542 if (!hdrPlusRequest) {
5543 int indexUsed;
5544 // Notify metadata channel we receive a request
5545 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005546
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005547 if(request->input_buffer != NULL){
5548 LOGD("Input request, frame_number %d", frameNumber);
5549 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5550 if (NO_ERROR != rc) {
5551 LOGE("fail to set reproc parameters");
5552 pthread_mutex_unlock(&mMutex);
5553 return rc;
5554 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005555 }
5556
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005557 // Call request on other streams
5558 uint32_t streams_need_metadata = 0;
5559 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5560 for (size_t i = 0; i < request->num_output_buffers; i++) {
5561 const camera3_stream_buffer_t& output = request->output_buffers[i];
5562 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5563
5564 if (channel == NULL) {
5565 LOGW("invalid channel pointer for stream");
5566 continue;
5567 }
5568
5569 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5570 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5571 output.buffer, request->input_buffer, frameNumber);
5572 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005573 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005574 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5575 if (rc < 0) {
5576 LOGE("Fail to request on picture channel");
5577 pthread_mutex_unlock(&mMutex);
5578 return rc;
5579 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005580 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005581 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5582 assert(NULL != mDepthChannel);
5583 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005584
Emilian Peev7650c122017-01-19 08:24:33 -08005585 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5586 if (rc < 0) {
5587 LOGE("Fail to map on depth buffer");
5588 pthread_mutex_unlock(&mMutex);
5589 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005590 }
Emilian Peev7650c122017-01-19 08:24:33 -08005591 } else {
5592 LOGD("snapshot request with buffer %p, frame_number %d",
5593 output.buffer, frameNumber);
5594 if (!request->settings) {
5595 rc = channel->request(output.buffer, frameNumber,
5596 NULL, mPrevParameters, indexUsed);
5597 } else {
5598 rc = channel->request(output.buffer, frameNumber,
5599 NULL, mParameters, indexUsed);
5600 }
5601 if (rc < 0) {
5602 LOGE("Fail to request on picture channel");
5603 pthread_mutex_unlock(&mMutex);
5604 return rc;
5605 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005606
Emilian Peev7650c122017-01-19 08:24:33 -08005607 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5608 uint32_t j = 0;
5609 for (j = 0; j < streamsArray.num_streams; j++) {
5610 if (streamsArray.stream_request[j].streamID == streamId) {
5611 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5612 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5613 else
5614 streamsArray.stream_request[j].buf_index = indexUsed;
5615 break;
5616 }
5617 }
5618 if (j == streamsArray.num_streams) {
5619 LOGE("Did not find matching stream to update index");
5620 assert(0);
5621 }
5622
5623 pendingBufferIter->need_metadata = true;
5624 streams_need_metadata++;
5625 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005626 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005627 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5628 bool needMetadata = false;
5629 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5630 rc = yuvChannel->request(output.buffer, frameNumber,
5631 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5632 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005633 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005634 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005635 pthread_mutex_unlock(&mMutex);
5636 return rc;
5637 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005638
5639 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5640 uint32_t j = 0;
5641 for (j = 0; j < streamsArray.num_streams; j++) {
5642 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005643 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5644 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5645 else
5646 streamsArray.stream_request[j].buf_index = indexUsed;
5647 break;
5648 }
5649 }
5650 if (j == streamsArray.num_streams) {
5651 LOGE("Did not find matching stream to update index");
5652 assert(0);
5653 }
5654
5655 pendingBufferIter->need_metadata = needMetadata;
5656 if (needMetadata)
5657 streams_need_metadata += 1;
5658 LOGD("calling YUV channel request, need_metadata is %d",
5659 needMetadata);
5660 } else {
5661 LOGD("request with buffer %p, frame_number %d",
5662 output.buffer, frameNumber);
5663
5664 rc = channel->request(output.buffer, frameNumber, indexUsed);
5665
5666 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5667 uint32_t j = 0;
5668 for (j = 0; j < streamsArray.num_streams; j++) {
5669 if (streamsArray.stream_request[j].streamID == streamId) {
5670 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5671 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5672 else
5673 streamsArray.stream_request[j].buf_index = indexUsed;
5674 break;
5675 }
5676 }
5677 if (j == streamsArray.num_streams) {
5678 LOGE("Did not find matching stream to update index");
5679 assert(0);
5680 }
5681
5682 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5683 && mBatchSize) {
5684 mToBeQueuedVidBufs++;
5685 if (mToBeQueuedVidBufs == mBatchSize) {
5686 channel->queueBatchBuf();
5687 }
5688 }
5689 if (rc < 0) {
5690 LOGE("request failed");
5691 pthread_mutex_unlock(&mMutex);
5692 return rc;
5693 }
5694 }
5695 pendingBufferIter++;
5696 }
5697
5698 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5699 itr++) {
5700 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5701
5702 if (channel == NULL) {
5703 LOGE("invalid channel pointer for stream");
5704 assert(0);
5705 return BAD_VALUE;
5706 }
5707
5708 InternalRequest requestedStream;
5709 requestedStream = (*itr);
5710
5711
5712 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5713 LOGD("snapshot request internally input buffer %p, frame_number %d",
5714 request->input_buffer, frameNumber);
5715 if(request->input_buffer != NULL){
5716 rc = channel->request(NULL, frameNumber,
5717 pInputBuffer, &mReprocMeta, indexUsed, true,
5718 requestedStream.meteringOnly);
5719 if (rc < 0) {
5720 LOGE("Fail to request on picture channel");
5721 pthread_mutex_unlock(&mMutex);
5722 return rc;
5723 }
5724 } else {
5725 LOGD("snapshot request with frame_number %d", frameNumber);
5726 if (!request->settings) {
5727 rc = channel->request(NULL, frameNumber,
5728 NULL, mPrevParameters, indexUsed, true,
5729 requestedStream.meteringOnly);
5730 } else {
5731 rc = channel->request(NULL, frameNumber,
5732 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5733 }
5734 if (rc < 0) {
5735 LOGE("Fail to request on picture channel");
5736 pthread_mutex_unlock(&mMutex);
5737 return rc;
5738 }
5739
5740 if ((*itr).meteringOnly != 1) {
5741 requestedStream.need_metadata = 1;
5742 streams_need_metadata++;
5743 }
5744 }
5745
5746 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5747 uint32_t j = 0;
5748 for (j = 0; j < streamsArray.num_streams; j++) {
5749 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005750 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5751 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5752 else
5753 streamsArray.stream_request[j].buf_index = indexUsed;
5754 break;
5755 }
5756 }
5757 if (j == streamsArray.num_streams) {
5758 LOGE("Did not find matching stream to update index");
5759 assert(0);
5760 }
5761
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005762 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005763 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005764 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005765 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005766 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005767 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005768 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005769
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005770 //If 2 streams have need_metadata set to true, fail the request, unless
5771 //we copy/reference count the metadata buffer
5772 if (streams_need_metadata > 1) {
5773             LOGE("not supporting a request in which two streams require"
5774                     " 2 HAL metadata buffers for reprocessing");
5775 pthread_mutex_unlock(&mMutex);
5776 return -EINVAL;
5777 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005778
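    // Choose the PDAF (phase-detection) data mode for this frame: skip PD data by
    // default when a depth channel exists, honor the per-request enable key when a
    // depth blob is actually requested, and otherwise reuse the last requested mode.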
Emilian Peev656e4fa2017-06-02 16:47:04 +01005779 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5780 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5781 if (depthRequestPresent && mDepthChannel) {
5782 if (request->settings) {
5783 camera_metadata_ro_entry entry;
5784 if (find_camera_metadata_ro_entry(request->settings,
5785 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5786 if (entry.data.u8[0]) {
5787 pdafEnable = CAM_PD_DATA_ENABLED;
5788 } else {
5789 pdafEnable = CAM_PD_DATA_SKIP;
5790 }
5791 mDepthCloudMode = pdafEnable;
5792 } else {
5793 pdafEnable = mDepthCloudMode;
5794 }
5795 } else {
5796 pdafEnable = mDepthCloudMode;
5797 }
5798 }
5799
Emilian Peev7650c122017-01-19 08:24:33 -08005800 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5801 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5802 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5803 pthread_mutex_unlock(&mMutex);
5804 return BAD_VALUE;
5805 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005806
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005807 if (request->input_buffer == NULL) {
5808 /* Set the parameters to backend:
5809 * - For every request in NORMAL MODE
5810 * - For every request in HFR mode during preview only case
5811 * - Once every batch in HFR mode during video recording
5812 */
5813 if (!mBatchSize ||
5814 (mBatchSize && !isVidBufRequested) ||
5815 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5816 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5817 mBatchSize, isVidBufRequested,
5818 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005819
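                // Merge the streams requested across the batch into mBatchedStreamsArray
                // (skipping duplicates) so that a single set_parms call covers every
                // stream touched by the batch.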
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005820 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5821 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5822 uint32_t m = 0;
5823 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5824 if (streamsArray.stream_request[k].streamID ==
5825 mBatchedStreamsArray.stream_request[m].streamID)
5826 break;
5827 }
5828 if (m == mBatchedStreamsArray.num_streams) {
5829 mBatchedStreamsArray.stream_request\
5830 [mBatchedStreamsArray.num_streams].streamID =
5831 streamsArray.stream_request[k].streamID;
5832 mBatchedStreamsArray.stream_request\
5833 [mBatchedStreamsArray.num_streams].buf_index =
5834 streamsArray.stream_request[k].buf_index;
5835 mBatchedStreamsArray.num_streams =
5836 mBatchedStreamsArray.num_streams + 1;
5837 }
5838 }
5839 streamsArray = mBatchedStreamsArray;
5840 }
5841 /* Update stream id of all the requested buffers */
5842 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5843 streamsArray)) {
5844 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005845 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005846 return BAD_VALUE;
5847 }
5848
5849 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5850 mParameters);
5851 if (rc < 0) {
5852 LOGE("set_parms failed");
5853 }
5854 /* reset to zero coz, the batch is queued */
5855 mToBeQueuedVidBufs = 0;
5856 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5857 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5858 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005859 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5860 uint32_t m = 0;
5861 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5862 if (streamsArray.stream_request[k].streamID ==
5863 mBatchedStreamsArray.stream_request[m].streamID)
5864 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005865 }
5866 if (m == mBatchedStreamsArray.num_streams) {
5867 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5868 streamID = streamsArray.stream_request[k].streamID;
5869 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5870 buf_index = streamsArray.stream_request[k].buf_index;
5871 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5872 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005873 }
5874 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005875 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005876
5877 // Start all streams after the first setting is sent, so that the
5878 // setting can be applied sooner: (0 + apply_delay)th frame.
5879 if (mState == CONFIGURED && mChannelHandle) {
5880 //Then start them.
5881 LOGH("Start META Channel");
5882 rc = mMetadataChannel->start();
5883 if (rc < 0) {
5884 LOGE("META channel start failed");
5885 pthread_mutex_unlock(&mMutex);
5886 return rc;
5887 }
5888
5889 if (mAnalysisChannel) {
5890 rc = mAnalysisChannel->start();
5891 if (rc < 0) {
5892 LOGE("Analysis channel start failed");
5893 mMetadataChannel->stop();
5894 pthread_mutex_unlock(&mMutex);
5895 return rc;
5896 }
5897 }
5898
5899 if (mSupportChannel) {
5900 rc = mSupportChannel->start();
5901 if (rc < 0) {
5902 LOGE("Support channel start failed");
5903 mMetadataChannel->stop();
5904                     /* Although support and analysis are mutually exclusive today,
5905                        adding it in any case for future-proofing */
5906 if (mAnalysisChannel) {
5907 mAnalysisChannel->stop();
5908 }
5909 pthread_mutex_unlock(&mMutex);
5910 return rc;
5911 }
5912 }
5913 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5914 it != mStreamInfo.end(); it++) {
5915 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5916 LOGH("Start Processing Channel mask=%d",
5917 channel->getStreamTypeMask());
5918 rc = channel->start();
5919 if (rc < 0) {
5920 LOGE("channel start failed");
5921 pthread_mutex_unlock(&mMutex);
5922 return rc;
5923 }
5924 }
5925
5926 if (mRawDumpChannel) {
5927 LOGD("Starting raw dump stream");
5928 rc = mRawDumpChannel->start();
5929 if (rc != NO_ERROR) {
5930 LOGE("Error Starting Raw Dump Channel");
5931 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5932 it != mStreamInfo.end(); it++) {
5933 QCamera3Channel *channel =
5934 (QCamera3Channel *)(*it)->stream->priv;
5935 LOGH("Stopping Processing Channel mask=%d",
5936 channel->getStreamTypeMask());
5937 channel->stop();
5938 }
5939 if (mSupportChannel)
5940 mSupportChannel->stop();
5941 if (mAnalysisChannel) {
5942 mAnalysisChannel->stop();
5943 }
5944 mMetadataChannel->stop();
5945 pthread_mutex_unlock(&mMutex);
5946 return rc;
5947 }
5948 }
5949
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005950 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005951 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005952 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005953 if (rc != NO_ERROR) {
5954 LOGE("start_channel failed %d", rc);
5955 pthread_mutex_unlock(&mMutex);
5956 return rc;
5957 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005958
5959 {
5960 // Configure Easel for stream on.
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005961 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005962
5963 // Now that sensor mode should have been selected, get the selected sensor mode
5964 // info.
5965 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5966 getCurrentSensorModeInfo(mSensorModeInfo);
5967
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005968 if (EaselManagerClientOpened) {
5969 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chend77a5462017-06-02 18:00:38 -07005970 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
5971 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005972 if (rc != OK) {
5973 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5974 mCameraId, mSensorModeInfo.op_pixel_clk);
5975 pthread_mutex_unlock(&mMutex);
5976 return rc;
5977 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005978 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005979 }
5980 }
5981
5982 // Start sensor streaming.
5983 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5984 mChannelHandle);
5985 if (rc != NO_ERROR) {
5986                 LOGE("start_sensor_streaming failed %d", rc);
5987 pthread_mutex_unlock(&mMutex);
5988 return rc;
5989 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005990 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005991 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005992 }
5993
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005994 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chenjie Luo4a761802017-06-13 17:35:54 +00005995 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005996 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07005997 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005998 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5999 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6000 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6001 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
6002 rc = enableHdrPlusModeLocked();
6003 if (rc != OK) {
6004 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6005 pthread_mutex_unlock(&mMutex);
6006 return rc;
6007 }
6008
6009 mFirstPreviewIntentSeen = true;
6010 }
6011 }
6012
Thierry Strudel3d639192016-09-09 11:52:26 -07006013 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6014
6015 mState = STARTED;
6016 // Added a timed condition wait
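    // Note: pthread_cond_timedwait() measures the absolute deadline against the clock
    // the condition variable was initialized with.  Since the deadline below is built
    // from CLOCK_MONOTONIC, this assumes mRequestCond was created with a monotonic
    // clock attribute, roughly:
    //   pthread_condattr_t attr;
    //   pthread_condattr_init(&attr);
    //   pthread_condattr_setclock(&attr, CLOCK_MONOTONIC);
    //   pthread_cond_init(&mRequestCond, &attr);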
6017 struct timespec ts;
6018 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006019 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006020 if (rc < 0) {
6021 isValidTimeout = 0;
6022         LOGE("Error reading the monotonic clock!");
6023 }
6024 else {
6025         // Use a 5-second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006026 int64_t timeout = 5;
6027 {
6028 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6029 // If there is a pending HDR+ request, the following requests may be blocked until the
6030 // HDR+ request is done. So allow a longer timeout.
6031 if (mHdrPlusPendingRequests.size() > 0) {
6032 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6033 }
6034 }
6035 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006036 }
6037 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006038 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006039 (mState != ERROR) && (mState != DEINIT)) {
6040 if (!isValidTimeout) {
6041 LOGD("Blocking on conditional wait");
6042 pthread_cond_wait(&mRequestCond, &mMutex);
6043 }
6044 else {
6045 LOGD("Blocking on timed conditional wait");
6046 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6047 if (rc == ETIMEDOUT) {
6048 rc = -ENODEV;
6049 LOGE("Unblocked on timeout!!!!");
6050 break;
6051 }
6052 }
6053 LOGD("Unblocked");
6054 if (mWokenUpByDaemon) {
6055 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006056 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006057 break;
6058 }
6059 }
6060 pthread_mutex_unlock(&mMutex);
6061
6062 return rc;
6063}
6064
6065/*===========================================================================
6066 * FUNCTION : dump
6067 *
6068 * DESCRIPTION: Dump HAL3 state (pending requests, pending buffers and the
6069 *              pending frame drop list) to the given file descriptor
6070 *
6071 * PARAMETERS :
6072 *   @fd : file descriptor to write the dump to
6073 * RETURN     : None
6074 *==========================================================================*/
6075void QCamera3HardwareInterface::dump(int fd)
6076{
6077 pthread_mutex_lock(&mMutex);
6078 dprintf(fd, "\n Camera HAL3 information Begin \n");
6079
6080 dprintf(fd, "\nNumber of pending requests: %zu \n",
6081 mPendingRequestsList.size());
6082 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6083 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6084 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6085 for(pendingRequestIterator i = mPendingRequestsList.begin();
6086 i != mPendingRequestsList.end(); i++) {
6087 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6088 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6089 i->input_buffer);
6090 }
6091 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6092 mPendingBuffersMap.get_num_overall_buffers());
6093 dprintf(fd, "-------+------------------\n");
6094 dprintf(fd, " Frame | Stream type mask \n");
6095 dprintf(fd, "-------+------------------\n");
6096 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6097 for(auto &j : req.mPendingBufferList) {
6098 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6099 dprintf(fd, " %5d | %11d \n",
6100 req.frame_number, channel->getStreamTypeMask());
6101 }
6102 }
6103 dprintf(fd, "-------+------------------\n");
6104
6105 dprintf(fd, "\nPending frame drop list: %zu\n",
6106 mPendingFrameDropList.size());
6107 dprintf(fd, "-------+-----------\n");
6108 dprintf(fd, " Frame | Stream ID \n");
6109 dprintf(fd, "-------+-----------\n");
6110 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6111 i != mPendingFrameDropList.end(); i++) {
6112 dprintf(fd, " %5d | %9d \n",
6113 i->frame_number, i->stream_ID);
6114 }
6115 dprintf(fd, "-------+-----------\n");
6116
6117 dprintf(fd, "\n Camera HAL3 information End \n");
6118
6119 /* use dumpsys media.camera as trigger to send update debug level event */
6120 mUpdateDebugLevel = true;
6121 pthread_mutex_unlock(&mMutex);
6122 return;
6123}
6124
6125/*===========================================================================
6126 * FUNCTION : flush
6127 *
6128 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6129 * conditionally restarts channels
6130 *
6131 * PARAMETERS :
6132 * @ restartChannels: re-start all channels
6133 *
6134 *
6135 * RETURN :
6136 * 0 on success
6137 * Error code on failure
6138 *==========================================================================*/
6139int QCamera3HardwareInterface::flush(bool restartChannels)
6140{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006141 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006142 int32_t rc = NO_ERROR;
6143
6144 LOGD("Unblocking Process Capture Request");
6145 pthread_mutex_lock(&mMutex);
6146 mFlush = true;
6147 pthread_mutex_unlock(&mMutex);
6148
6149 rc = stopAllChannels();
6150     // Unlink the dual camera bundle, if this device was linked
6151 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006152 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6153 &m_pDualCamCmdPtr->bundle_info;
6154 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006155 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6156 pthread_mutex_lock(&gCamLock);
6157
6158 if (mIsMainCamera == 1) {
6159 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6160 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006161 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006162 // related session id should be session id of linked session
6163 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6164 } else {
6165 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6166 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006167 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006168 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6169 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006170 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006171 pthread_mutex_unlock(&gCamLock);
6172
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006173 rc = mCameraHandle->ops->set_dual_cam_cmd(
6174 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006175 if (rc < 0) {
6176 LOGE("Dualcam: Unlink failed, but still proceed to close");
6177 }
6178 }
6179
6180 if (rc < 0) {
6181 LOGE("stopAllChannels failed");
6182 return rc;
6183 }
6184 if (mChannelHandle) {
6185 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6186 mChannelHandle);
6187 }
6188
6189 // Reset bundle info
6190 rc = setBundleInfo();
6191 if (rc < 0) {
6192 LOGE("setBundleInfo failed %d", rc);
6193 return rc;
6194 }
6195
6196 // Mutex Lock
6197 pthread_mutex_lock(&mMutex);
6198
6199 // Unblock process_capture_request
6200 mPendingLiveRequest = 0;
6201 pthread_cond_signal(&mRequestCond);
6202
6203 rc = notifyErrorForPendingRequests();
6204 if (rc < 0) {
6205 LOGE("notifyErrorForPendingRequests failed");
6206 pthread_mutex_unlock(&mMutex);
6207 return rc;
6208 }
6209
6210 mFlush = false;
6211
6212 // Start the Streams/Channels
6213 if (restartChannels) {
6214 rc = startAllChannels();
6215 if (rc < 0) {
6216 LOGE("startAllChannels failed");
6217 pthread_mutex_unlock(&mMutex);
6218 return rc;
6219 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006220 if (mChannelHandle) {
6221 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006222 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006223 if (rc < 0) {
6224 LOGE("start_channel failed");
6225 pthread_mutex_unlock(&mMutex);
6226 return rc;
6227 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006228 }
6229 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006230 pthread_mutex_unlock(&mMutex);
6231
6232 return 0;
6233}
6234
6235/*===========================================================================
6236 * FUNCTION : flushPerf
6237 *
6238 * DESCRIPTION: This is the performance-optimized version of flush that does
6239 *              not stream off; instead it flushes the backend and waits for pending buffers
6240 *
6241 * PARAMETERS :
6242 *
6243 *
6244 * RETURN : 0 : success
6245 * -EINVAL: input is malformed (device is not valid)
6246 * -ENODEV: if the device has encountered a serious error
6247 *==========================================================================*/
6248int QCamera3HardwareInterface::flushPerf()
6249{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006250 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006251 int32_t rc = 0;
6252 struct timespec timeout;
6253 bool timed_wait = false;
6254
6255 pthread_mutex_lock(&mMutex);
6256 mFlushPerf = true;
6257 mPendingBuffersMap.numPendingBufsAtFlush =
6258 mPendingBuffersMap.get_num_overall_buffers();
6259 LOGD("Calling flush. Wait for %d buffers to return",
6260 mPendingBuffersMap.numPendingBufsAtFlush);
6261
6262 /* send the flush event to the backend */
6263 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6264 if (rc < 0) {
6265 LOGE("Error in flush: IOCTL failure");
6266 mFlushPerf = false;
6267 pthread_mutex_unlock(&mMutex);
6268 return -ENODEV;
6269 }
6270
6271 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6272 LOGD("No pending buffers in HAL, return flush");
6273 mFlushPerf = false;
6274 pthread_mutex_unlock(&mMutex);
6275 return rc;
6276 }
6277
6278 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006279 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006280 if (rc < 0) {
6281         LOGE("Error reading the monotonic clock, cannot use timed wait");
6282 } else {
6283 timeout.tv_sec += FLUSH_TIMEOUT;
6284 timed_wait = true;
6285 }
6286
6287 //Block on conditional variable
6288 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6289 LOGD("Waiting on mBuffersCond");
6290 if (!timed_wait) {
6291 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6292 if (rc != 0) {
6293 LOGE("pthread_cond_wait failed due to rc = %s",
6294 strerror(rc));
6295 break;
6296 }
6297 } else {
6298 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6299 if (rc != 0) {
6300 LOGE("pthread_cond_timedwait failed due to rc = %s",
6301 strerror(rc));
6302 break;
6303 }
6304 }
6305 }
6306 if (rc != 0) {
6307 mFlushPerf = false;
6308 pthread_mutex_unlock(&mMutex);
6309 return -ENODEV;
6310 }
6311
6312 LOGD("Received buffers, now safe to return them");
6313
6314 //make sure the channels handle flush
6315 //currently only required for the picture channel to release snapshot resources
6316 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6317 it != mStreamInfo.end(); it++) {
6318 QCamera3Channel *channel = (*it)->channel;
6319 if (channel) {
6320 rc = channel->flush();
6321 if (rc) {
6322 LOGE("Flushing the channels failed with error %d", rc);
6323 // even though the channel flush failed we need to continue and
6324 // return the buffers we have to the framework, however the return
6325 // value will be an error
6326 rc = -ENODEV;
6327 }
6328 }
6329 }
6330
6331 /* notify the frameworks and send errored results */
6332 rc = notifyErrorForPendingRequests();
6333 if (rc < 0) {
6334 LOGE("notifyErrorForPendingRequests failed");
6335 pthread_mutex_unlock(&mMutex);
6336 return rc;
6337 }
6338
6339 //unblock process_capture_request
6340 mPendingLiveRequest = 0;
6341 unblockRequestIfNecessary();
6342
6343 mFlushPerf = false;
6344 pthread_mutex_unlock(&mMutex);
6345 LOGD ("Flush Operation complete. rc = %d", rc);
6346 return rc;
6347}
6348
6349/*===========================================================================
6350 * FUNCTION : handleCameraDeviceError
6351 *
6352 * DESCRIPTION: This function performs an internal flush, notifies the error to
6353 *              the framework, and updates the state variable.
6354 *
6355 * PARAMETERS : None
6356 *
6357 * RETURN : NO_ERROR on Success
6358 * Error code on failure
6359 *==========================================================================*/
6360int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6361{
6362 int32_t rc = NO_ERROR;
6363
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006364 {
6365 Mutex::Autolock lock(mFlushLock);
6366 pthread_mutex_lock(&mMutex);
6367 if (mState != ERROR) {
6368 //if mState != ERROR, nothing to be done
6369 pthread_mutex_unlock(&mMutex);
6370 return NO_ERROR;
6371 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006372 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006373
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006374 rc = flush(false /* restart channels */);
6375 if (NO_ERROR != rc) {
6376 LOGE("internal flush to handle mState = ERROR failed");
6377 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006378
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006379 pthread_mutex_lock(&mMutex);
6380 mState = DEINIT;
6381 pthread_mutex_unlock(&mMutex);
6382 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006383
6384 camera3_notify_msg_t notify_msg;
6385 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6386 notify_msg.type = CAMERA3_MSG_ERROR;
6387 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6388 notify_msg.message.error.error_stream = NULL;
6389 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006390 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006391
6392 return rc;
6393}
6394
6395/*===========================================================================
6396 * FUNCTION : captureResultCb
6397 *
6398 * DESCRIPTION: Callback handler for all capture result
6399 * (streams, as well as metadata)
6400 *
6401 * PARAMETERS :
6402 * @metadata : metadata information
6403 * @buffer        : actual gralloc buffer to be returned to frameworks.
6404 *                   NULL if metadata.
 * @frame_number  : frame number of the capture request
 * @isInputBuffer : true if this callback is for the request's input buffer
6405 *
6406 * RETURN : NONE
6407 *==========================================================================*/
6408void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6409 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6410{
6411 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006412 pthread_mutex_lock(&mMutex);
6413 uint8_t batchSize = mBatchSize;
6414 pthread_mutex_unlock(&mMutex);
6415 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006416 handleBatchMetadata(metadata_buf,
6417 true /* free_and_bufdone_meta_buf */);
6418 } else { /* mBatchSize = 0 */
6419 hdrPlusPerfLock(metadata_buf);
6420 pthread_mutex_lock(&mMutex);
6421 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006422 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006423 true /* last urgent frame of batch metadata */,
6424 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006425 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006426 pthread_mutex_unlock(&mMutex);
6427 }
6428 } else if (isInputBuffer) {
6429 pthread_mutex_lock(&mMutex);
6430 handleInputBufferWithLock(frame_number);
6431 pthread_mutex_unlock(&mMutex);
6432 } else {
6433 pthread_mutex_lock(&mMutex);
6434 handleBufferWithLock(buffer, frame_number);
6435 pthread_mutex_unlock(&mMutex);
6436 }
6437 return;
6438}
6439
6440/*===========================================================================
6441 * FUNCTION : getReprocessibleOutputStreamId
6442 *
6443 * DESCRIPTION: Get source output stream id for the input reprocess stream
6444 * based on size and format, which would be the largest
6445 * output stream if an input stream exists.
6446 *
6447 * PARAMETERS :
6448 * @id : return the stream id if found
6449 *
6450 * RETURN : int32_t type of status
6451 * NO_ERROR -- success
6452 *              non-zero failure code
6453 *==========================================================================*/
6454int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6455{
6456     /* Check if any output or bidirectional stream exists with the same size and
6457        format as the input stream, and return that stream */
6458 if ((mInputStreamInfo.dim.width > 0) &&
6459 (mInputStreamInfo.dim.height > 0)) {
6460 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6461 it != mStreamInfo.end(); it++) {
6462
6463 camera3_stream_t *stream = (*it)->stream;
6464 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6465 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6466 (stream->format == mInputStreamInfo.format)) {
6467 // Usage flag for an input stream and the source output stream
6468 // may be different.
6469 LOGD("Found reprocessible output stream! %p", *it);
6470 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6471 stream->usage, mInputStreamInfo.usage);
6472
6473 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6474 if (channel != NULL && channel->mStreams[0]) {
6475 id = channel->mStreams[0]->getMyServerID();
6476 return NO_ERROR;
6477 }
6478 }
6479 }
6480 } else {
6481 LOGD("No input stream, so no reprocessible output stream");
6482 }
6483 return NAME_NOT_FOUND;
6484}
6485
6486/*===========================================================================
6487 * FUNCTION : lookupFwkName
6488 *
6489 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6490 *              make sure the parameter is correctly propagated
6491 *
6492 * PARAMETERS :
6493 * @arr : map between the two enums
6494 * @len : len of the map
6495 * @hal_name : name of the hal_parm to map
6496 *
6497 * RETURN : int type of status
6498 * fwk_name -- success
6499 *              non-zero failure code
6500 *==========================================================================*/
6501template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6502 size_t len, halType hal_name)
6503{
6504
6505 for (size_t i = 0; i < len; i++) {
6506 if (arr[i].hal_name == hal_name) {
6507 return arr[i].fwk_name;
6508 }
6509 }
6510
6511     /* Not being able to find a matching framework type is not necessarily
6512      * an error. This happens when mm-camera supports more attributes
6513      * than the framework does */
6514 LOGH("Cannot find matching framework type");
6515 return NAME_NOT_FOUND;
6516}
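
// Example usage (illustrative only; the map and variable names below are hypothetical):
//   int fwkEffect = lookupFwkName(EFFECT_MODES_MAP,
//           METADATA_MAP_SIZE(EFFECT_MODES_MAP), halEffectMode);
//   if (fwkEffect != NAME_NOT_FOUND) {
//       // safe to report the framework enum value
//   }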
6517
6518/*===========================================================================
6519 * FUNCTION : lookupHalName
6520 *
6521 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6522 *              make sure the parameter is correctly propagated
6523 *
6524 * PARAMETERS :
6525 * @arr : map between the two enums
6526 * @len : len of the map
6527 * @fwk_name : framework enum value to map to its HAL counterpart
6528 *
6529 * RETURN : int32_t type of status
6530 * hal_name -- success
6531 *              non-zero failure code
6532 *==========================================================================*/
6533template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6534 size_t len, fwkType fwk_name)
6535{
6536 for (size_t i = 0; i < len; i++) {
6537 if (arr[i].fwk_name == fwk_name) {
6538 return arr[i].hal_name;
6539 }
6540 }
6541
6542 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6543 return NAME_NOT_FOUND;
6544}
6545
6546/*===========================================================================
6547 * FUNCTION : lookupProp
6548 *
6549 * DESCRIPTION: lookup a value by its name
6550 *
6551 * PARAMETERS :
6552 * @arr : map between the two enums
6553 * @len : size of the map
6554 * @name : name to be looked up
6555 *
6556 * RETURN : Value if found
6557 * CAM_CDS_MODE_MAX if not found
6558 *==========================================================================*/
6559template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6560 size_t len, const char *name)
6561{
6562 if (name) {
6563 for (size_t i = 0; i < len; i++) {
6564 if (!strcmp(arr[i].desc, name)) {
6565 return arr[i].val;
6566 }
6567 }
6568 }
6569 return CAM_CDS_MODE_MAX;
6570}
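// Illustrative sketch (CDS_MAP and prop are assumed names, standing in for a
// descriptor/value table and a property string read elsewhere): resolve a CDS
// mode by its string name, falling back when the name is unknown.
//     cam_cds_mode_type_t cds = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
//     if (CAM_CDS_MODE_MAX == cds) { /* unrecognized name, keep the default */ }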
6571
6572/*===========================================================================
6573 * FUNCTION   : translateFromHalMetadata
 *
6574 * DESCRIPTION: Translate metadata reported by the HAL/backend into the
 *              framework camera_metadata_t format
6575 *
6576 * PARAMETERS :
6577 * @metadata : metadata information from callback
6578 * @timestamp: metadata buffer timestamp
6579 * @request_id: request id
6580 * @jpegMetadata: additional jpeg metadata
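 *   @pipeline_depth: pipeline depth reported for this request
 *   @capture_intent: capture intent of this request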
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006581 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006582 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6583 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006584 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006585 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6586 * in a batch. Always true for non-batch mode.
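 *   @fwk_cacMode: color aberration correction mode requested by the framework
 *   @enableZsl: whether ZSL is enabled for this request (may be NULL when not specified)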
Thierry Strudel3d639192016-09-09 11:52:26 -07006587 *
6588 * RETURN : camera_metadata_t*
6589 * metadata in a format specified by fwk
6590 *==========================================================================*/
6591camera_metadata_t*
6592QCamera3HardwareInterface::translateFromHalMetadata(
6593 metadata_buffer_t *metadata,
6594 nsecs_t timestamp,
6595 int32_t request_id,
6596 const CameraMetadata& jpegMetadata,
6597 uint8_t pipeline_depth,
6598 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006599 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006600 /* DevCamDebug metadata translateFromHalMetadata argument */
6601 uint8_t DevCamDebug_meta_enable,
6602 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006603 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006604 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006605 bool lastMetadataInBatch,
6606 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006607{
6608 CameraMetadata camMetadata;
6609 camera_metadata_t *resultMetadata;
6610
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006611 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006612 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6613 * Timestamp is needed because it's used for shutter notify calculation.
6614 * */
6615 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6616 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006617 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006618 }
6619
Thierry Strudel3d639192016-09-09 11:52:26 -07006620 if (jpegMetadata.entryCount())
6621 camMetadata.append(jpegMetadata);
6622
6623 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6624 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6625 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6626 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006627 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006628 if (mBatchSize == 0) {
6629 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6630 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6631 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006632
Samuel Ha68ba5172016-12-15 18:41:12 -08006633 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6634    // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6635 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6636 // DevCamDebug metadata translateFromHalMetadata AF
6637 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6638 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6639 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6640 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6641 }
6642 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6643 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6644 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6645 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6646 }
6647 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6648 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6649 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6650 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6651 }
6652 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6653 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6654 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6655 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6656 }
6657 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6658 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6659 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6660 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6661 }
6662 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6663 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6664 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6665 *DevCamDebug_af_monitor_pdaf_target_pos;
6666 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6667 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6668 }
6669 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6670 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6671 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6672 *DevCamDebug_af_monitor_pdaf_confidence;
6673 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6674 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6675 }
6676 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6677 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6678 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6679 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6680 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6681 }
6682 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6683 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6684 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6685 *DevCamDebug_af_monitor_tof_target_pos;
6686 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6687 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6688 }
6689 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6690 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6691 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6692 *DevCamDebug_af_monitor_tof_confidence;
6693 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6694 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6695 }
6696 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6697 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6698 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6699 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6700 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6701 }
6702 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6703 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6704 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6705 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6706 &fwk_DevCamDebug_af_monitor_type_select, 1);
6707 }
6708 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6709 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6710 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6711 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6712 &fwk_DevCamDebug_af_monitor_refocus, 1);
6713 }
6714 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6715 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6716 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6717 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6718 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6719 }
6720 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6721 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6722 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6723 *DevCamDebug_af_search_pdaf_target_pos;
6724 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6725 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6726 }
6727 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6728 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6729 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6730 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6731 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6732 }
6733 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6734 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6735 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6736 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6737 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6738 }
6739 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6740 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6741 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6742 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6743 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6744 }
6745 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6746 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6747 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6748 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6749 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6750 }
6751 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6752 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6753 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6754 *DevCamDebug_af_search_tof_target_pos;
6755 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6756 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6757 }
6758 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6759 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6760 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6761 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6762 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6763 }
6764 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6765 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6766 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6767 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6768 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6769 }
6770 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6771 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6772 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6773 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6774 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6775 }
6776 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6777 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6778 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6779 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6780 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6781 }
6782 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6783 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6784 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6785 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6786 &fwk_DevCamDebug_af_search_type_select, 1);
6787 }
6788 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6789 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6790 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6791 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6792 &fwk_DevCamDebug_af_search_next_pos, 1);
6793 }
6794 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6795 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6796 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6797 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6798 &fwk_DevCamDebug_af_search_target_pos, 1);
6799 }
6800 // DevCamDebug metadata translateFromHalMetadata AEC
6801 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6802 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6803 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6804 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6805 }
6806 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6807 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6808 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6809 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6810 }
6811 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6812 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6813 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6814 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6815 }
6816 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6817 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6818 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6819 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6820 }
6821 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6822 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6823 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6824 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6825 }
6826 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6827 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6828 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6829 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6830 }
6831 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6832 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6833 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6834 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6835 }
6836 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6837 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6838 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6839 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6840 }
Samuel Ha34229982017-02-17 13:51:11 -08006841 // DevCamDebug metadata translateFromHalMetadata zzHDR
6842 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6843 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6844 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6845 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6846 }
6847 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6848 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006849 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006850 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6851 }
6852 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6853 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6854 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6855 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6856 }
6857 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6858 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006859 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006860 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6861 }
6862 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6863 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6864 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6865 *DevCamDebug_aec_hdr_sensitivity_ratio;
6866 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6867 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6868 }
6869 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6870 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6871 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6872 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6873 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6874 }
6875 // DevCamDebug metadata translateFromHalMetadata ADRC
6876 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6877 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6878 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6879 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6880 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6881 }
6882 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6883 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6884 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6885 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6886 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6887 }
6888 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6889 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6890 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6891 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6892 }
6893 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6894 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6895 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6896 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6897 }
6898 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6899 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6900 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6901 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6902 }
6903 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6904 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6905 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6906 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6907 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006908 // DevCamDebug metadata translateFromHalMetadata AWB
6909 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6910 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6911 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6912 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6913 }
6914 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6915 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6916 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6917 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6918 }
6919 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6920 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6921 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6922 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6923 }
6924 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6925 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6926 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6927 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6928 }
6929 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6930 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6931 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6932 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6933 }
6934 }
6935 // atrace_end(ATRACE_TAG_ALWAYS);
6936
Thierry Strudel3d639192016-09-09 11:52:26 -07006937 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6938 int64_t fwk_frame_number = *frame_number;
6939 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6940 }
6941
6942 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6943 int32_t fps_range[2];
6944 fps_range[0] = (int32_t)float_range->min_fps;
6945 fps_range[1] = (int32_t)float_range->max_fps;
6946 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6947 fps_range, 2);
6948 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6949 fps_range[0], fps_range[1]);
6950 }
6951
6952 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6953 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6954 }
6955
6956 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6957        int val = lookupFwkName(SCENE_MODES_MAP,
6958 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6959 *sceneMode);
6960 if (NAME_NOT_FOUND != val) {
6961 uint8_t fwkSceneMode = (uint8_t)val;
6962 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6963 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6964 fwkSceneMode);
6965 }
6966 }
6967
6968 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6969 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6970 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6971 }
6972
6973 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6974 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6975 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6976 }
6977
6978 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6979 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6980 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6981 }
6982
6983 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6984 CAM_INTF_META_EDGE_MODE, metadata) {
6985 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6986 }
6987
6988 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6989 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6990 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6991 }
6992
6993 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6994 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6995 }
6996
6997 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6998 if (0 <= *flashState) {
6999 uint8_t fwk_flashState = (uint8_t) *flashState;
7000 if (!gCamCapability[mCameraId]->flash_available) {
7001 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7002 }
7003 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7004 }
7005 }
7006
7007 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7008 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7009 if (NAME_NOT_FOUND != val) {
7010 uint8_t fwk_flashMode = (uint8_t)val;
7011 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7012 }
7013 }
7014
7015 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7016 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7017 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7018 }
7019
7020 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7021 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7022 }
7023
7024 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7025 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7026 }
7027
7028 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7029 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7030 }
7031
7032 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7033 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7034 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7035 }
7036
7037 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7038 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7039 LOGD("fwk_videoStab = %d", fwk_videoStab);
7040 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7041 } else {
7042        // Regardless of whether video stabilization is supported, CTS expects the EIS result
7043        // to be non-NULL, so hardcode the video stabilization result to OFF mode.
7044 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7045 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007046 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007047 }
7048
7049 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7050 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7051 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7052 }
7053
7054 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7055 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7056 }
7057
Thierry Strudel3d639192016-09-09 11:52:26 -07007058 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7059 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007060 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007061
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007062 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7063 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007064
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007065        LOGD("applied dynamic blackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007066 blackLevelAppliedPattern->cam_black_level[0],
7067 blackLevelAppliedPattern->cam_black_level[1],
7068 blackLevelAppliedPattern->cam_black_level[2],
7069 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007070 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7071 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007072
7073#ifndef USE_HAL_3_3
7074 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307075        // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007076 // depth space.
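        // Scale factor: 2^(14 - 10) = 16.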
Jason Lee4f3d96e2017-02-28 19:24:14 +05307077 fwk_blackLevelInd[0] /= 16.0;
7078 fwk_blackLevelInd[1] /= 16.0;
7079 fwk_blackLevelInd[2] /= 16.0;
7080 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007081 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7082 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007083#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007084 }
7085
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007086#ifndef USE_HAL_3_3
7087 // Fixed whitelevel is used by ISP/Sensor
7088 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7089 &gCamCapability[mCameraId]->white_level, 1);
7090#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007091
7092 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7093 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7094 int32_t scalerCropRegion[4];
7095 scalerCropRegion[0] = hScalerCropRegion->left;
7096 scalerCropRegion[1] = hScalerCropRegion->top;
7097 scalerCropRegion[2] = hScalerCropRegion->width;
7098 scalerCropRegion[3] = hScalerCropRegion->height;
7099
7100 // Adjust crop region from sensor output coordinate system to active
7101 // array coordinate system.
7102 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7103 scalerCropRegion[2], scalerCropRegion[3]);
7104
7105 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7106 }
7107
7108 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7109 LOGD("sensorExpTime = %lld", *sensorExpTime);
7110 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7111 }
7112
7113    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7114            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7115        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7116        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7117 }
7118
7119 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7120 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7121 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7122 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7123 sensorRollingShutterSkew, 1);
7124 }
7125
7126 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7127 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7128 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7129
7130 //calculate the noise profile based on sensitivity
7131 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7132 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
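        // ANDROID_SENSOR_NOISE_PROFILE carries one (S, O) pair per color channel;
        // the model approximates pixel noise variance as S * signal + O.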
7133 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7134 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7135 noise_profile[i] = noise_profile_S;
7136 noise_profile[i+1] = noise_profile_O;
7137 }
7138 LOGD("noise model entry (S, O) is (%f, %f)",
7139 noise_profile_S, noise_profile_O);
7140 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7141 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7142 }
7143
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007144#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007145 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007146 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007147 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007148 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007149 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7150 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7151 }
7152 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007153#endif
7154
Thierry Strudel3d639192016-09-09 11:52:26 -07007155 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7156 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7157 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7158 }
7159
7160 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7161 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7162 *faceDetectMode);
7163 if (NAME_NOT_FOUND != val) {
7164 uint8_t fwk_faceDetectMode = (uint8_t)val;
7165 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7166
7167 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7168 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7169 CAM_INTF_META_FACE_DETECTION, metadata) {
7170 uint8_t numFaces = MIN(
7171 faceDetectionInfo->num_faces_detected, MAX_ROI);
7172 int32_t faceIds[MAX_ROI];
7173 uint8_t faceScores[MAX_ROI];
7174 int32_t faceRectangles[MAX_ROI * 4];
7175 int32_t faceLandmarks[MAX_ROI * 6];
7176 size_t j = 0, k = 0;
7177
7178 for (size_t i = 0; i < numFaces; i++) {
7179 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7180 // Adjust crop region from sensor output coordinate system to active
7181 // array coordinate system.
7182 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7183 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7184 rect.width, rect.height);
7185
7186 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7187 faceRectangles+j, -1);
7188
Jason Lee8ce36fa2017-04-19 19:40:37 -07007189 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7190 "bottom-right (%d, %d)",
7191 faceDetectionInfo->frame_id, i,
7192 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7193 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7194
Thierry Strudel3d639192016-09-09 11:52:26 -07007195 j+= 4;
7196 }
7197 if (numFaces <= 0) {
7198 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7199 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7200 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7201 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7202 }
7203
7204 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7205 numFaces);
7206 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7207 faceRectangles, numFaces * 4U);
7208 if (fwk_faceDetectMode ==
7209 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7210 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7211 CAM_INTF_META_FACE_LANDMARK, metadata) {
7212
7213 for (size_t i = 0; i < numFaces; i++) {
7214 // Map the co-ordinate sensor output coordinate system to active
7215 // array coordinate system.
7216 mCropRegionMapper.toActiveArray(
7217 landmarks->face_landmarks[i].left_eye_center.x,
7218 landmarks->face_landmarks[i].left_eye_center.y);
7219 mCropRegionMapper.toActiveArray(
7220 landmarks->face_landmarks[i].right_eye_center.x,
7221 landmarks->face_landmarks[i].right_eye_center.y);
7222 mCropRegionMapper.toActiveArray(
7223 landmarks->face_landmarks[i].mouth_center.x,
7224 landmarks->face_landmarks[i].mouth_center.y);
7225
7226 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007227
7228 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7229 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7230 faceDetectionInfo->frame_id, i,
7231 faceLandmarks[k + LEFT_EYE_X],
7232 faceLandmarks[k + LEFT_EYE_Y],
7233 faceLandmarks[k + RIGHT_EYE_X],
7234 faceLandmarks[k + RIGHT_EYE_Y],
7235 faceLandmarks[k + MOUTH_X],
7236 faceLandmarks[k + MOUTH_Y]);
7237
Thierry Strudel04e026f2016-10-10 11:27:36 -07007238 k+= TOTAL_LANDMARK_INDICES;
7239 }
7240 } else {
7241 for (size_t i = 0; i < numFaces; i++) {
7242 setInvalidLandmarks(faceLandmarks+k);
7243 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007244 }
7245 }
7246
Jason Lee49619db2017-04-13 12:07:22 -07007247 for (size_t i = 0; i < numFaces; i++) {
7248 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7249
7250 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7251 faceDetectionInfo->frame_id, i, faceIds[i]);
7252 }
7253
Thierry Strudel3d639192016-09-09 11:52:26 -07007254 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7255 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7256 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007257 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007258 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7259 CAM_INTF_META_FACE_BLINK, metadata) {
7260 uint8_t detected[MAX_ROI];
7261 uint8_t degree[MAX_ROI * 2];
7262 for (size_t i = 0; i < numFaces; i++) {
7263 detected[i] = blinks->blink[i].blink_detected;
7264 degree[2 * i] = blinks->blink[i].left_blink;
7265 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007266
Jason Lee49619db2017-04-13 12:07:22 -07007267 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7268 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7269 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7270 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007271 }
7272 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7273 detected, numFaces);
7274 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7275 degree, numFaces * 2);
7276 }
7277 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7278 CAM_INTF_META_FACE_SMILE, metadata) {
7279 uint8_t degree[MAX_ROI];
7280 uint8_t confidence[MAX_ROI];
7281 for (size_t i = 0; i < numFaces; i++) {
7282 degree[i] = smiles->smile[i].smile_degree;
7283 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007284
Jason Lee49619db2017-04-13 12:07:22 -07007285 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7286 "smile_degree=%d, smile_score=%d",
7287 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007288 }
7289 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7290 degree, numFaces);
7291 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7292 confidence, numFaces);
7293 }
7294 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7295 CAM_INTF_META_FACE_GAZE, metadata) {
7296 int8_t angle[MAX_ROI];
7297 int32_t direction[MAX_ROI * 3];
7298 int8_t degree[MAX_ROI * 2];
7299 for (size_t i = 0; i < numFaces; i++) {
7300 angle[i] = gazes->gaze[i].gaze_angle;
7301 direction[3 * i] = gazes->gaze[i].updown_dir;
7302 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7303 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7304 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7305 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007306
7307 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7308                                "updown_dir=%d, leftright_dir=%d, roll_dir=%d, "
7309 "left_right_gaze=%d, top_bottom_gaze=%d",
7310 faceDetectionInfo->frame_id, i, angle[i],
7311 direction[3 * i], direction[3 * i + 1],
7312 direction[3 * i + 2],
7313 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007314 }
7315 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7316 (uint8_t *)angle, numFaces);
7317 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7318 direction, numFaces * 3);
7319 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7320 (uint8_t *)degree, numFaces * 2);
7321 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007322 }
7323 }
7324 }
7325 }
7326
7327 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7328 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007329 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007330 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007331 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007332
Shuzhen Wang14415f52016-11-16 18:26:18 -08007333 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7334 histogramBins = *histBins;
7335 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7336 }
7337
7338 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007339 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7340 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007341 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007342
7343 switch (stats_data->type) {
7344 case CAM_HISTOGRAM_TYPE_BAYER:
7345 switch (stats_data->bayer_stats.data_type) {
7346 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007347 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7348 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007349 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007350 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7351 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007352 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007353 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7354 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007355 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007356 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007357 case CAM_STATS_CHANNEL_R:
7358 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007359 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7360 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007361 }
7362 break;
7363 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007364 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007365 break;
7366 }
7367
Shuzhen Wang14415f52016-11-16 18:26:18 -08007368 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007369 }
7370 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007371 }
7372
7373 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7374 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7375 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7376 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7377 }
7378
7379 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7380 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7381 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7382 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7383 }
7384
7385 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7386 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7387 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7388 CAM_MAX_SHADING_MAP_HEIGHT);
7389 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7390 CAM_MAX_SHADING_MAP_WIDTH);
7391 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7392 lensShadingMap->lens_shading, 4U * map_width * map_height);
7393 }
7394
7395 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7396 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7397 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7398 }
7399
7400 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7401 //Populate CAM_INTF_META_TONEMAP_CURVES
7402 /* ch0 = G, ch 1 = B, ch 2 = R*/
7403 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7404 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7405 tonemap->tonemap_points_cnt,
7406 CAM_MAX_TONEMAP_CURVE_SIZE);
7407 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7408 }
7409
7410 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7411 &tonemap->curves[0].tonemap_points[0][0],
7412 tonemap->tonemap_points_cnt * 2);
7413
7414 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7415 &tonemap->curves[1].tonemap_points[0][0],
7416 tonemap->tonemap_points_cnt * 2);
7417
7418 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7419 &tonemap->curves[2].tonemap_points[0][0],
7420 tonemap->tonemap_points_cnt * 2);
7421 }
7422
7423 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7424 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7425 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7426 CC_GAIN_MAX);
7427 }
7428
7429 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7430 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7431 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7432 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7433 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7434 }
7435
7436 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7437 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7438 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7439 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7440 toneCurve->tonemap_points_cnt,
7441 CAM_MAX_TONEMAP_CURVE_SIZE);
7442 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7443 }
7444 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7445 (float*)toneCurve->curve.tonemap_points,
7446 toneCurve->tonemap_points_cnt * 2);
7447 }
7448
7449 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7450 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7451 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7452 predColorCorrectionGains->gains, 4);
7453 }
7454
7455 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7456 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7457 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7458 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7459 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7460 }
7461
7462 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7463 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7464 }
7465
7466 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7467 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7468 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7469 }
7470
7471 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7472 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7473 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7474 }
7475
7476 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7477 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7478 *effectMode);
7479 if (NAME_NOT_FOUND != val) {
7480 uint8_t fwk_effectMode = (uint8_t)val;
7481 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7482 }
7483 }
7484
7485 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7486 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7487 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7488 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7489 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7490 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7491 }
7492 int32_t fwk_testPatternData[4];
7493 fwk_testPatternData[0] = testPatternData->r;
7494 fwk_testPatternData[3] = testPatternData->b;
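        // The Gr/Gb positions in the 4-entry framework array depend on the
        // sensor's color filter arrangement.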
7495 switch (gCamCapability[mCameraId]->color_arrangement) {
7496 case CAM_FILTER_ARRANGEMENT_RGGB:
7497 case CAM_FILTER_ARRANGEMENT_GRBG:
7498 fwk_testPatternData[1] = testPatternData->gr;
7499 fwk_testPatternData[2] = testPatternData->gb;
7500 break;
7501 case CAM_FILTER_ARRANGEMENT_GBRG:
7502 case CAM_FILTER_ARRANGEMENT_BGGR:
7503 fwk_testPatternData[2] = testPatternData->gr;
7504 fwk_testPatternData[1] = testPatternData->gb;
7505 break;
7506 default:
7507 LOGE("color arrangement %d is not supported",
7508 gCamCapability[mCameraId]->color_arrangement);
7509 break;
7510 }
7511 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7512 }
7513
7514 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7515 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7516 }
7517
7518 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7519 String8 str((const char *)gps_methods);
7520 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7521 }
7522
7523 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7524 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7525 }
7526
7527 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7528 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7529 }
7530
7531 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7532 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7533 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7534 }
7535
7536 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7537 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7538 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7539 }
7540
7541 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7542 int32_t fwk_thumb_size[2];
7543 fwk_thumb_size[0] = thumb_size->width;
7544 fwk_thumb_size[1] = thumb_size->height;
7545 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7546 }
7547
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007548 // Skip reprocess metadata if there is no input stream.
7549 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7550 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7551 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7552 privateData,
7553 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7554 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007555 }
7556
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007557 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007558 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007559 meteringMode, 1);
7560 }
7561
Thierry Strudel54dc9782017-02-15 12:12:10 -08007562 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7563 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7564 LOGD("hdr_scene_data: %d %f\n",
7565 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7566 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7567 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7568 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7569 &isHdr, 1);
7570 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7571 &isHdrConfidence, 1);
7572 }
7573
7574
7575
Thierry Strudel3d639192016-09-09 11:52:26 -07007576 if (metadata->is_tuning_params_valid) {
7577 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7578 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7579 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7580
7581
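        // Blob layout: six uint32_t size fields (data version, then sensor, VFE,
        // CPP, CAC and mod3 sizes -- mod3 is forced to 0), followed by the sensor,
        // VFE, CPP and CAC data segments, each clamped to its *_DATA_MAX.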
7582 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7583 sizeof(uint32_t));
7584 data += sizeof(uint32_t);
7585
7586 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7587 sizeof(uint32_t));
7588 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7589 data += sizeof(uint32_t);
7590
7591 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7592 sizeof(uint32_t));
7593 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7594 data += sizeof(uint32_t);
7595
7596 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7597 sizeof(uint32_t));
7598 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7599 data += sizeof(uint32_t);
7600
7601 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7602 sizeof(uint32_t));
7603 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7604 data += sizeof(uint32_t);
7605
7606 metadata->tuning_params.tuning_mod3_data_size = 0;
7607 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7608 sizeof(uint32_t));
7609 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7610 data += sizeof(uint32_t);
7611
7612 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7613 TUNING_SENSOR_DATA_MAX);
7614 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7615 count);
7616 data += count;
7617
7618 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7619 TUNING_VFE_DATA_MAX);
7620 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7621 count);
7622 data += count;
7623
7624 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7625 TUNING_CPP_DATA_MAX);
7626 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7627 count);
7628 data += count;
7629
7630 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7631 TUNING_CAC_DATA_MAX);
7632 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7633 count);
7634 data += count;
7635
7636 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7637 (int32_t *)(void *)tuning_meta_data_blob,
7638 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7639 }
7640
7641 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7642 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7643 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7644 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7645 NEUTRAL_COL_POINTS);
7646 }
7647
7648 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7649 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7650 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7651 }
7652
7653 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7654 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7655 // Adjust crop region from sensor output coordinate system to active
7656 // array coordinate system.
7657 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7658 hAeRegions->rect.width, hAeRegions->rect.height);
7659
7660 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7661 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7662 REGIONS_TUPLE_COUNT);
7663 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7664 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7665 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7666 hAeRegions->rect.height);
7667 }
7668
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007669 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7670 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7671 if (NAME_NOT_FOUND != val) {
7672 uint8_t fwkAfMode = (uint8_t)val;
7673 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7674 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7675 } else {
7676 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7677 val);
7678 }
7679 }
7680
Thierry Strudel3d639192016-09-09 11:52:26 -07007681 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7682 uint8_t fwk_afState = (uint8_t) *afState;
7683 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007684 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007685 }
7686
7687 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7688 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7689 }
7690
7691 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7692 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7693 }
7694
7695 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7696 uint8_t fwk_lensState = *lensState;
7697 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7698 }
7699
Thierry Strudel3d639192016-09-09 11:52:26 -07007700
7701 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007702 uint32_t ab_mode = *hal_ab_mode;
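        // The framework antibanding enum has a single AUTO value, so collapse the
        // backend's 50Hz/60Hz auto variants before the lookup below.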
7703 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7704 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7705 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7706 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007707 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007708 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007709 if (NAME_NOT_FOUND != val) {
7710 uint8_t fwk_ab_mode = (uint8_t)val;
7711 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7712 }
7713 }
7714
7715 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7716 int val = lookupFwkName(SCENE_MODES_MAP,
7717 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7718 if (NAME_NOT_FOUND != val) {
7719 uint8_t fwkBestshotMode = (uint8_t)val;
7720 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7721 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7722 } else {
7723 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7724 }
7725 }
7726
7727 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7728 uint8_t fwk_mode = (uint8_t) *mode;
7729 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7730 }
7731
7732    /* Constant metadata values to be updated */
7733 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7734 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7735
7736 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7737 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7738
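    // Hot pixel map mode is reported as OFF above, so publish an empty (zero-count)
    // hot pixel map entry to keep the tag present in the result.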
7739 int32_t hotPixelMap[2];
7740 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7741
7742 // CDS
7743 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7744 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7745 }
7746
Thierry Strudel04e026f2016-10-10 11:27:36 -07007747 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7748 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007749 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007750 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7751 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7752 } else {
7753 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7754 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007755
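        // Log the transition and track the current video HDR state in mCurrFeatureState
        // whenever the reported mode differs from the cached state.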
7756 if(fwk_hdr != curr_hdr_state) {
7757 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7758 if(fwk_hdr)
7759 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7760 else
7761 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7762 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007763 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7764 }
7765
Thierry Strudel54dc9782017-02-15 12:12:10 -08007766 //binning correction
7767 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7768 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7769 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7770 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7771 }
7772
Thierry Strudel04e026f2016-10-10 11:27:36 -07007773 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007774 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007775 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7776 int8_t is_ir_on = 0;
7777
7778        is_ir_on = (fwk_ir > 0) ? 1 : 0;
7779 if(is_ir_on != curr_ir_state) {
7780 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7781 if(is_ir_on)
7782 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7783 else
7784 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7785 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007786 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007787 }
7788
Thierry Strudel269c81a2016-10-12 12:13:59 -07007789 // AEC SPEED
7790 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7791 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7792 }
7793
7794 // AWB SPEED
7795 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7796 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7797 }
7798
Thierry Strudel3d639192016-09-09 11:52:26 -07007799 // TNR
7800 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7801 uint8_t tnr_enable = tnr->denoise_enable;
7802 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007803 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7804 int8_t is_tnr_on = 0;
7805
7806        is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7807 if(is_tnr_on != curr_tnr_state) {
7808 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7809 if(is_tnr_on)
7810 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7811 else
7812 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7813 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007814
7815 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7816 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7817 }
7818
7819 // Reprocess crop data
7820 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7821 uint8_t cnt = crop_data->num_of_streams;
7822 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7823            // mm-qcamera-daemon only posts crop_data for streams
7824            // not linked to pproc, so the absence of valid crop metadata
7825            // is not necessarily an error case.
7826 LOGD("No valid crop metadata entries");
7827 } else {
7828 uint32_t reproc_stream_id;
7829 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7830 LOGD("No reprocessible stream found, ignore crop data");
7831 } else {
7832 int rc = NO_ERROR;
7833 Vector<int32_t> roi_map;
7834 int32_t *crop = new int32_t[cnt*4];
7835 if (NULL == crop) {
7836 rc = NO_MEMORY;
7837 }
7838 if (NO_ERROR == rc) {
7839 int32_t streams_found = 0;
7840 for (size_t i = 0; i < cnt; i++) {
7841 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7842 if (pprocDone) {
7843 // HAL already does internal reprocessing,
7844 // either via reprocessing before JPEG encoding,
7845 // or offline postprocessing for pproc bypass case.
7846 crop[0] = 0;
7847 crop[1] = 0;
7848 crop[2] = mInputStreamInfo.dim.width;
7849 crop[3] = mInputStreamInfo.dim.height;
7850 } else {
7851 crop[0] = crop_data->crop_info[i].crop.left;
7852 crop[1] = crop_data->crop_info[i].crop.top;
7853 crop[2] = crop_data->crop_info[i].crop.width;
7854 crop[3] = crop_data->crop_info[i].crop.height;
7855 }
7856 roi_map.add(crop_data->crop_info[i].roi_map.left);
7857 roi_map.add(crop_data->crop_info[i].roi_map.top);
7858 roi_map.add(crop_data->crop_info[i].roi_map.width);
7859 roi_map.add(crop_data->crop_info[i].roi_map.height);
7860 streams_found++;
7861 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7862 crop[0], crop[1], crop[2], crop[3]);
7863 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7864 crop_data->crop_info[i].roi_map.left,
7865 crop_data->crop_info[i].roi_map.top,
7866 crop_data->crop_info[i].roi_map.width,
7867 crop_data->crop_info[i].roi_map.height);
7868 break;
7869
7870 }
7871 }
7872 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7873 &streams_found, 1);
7874 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7875 crop, (size_t)(streams_found * 4));
7876 if (roi_map.array()) {
7877 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7878 roi_map.array(), roi_map.size());
7879 }
7880 }
7881 if (crop) {
7882 delete [] crop;
7883 }
7884 }
7885 }
7886 }
7887
7888 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7889        // Regardless of whether CAC is supported, CTS expects the CAC result to be
7890        // non-NULL, so hardcode the CAC result to OFF mode.
7891 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7892 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7893 } else {
7894 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7895 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7896 *cacMode);
7897 if (NAME_NOT_FOUND != val) {
7898 uint8_t resultCacMode = (uint8_t)val;
7899                // Check whether the CAC result from the callback matches the framework-set
7900                // CAC mode; if not, report the CAC mode that came in the corresponding request.
7901 if (fwk_cacMode != resultCacMode) {
7902 resultCacMode = fwk_cacMode;
7903 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007904 //Check if CAC is disabled by property
7905 if (m_cacModeDisabled) {
7906 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7907 }
7908
Thierry Strudel3d639192016-09-09 11:52:26 -07007909 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7910 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7911 } else {
7912 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7913 }
7914 }
7915 }
7916
7917 // Post blob of cam_cds_data through vendor tag.
7918 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7919 uint8_t cnt = cdsInfo->num_of_streams;
7920 cam_cds_data_t cdsDataOverride;
7921 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7922 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7923 cdsDataOverride.num_of_streams = 1;
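        // Only the CDS state of the reprocessible stream is reported through the vendor
        // tag, so the override blob carries a single stream entry.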
7924 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7925 uint32_t reproc_stream_id;
7926 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7927 LOGD("No reprocessible stream found, ignore cds data");
7928 } else {
7929 for (size_t i = 0; i < cnt; i++) {
7930 if (cdsInfo->cds_info[i].stream_id ==
7931 reproc_stream_id) {
7932 cdsDataOverride.cds_info[0].cds_enable =
7933 cdsInfo->cds_info[i].cds_enable;
7934 break;
7935 }
7936 }
7937 }
7938 } else {
7939 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7940 }
7941 camMetadata.update(QCAMERA3_CDS_INFO,
7942 (uint8_t *)&cdsDataOverride,
7943 sizeof(cam_cds_data_t));
7944 }
7945
7946 // Ldaf calibration data
7947 if (!mLdafCalibExist) {
7948 IF_META_AVAILABLE(uint32_t, ldafCalib,
7949 CAM_INTF_META_LDAF_EXIF, metadata) {
7950 mLdafCalibExist = true;
7951 mLdafCalib[0] = ldafCalib[0];
7952 mLdafCalib[1] = ldafCalib[1];
7953 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7954 ldafCalib[0], ldafCalib[1]);
7955 }
7956 }
7957
Thierry Strudel54dc9782017-02-15 12:12:10 -08007958 // EXIF debug data through vendor tag
7959 /*
7960 * Mobicat Mask can assume 3 values:
7961 * 1 refers to Mobicat data,
7962 * 2 refers to Stats Debug and Exif Debug Data
7963 * 3 refers to Mobicat and Stats Debug Data
7964 * We want to make sure that we are sending Exif debug data
7965 * only when Mobicat Mask is 2.
7966 */
7967 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7968 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7969 (uint8_t *)(void *)mExifParams.debug_params,
7970 sizeof(mm_jpeg_debug_exif_params_t));
7971 }
7972
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007973 // Reprocess and DDM debug data through vendor tag
7974 cam_reprocess_info_t repro_info;
7975 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007976 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7977 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007978 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007979 }
7980 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7981 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007982 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007983 }
7984 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7985 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007986 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007987 }
7988 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7989 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007990 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007991 }
7992 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7993 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007994 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007995 }
7996 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007997 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007998 }
7999 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8000 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008001 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008002 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008003 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8004 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8005 }
8006 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8007 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8008 }
8009 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8010 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008011
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008012 // INSTANT AEC MODE
8013 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8014 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8015 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8016 }
8017
Shuzhen Wange763e802016-03-31 10:24:29 -07008018 // AF scene change
8019 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8020 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8021 }
8022
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008023 // Enable ZSL
8024 if (enableZsl != nullptr) {
8025 uint8_t value = *enableZsl ?
8026 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8027 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8028 }
8029
Xu Han821ea9c2017-05-23 09:00:40 -07008030 // OIS Data
8031 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8032 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8033 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8034 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8035 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8036 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8037 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8038 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8039 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8040 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8041 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
8042 }
8043
Thierry Strudel3d639192016-09-09 11:52:26 -07008044 resultMetadata = camMetadata.release();
8045 return resultMetadata;
8046}
8047
8048/*===========================================================================
8049 * FUNCTION : saveExifParams
8050 *
8051 * DESCRIPTION: Cache 3A/stats EXIF debug parameters from the metadata callback
8052 *
8053 * PARAMETERS :
8054 * @metadata : metadata information from callback
8055 *
8056 * RETURN : none
8057 *
8058 *==========================================================================*/
8059void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8060{
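    // Each debug block below is cached only when the EXIF debug-params buffer
    // (mExifParams.debug_params) has already been allocated.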
8061 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8062 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8063 if (mExifParams.debug_params) {
8064 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8065 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8066 }
8067 }
8068 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8069 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8070 if (mExifParams.debug_params) {
8071 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8072 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8073 }
8074 }
8075 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8076 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8077 if (mExifParams.debug_params) {
8078 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8079 mExifParams.debug_params->af_debug_params_valid = TRUE;
8080 }
8081 }
8082 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8083 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8084 if (mExifParams.debug_params) {
8085 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8086 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8087 }
8088 }
8089 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8090 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8091 if (mExifParams.debug_params) {
8092 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8093 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8094 }
8095 }
8096 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8097 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8098 if (mExifParams.debug_params) {
8099 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8100 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8101 }
8102 }
8103 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8104 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8105 if (mExifParams.debug_params) {
8106 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8107 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8108 }
8109 }
8110 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8111 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8112 if (mExifParams.debug_params) {
8113 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8114 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8115 }
8116 }
8117}
8118
8119/*===========================================================================
8120 * FUNCTION : get3AExifParams
8121 *
8122 * DESCRIPTION: Return the cached EXIF parameters, including 3A debug data
8123 *
8124 * PARAMETERS : none
8125 *
8126 *
8127 * RETURN : mm_jpeg_exif_params_t
8128 *
8129 *==========================================================================*/
8130mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8131{
8132 return mExifParams;
8133}
8134
8135/*===========================================================================
8136 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8137 *
8138 * DESCRIPTION: Translate urgent (partial) HAL metadata into framework result metadata
8139 *
8140 * PARAMETERS :
8141 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008142 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8143 * urgent metadata in a batch. Always true for
8144 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008145 *
8146 * RETURN : camera_metadata_t*
8147 * metadata in a format specified by fwk
8148 *==========================================================================*/
8149camera_metadata_t*
8150QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008151 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008152{
8153 CameraMetadata camMetadata;
8154 camera_metadata_t *resultMetadata;
8155
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008156 if (!lastUrgentMetadataInBatch) {
8157 /* In batch mode, use empty metadata if this is not the last in batch
8158 */
8159 resultMetadata = allocate_camera_metadata(0, 0);
8160 return resultMetadata;
8161 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008162
8163 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8164 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8165 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8166 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8167 }
8168
8169 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8170 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8171 &aecTrigger->trigger, 1);
8172 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8173 &aecTrigger->trigger_id, 1);
8174 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8175 aecTrigger->trigger);
8176 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8177 aecTrigger->trigger_id);
8178 }
8179
8180 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8181 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8182 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8183 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8184 }
8185
Thierry Strudel3d639192016-09-09 11:52:26 -07008186 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8187 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8188 &af_trigger->trigger, 1);
8189 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8190 af_trigger->trigger);
8191 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8192 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8193 af_trigger->trigger_id);
8194 }
8195
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008196 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8197 /*af regions*/
8198 int32_t afRegions[REGIONS_TUPLE_COUNT];
8199        // Adjust the AF regions from the sensor output coordinate system to the
8200        // active array coordinate system.
8201 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8202 hAfRegions->rect.width, hAfRegions->rect.height);
8203
8204 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8205 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8206 REGIONS_TUPLE_COUNT);
8207 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8208 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8209 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8210 hAfRegions->rect.height);
8211 }
8212
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008213 // AF region confidence
8214 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8215 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8216 }
8217
Thierry Strudel3d639192016-09-09 11:52:26 -07008218 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8219 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8220 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8221 if (NAME_NOT_FOUND != val) {
8222 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8223 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8224 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8225 } else {
8226 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8227 }
8228 }
8229
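    // Deduce ANDROID_CONTROL_AE_MODE from the combination of red-eye reduction, LED
    // flash mode and AE mode reported by the HAL.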
8230 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8231 uint32_t aeMode = CAM_AE_MODE_MAX;
8232 int32_t flashMode = CAM_FLASH_MODE_MAX;
8233 int32_t redeye = -1;
8234 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8235 aeMode = *pAeMode;
8236 }
8237 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8238 flashMode = *pFlashMode;
8239 }
8240 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8241 redeye = *pRedeye;
8242 }
8243
8244 if (1 == redeye) {
8245 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8246 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8247 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8248 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8249 flashMode);
8250 if (NAME_NOT_FOUND != val) {
8251 fwk_aeMode = (uint8_t)val;
8252 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8253 } else {
8254 LOGE("Unsupported flash mode %d", flashMode);
8255 }
8256 } else if (aeMode == CAM_AE_MODE_ON) {
8257 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8258 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8259 } else if (aeMode == CAM_AE_MODE_OFF) {
8260 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8261 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008262 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8263 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8264 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008265 } else {
8266 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8267 "flashMode:%d, aeMode:%u!!!",
8268 redeye, flashMode, aeMode);
8269 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008270 if (mInstantAEC) {
8271        // Increment frame index count until a bound is reached for instant AEC.
8272 mInstantAecFrameIdxCount++;
8273 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8274 CAM_INTF_META_AEC_INFO, metadata) {
8275 LOGH("ae_params->settled = %d",ae_params->settled);
8276 // If AEC settled, or if number of frames reached bound value,
8277 // should reset instant AEC.
8278 if (ae_params->settled ||
8279 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8280 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8281 mInstantAEC = false;
8282 mResetInstantAEC = true;
8283 mInstantAecFrameIdxCount = 0;
8284 }
8285 }
8286 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008287 resultMetadata = camMetadata.release();
8288 return resultMetadata;
8289}
8290
8291/*===========================================================================
8292 * FUNCTION : dumpMetadataToFile
8293 *
8294 * DESCRIPTION: Dumps tuning metadata to file system
8295 *
8296 * PARAMETERS :
8297 * @meta : tuning metadata
8298 * @dumpFrameCount : current dump frame count
8299 *  @enabled        : whether dumping is enabled
8300 *  @type           : dump type string; @frameNumber : current frame number
8301 *==========================================================================*/
8302void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8303 uint32_t &dumpFrameCount,
8304 bool enabled,
8305 const char *type,
8306 uint32_t frameNumber)
8307{
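    // Dump file layout: a header of six uint32_t fields (tuning data version and the
    // sensor/VFE/CPP/CAC/mod3 section sizes) followed by the sensor, VFE, CPP and CAC
    // sections copied from meta.data at their fixed offsets.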
8308 //Some sanity checks
8309 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8310 LOGE("Tuning sensor data size bigger than expected %d: %d",
8311 meta.tuning_sensor_data_size,
8312 TUNING_SENSOR_DATA_MAX);
8313 return;
8314 }
8315
8316 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8317 LOGE("Tuning VFE data size bigger than expected %d: %d",
8318 meta.tuning_vfe_data_size,
8319 TUNING_VFE_DATA_MAX);
8320 return;
8321 }
8322
8323 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8324 LOGE("Tuning CPP data size bigger than expected %d: %d",
8325 meta.tuning_cpp_data_size,
8326 TUNING_CPP_DATA_MAX);
8327 return;
8328 }
8329
8330 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8331 LOGE("Tuning CAC data size bigger than expected %d: %d",
8332 meta.tuning_cac_data_size,
8333 TUNING_CAC_DATA_MAX);
8334 return;
8335 }
8336 //
8337
8338 if(enabled){
8339 char timeBuf[FILENAME_MAX];
8340 char buf[FILENAME_MAX];
8341 memset(buf, 0, sizeof(buf));
8342 memset(timeBuf, 0, sizeof(timeBuf));
8343 time_t current_time;
8344 struct tm * timeinfo;
8345 time (&current_time);
8346 timeinfo = localtime (&current_time);
8347 if (timeinfo != NULL) {
8348 strftime (timeBuf, sizeof(timeBuf),
8349 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8350 }
8351 String8 filePath(timeBuf);
8352 snprintf(buf,
8353 sizeof(buf),
8354 "%dm_%s_%d.bin",
8355 dumpFrameCount,
8356 type,
8357 frameNumber);
8358 filePath.append(buf);
8359 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8360 if (file_fd >= 0) {
8361 ssize_t written_len = 0;
8362 meta.tuning_data_version = TUNING_DATA_VERSION;
8363 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8364 written_len += write(file_fd, data, sizeof(uint32_t));
8365 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8366 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8367 written_len += write(file_fd, data, sizeof(uint32_t));
8368 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8369 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8370 written_len += write(file_fd, data, sizeof(uint32_t));
8371 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8372 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8373 written_len += write(file_fd, data, sizeof(uint32_t));
8374 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8375 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8376 written_len += write(file_fd, data, sizeof(uint32_t));
8377 meta.tuning_mod3_data_size = 0;
8378 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8379 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8380 written_len += write(file_fd, data, sizeof(uint32_t));
8381 size_t total_size = meta.tuning_sensor_data_size;
8382 data = (void *)((uint8_t *)&meta.data);
8383 written_len += write(file_fd, data, total_size);
8384 total_size = meta.tuning_vfe_data_size;
8385 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8386 written_len += write(file_fd, data, total_size);
8387 total_size = meta.tuning_cpp_data_size;
8388 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8389 written_len += write(file_fd, data, total_size);
8390 total_size = meta.tuning_cac_data_size;
8391 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8392 written_len += write(file_fd, data, total_size);
8393 close(file_fd);
8394 }else {
8395 LOGE("fail to open file for metadata dumping");
8396 }
8397 }
8398}
8399
8400/*===========================================================================
8401 * FUNCTION : cleanAndSortStreamInfo
8402 *
8403 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8404 *              and sort them such that raw streams are at the end of the list.
8405 *              This is a workaround for a camera daemon constraint.
8406 *
8407 * PARAMETERS : None
8408 *
8409 *==========================================================================*/
8410void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8411{
8412 List<stream_info_t *> newStreamInfo;
8413
8414 /*clean up invalid streams*/
8415 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8416 it != mStreamInfo.end();) {
8417 if(((*it)->status) == INVALID){
8418 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8419 delete channel;
8420 free(*it);
8421 it = mStreamInfo.erase(it);
8422 } else {
8423 it++;
8424 }
8425 }
8426
8427 // Move preview/video/callback/snapshot streams into newList
8428 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8429 it != mStreamInfo.end();) {
8430 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8431 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8432 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8433 newStreamInfo.push_back(*it);
8434 it = mStreamInfo.erase(it);
8435 } else
8436 it++;
8437 }
8438 // Move raw streams into newList
8439 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8440 it != mStreamInfo.end();) {
8441 newStreamInfo.push_back(*it);
8442 it = mStreamInfo.erase(it);
8443 }
8444
8445 mStreamInfo = newStreamInfo;
8446}
8447
8448/*===========================================================================
8449 * FUNCTION : extractJpegMetadata
8450 *
8451 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8452 * JPEG metadata is cached in HAL, and return as part of capture
8453 * result when metadata is returned from camera daemon.
8454 *
8455 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8456 * @request: capture request
8457 *
8458 *==========================================================================*/
8459void QCamera3HardwareInterface::extractJpegMetadata(
8460 CameraMetadata& jpegMetadata,
8461 const camera3_capture_request_t *request)
8462{
8463 CameraMetadata frame_settings;
8464 frame_settings = request->settings;
8465
8466 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8467 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8468 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8469 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8470
8471 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8472 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8473 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8474 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8475
8476 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8477 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8478 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8479 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8480
8481 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8482 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8483 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8484 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8485
8486 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8487 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8488 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8489 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8490
8491 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8492 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8493 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8494 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8495
8496 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8497 int32_t thumbnail_size[2];
8498 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8499 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8500 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8501 int32_t orientation =
8502 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008503 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008504 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8505 int32_t temp;
8506 temp = thumbnail_size[0];
8507 thumbnail_size[0] = thumbnail_size[1];
8508 thumbnail_size[1] = temp;
8509 }
8510 }
8511 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8512 thumbnail_size,
8513 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8514 }
8515
8516}
8517
8518/*===========================================================================
8519 * FUNCTION : convertToRegions
8520 *
8521 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8522 *
8523 * PARAMETERS :
8524 * @rect : cam_rect_t struct to convert
8525 * @region : int32_t destination array
8526 * @weight : if we are converting from cam_area_t, weight is valid
8527 * else weight = -1
8528 *
8529 *==========================================================================*/
8530void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8531 int32_t *region, int weight)
8532{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008533 region[FACE_LEFT] = rect.left;
8534 region[FACE_TOP] = rect.top;
8535 region[FACE_RIGHT] = rect.left + rect.width;
8536 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008537 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008538 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008539 }
8540}
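// Illustrative example (not part of the HAL build): assuming the FACE_LEFT..FACE_WEIGHT
// indices map to array positions 0-4, a cam_rect_t of {left=100, top=200, width=300,
// height=400} with weight 1 is converted to region[] = {100, 200, 400, 600, 1},
// i.e. (xmin, ymin, xmax, ymax, weight).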
8541
8542/*===========================================================================
8543 * FUNCTION : convertFromRegions
8544 *
8545 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8546 *
8547 * PARAMETERS :
8548 *   @roi            : cam_area_t destination struct to fill
8549 *   @frame_settings : capture request settings containing the region tag
8550 *   @tag            : metadata tag holding the region array
8551 *                     (xmin, ymin, xmax, ymax, weight)
8552 *
8553 *==========================================================================*/
8554void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008555 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008556{
Thierry Strudel3d639192016-09-09 11:52:26 -07008557 int32_t x_min = frame_settings.find(tag).data.i32[0];
8558 int32_t y_min = frame_settings.find(tag).data.i32[1];
8559 int32_t x_max = frame_settings.find(tag).data.i32[2];
8560 int32_t y_max = frame_settings.find(tag).data.i32[3];
8561 roi.weight = frame_settings.find(tag).data.i32[4];
8562 roi.rect.left = x_min;
8563 roi.rect.top = y_min;
8564 roi.rect.width = x_max - x_min;
8565 roi.rect.height = y_max - y_min;
8566}
8567
8568/*===========================================================================
8569 * FUNCTION : resetIfNeededROI
8570 *
8571 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8572 * crop region
8573 *
8574 * PARAMETERS :
8575 * @roi : cam_area_t struct to resize
8576 * @scalerCropRegion : cam_crop_region_t region to compare against
8577 *
8578 *
8579 *==========================================================================*/
8580bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8581 const cam_crop_region_t* scalerCropRegion)
8582{
8583 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8584 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8585 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8586 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8587
8588    /* According to the spec, weight = 0 indicates that the roi should be disabled.
8589     * Without this check, the validation below (whether the roi lies inside the
8590     * scaler crop region) would fail, the roi would not be reset, and the
8591     * algorithm would continue to use a stale roi window.
8592 */
8593 if (roi->weight == 0) {
8594 return true;
8595 }
8596
8597 if ((roi_x_max < scalerCropRegion->left) ||
8598 // right edge of roi window is left of scalar crop's left edge
8599 (roi_y_max < scalerCropRegion->top) ||
8600 // bottom edge of roi window is above scalar crop's top edge
8601 (roi->rect.left > crop_x_max) ||
8602 // left edge of roi window is beyond(right) of scalar crop's right edge
8603 (roi->rect.top > crop_y_max)){
8604            // top edge of roi window is below scalar crop's bottom edge
8605 return false;
8606 }
8607 if (roi->rect.left < scalerCropRegion->left) {
8608 roi->rect.left = scalerCropRegion->left;
8609 }
8610 if (roi->rect.top < scalerCropRegion->top) {
8611 roi->rect.top = scalerCropRegion->top;
8612 }
8613 if (roi_x_max > crop_x_max) {
8614 roi_x_max = crop_x_max;
8615 }
8616 if (roi_y_max > crop_y_max) {
8617 roi_y_max = crop_y_max;
8618 }
8619 roi->rect.width = roi_x_max - roi->rect.left;
8620 roi->rect.height = roi_y_max - roi->rect.top;
8621 return true;
8622}
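// Illustrative example (not part of the HAL build): with a scaler crop region of
// {left=0, top=0, width=1920, height=1080}, an ROI of {left=1800, top=1000, width=300,
// height=300} is clipped to {1800, 1000, 120, 80} and the function returns true, while
// an ROI that starts entirely beyond the crop region (e.g. left > 1920) is left
// unchanged and the function returns false.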
8623
8624/*===========================================================================
8625 * FUNCTION : convertLandmarks
8626 *
8627 * DESCRIPTION: helper method to extract the landmarks from face detection info
8628 *
8629 * PARAMETERS :
8630 * @landmark_data : input landmark data to be converted
8631 * @landmarks : int32_t destination array
8632 *
8633 *
8634 *==========================================================================*/
8635void QCamera3HardwareInterface::convertLandmarks(
8636 cam_face_landmarks_info_t landmark_data,
8637 int32_t *landmarks)
8638{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008639 if (landmark_data.is_left_eye_valid) {
8640 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8641 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8642 } else {
8643 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8644 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8645 }
8646
8647 if (landmark_data.is_right_eye_valid) {
8648 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8649 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8650 } else {
8651 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8652 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8653 }
8654
8655 if (landmark_data.is_mouth_valid) {
8656 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8657 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8658 } else {
8659 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8660 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8661 }
8662}
8663
8664/*===========================================================================
8665 * FUNCTION : setInvalidLandmarks
8666 *
8667 * DESCRIPTION: helper method to set invalid landmarks
8668 *
8669 * PARAMETERS :
8670 * @landmarks : int32_t destination array
8671 *
8672 *
8673 *==========================================================================*/
8674void QCamera3HardwareInterface::setInvalidLandmarks(
8675 int32_t *landmarks)
8676{
8677 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8678 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8679 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8680 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8681 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8682 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008683}
8684
8685#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008686
8687/*===========================================================================
8688 * FUNCTION : getCapabilities
8689 *
8690 * DESCRIPTION: query camera capability from back-end
8691 *
8692 * PARAMETERS :
8693 * @ops : mm-interface ops structure
8694 * @cam_handle : camera handle for which we need capability
8695 *
8696 * RETURN : ptr type of capability structure
8697 * capability for success
8698 * NULL for failure
8699 *==========================================================================*/
8700cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8701 uint32_t cam_handle)
8702{
8703 int rc = NO_ERROR;
8704 QCamera3HeapMemory *capabilityHeap = NULL;
8705 cam_capability_t *cap_ptr = NULL;
8706
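    // Flow: allocate and map a shared capability buffer, let the backend fill it via
    // query_capability(), copy the result into a heap-allocated cam_capability_t, then
    // unmap and free the shared buffer.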
8707 if (ops == NULL) {
8708 LOGE("Invalid arguments");
8709 return NULL;
8710 }
8711
8712 capabilityHeap = new QCamera3HeapMemory(1);
8713 if (capabilityHeap == NULL) {
8714 LOGE("creation of capabilityHeap failed");
8715 return NULL;
8716 }
8717
8718 /* Allocate memory for capability buffer */
8719 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8720 if(rc != OK) {
8721        LOGE("No memory for capability");
8722 goto allocate_failed;
8723 }
8724
8725 /* Map memory for capability buffer */
8726 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8727
8728 rc = ops->map_buf(cam_handle,
8729 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8730 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8731 if(rc < 0) {
8732 LOGE("failed to map capability buffer");
8733 rc = FAILED_TRANSACTION;
8734 goto map_failed;
8735 }
8736
8737 /* Query Capability */
8738 rc = ops->query_capability(cam_handle);
8739 if(rc < 0) {
8740 LOGE("failed to query capability");
8741 rc = FAILED_TRANSACTION;
8742 goto query_failed;
8743 }
8744
8745 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8746 if (cap_ptr == NULL) {
8747 LOGE("out of memory");
8748 rc = NO_MEMORY;
8749 goto query_failed;
8750 }
8751
8752 memset(cap_ptr, 0, sizeof(cam_capability_t));
8753 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8754
8755 int index;
8756 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8757 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8758 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8759 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8760 }
8761
8762query_failed:
8763 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8764map_failed:
8765 capabilityHeap->deallocate();
8766allocate_failed:
8767 delete capabilityHeap;
8768
8769 if (rc != NO_ERROR) {
8770 return NULL;
8771 } else {
8772 return cap_ptr;
8773 }
8774}
8775
Thierry Strudel3d639192016-09-09 11:52:26 -07008776/*===========================================================================
8777 * FUNCTION : initCapabilities
8778 *
8779 * DESCRIPTION: initialize camera capabilities in static data struct
8780 *
8781 * PARAMETERS :
8782 * @cameraId : camera Id
8783 *
8784 * RETURN : int32_t type of status
8785 * NO_ERROR -- success
8786 * none-zero failure code
8787 *==========================================================================*/
8788int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8789{
8790 int rc = 0;
8791 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008792 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008793
8794 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8795 if (rc) {
8796 LOGE("camera_open failed. rc = %d", rc);
8797 goto open_failed;
8798 }
8799 if (!cameraHandle) {
8800 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8801 goto open_failed;
8802 }
8803
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008804 handle = get_main_camera_handle(cameraHandle->camera_handle);
8805 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8806 if (gCamCapability[cameraId] == NULL) {
8807 rc = FAILED_TRANSACTION;
8808 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008809 }
8810
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008811 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008812 if (is_dual_camera_by_idx(cameraId)) {
8813 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8814 gCamCapability[cameraId]->aux_cam_cap =
8815 getCapabilities(cameraHandle->ops, handle);
8816 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8817 rc = FAILED_TRANSACTION;
8818 free(gCamCapability[cameraId]);
8819 goto failed_op;
8820 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008821
8822 // Copy the main camera capability to main_cam_cap struct
8823 gCamCapability[cameraId]->main_cam_cap =
8824 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8825 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8826 LOGE("out of memory");
8827 rc = NO_MEMORY;
8828 goto failed_op;
8829 }
8830 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8831 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008832 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008833failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008834 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8835 cameraHandle = NULL;
8836open_failed:
8837 return rc;
8838}
8839
8840/*==========================================================================
8841 * FUNCTION   : get3AVersion
8842 *
8843 * DESCRIPTION: get the Q3A S/W version
8844 *
8845 * PARAMETERS :
8846 * @sw_version: Reference of Q3A structure which will hold version info upon
8847 * return
8848 *
8849 * RETURN : None
8850 *
8851 *==========================================================================*/
8852void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8853{
8854 if(gCamCapability[mCameraId])
8855 sw_version = gCamCapability[mCameraId]->q3a_version;
8856 else
8857 LOGE("Capability structure NULL!");
8858}
8859
8860
8861/*===========================================================================
8862 * FUNCTION : initParameters
8863 *
8864 * DESCRIPTION: initialize camera parameters
8865 *
8866 * PARAMETERS :
8867 *
8868 * RETURN : int32_t type of status
8869 * NO_ERROR -- success
8870 * none-zero failure code
8871 *==========================================================================*/
8872int QCamera3HardwareInterface::initParameters()
8873{
8874 int rc = 0;
8875
8876 //Allocate Set Param Buffer
8877 mParamHeap = new QCamera3HeapMemory(1);
8878 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8879 if(rc != OK) {
8880 rc = NO_MEMORY;
8881 LOGE("Failed to allocate SETPARM Heap memory");
8882 delete mParamHeap;
8883 mParamHeap = NULL;
8884 return rc;
8885 }
8886
8887 //Map memory for parameters buffer
8888 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8889 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8890 mParamHeap->getFd(0),
8891 sizeof(metadata_buffer_t),
8892 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8893 if(rc < 0) {
8894 LOGE("failed to map SETPARM buffer");
8895 rc = FAILED_TRANSACTION;
8896 mParamHeap->deallocate();
8897 delete mParamHeap;
8898 mParamHeap = NULL;
8899 return rc;
8900 }
8901
8902 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8903
8904 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8905 return rc;
8906}
8907
8908/*===========================================================================
8909 * FUNCTION : deinitParameters
8910 *
8911 * DESCRIPTION: de-initialize camera parameters
8912 *
8913 * PARAMETERS :
8914 *
8915 * RETURN : NONE
8916 *==========================================================================*/
8917void QCamera3HardwareInterface::deinitParameters()
8918{
8919 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8920 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8921
8922 mParamHeap->deallocate();
8923 delete mParamHeap;
8924 mParamHeap = NULL;
8925
8926 mParameters = NULL;
8927
8928 free(mPrevParameters);
8929 mPrevParameters = NULL;
8930}
8931
8932/*===========================================================================
8933 * FUNCTION : calcMaxJpegSize
8934 *
8935 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8936 *
8937 * PARAMETERS :
8938 *   @camera_id : camera Id
8939 * RETURN : max_jpeg_size
8940 *==========================================================================*/
8941size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8942{
8943 size_t max_jpeg_size = 0;
8944 size_t temp_width, temp_height;
8945 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8946 MAX_SIZES_CNT);
8947 for (size_t i = 0; i < count; i++) {
8948 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8949 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8950 if (temp_width * temp_height > max_jpeg_size ) {
8951 max_jpeg_size = temp_width * temp_height;
8952 }
8953 }
8954 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8955 return max_jpeg_size;
8956}
8957
8958/*===========================================================================
8959 * FUNCTION : getMaxRawSize
8960 *
8961 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8962 *
8963 * PARAMETERS :
8964 *   @camera_id : camera Id
8965 * RETURN : Largest supported Raw Dimension
8966 *==========================================================================*/
8967cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8968{
8969 int max_width = 0;
8970 cam_dimension_t maxRawSize;
8971
8972 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8973 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8974 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8975 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8976 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8977 }
8978 }
8979 return maxRawSize;
8980}
8981
8982
8983/*===========================================================================
8984 * FUNCTION : calcMaxJpegDim
8985 *
8986 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8987 *
8988 * PARAMETERS :
8989 *
8990 * RETURN : max_jpeg_dim
8991 *==========================================================================*/
8992cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8993{
8994 cam_dimension_t max_jpeg_dim;
8995 cam_dimension_t curr_jpeg_dim;
8996 max_jpeg_dim.width = 0;
8997 max_jpeg_dim.height = 0;
8998 curr_jpeg_dim.width = 0;
8999 curr_jpeg_dim.height = 0;
9000 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9001 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9002 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9003 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9004 max_jpeg_dim.width * max_jpeg_dim.height ) {
9005 max_jpeg_dim.width = curr_jpeg_dim.width;
9006 max_jpeg_dim.height = curr_jpeg_dim.height;
9007 }
9008 }
9009 return max_jpeg_dim;
9010}
9011
9012/*===========================================================================
9013 * FUNCTION : addStreamConfig
9014 *
9015 * DESCRIPTION: adds the stream configuration to the array
9016 *
9017 * PARAMETERS :
9018 * @available_stream_configs : pointer to stream configuration array
9019 * @scalar_format : scalar format
9020 * @dim : configuration dimension
9021 * @config_type : input or output configuration type
9022 *
9023 * RETURN : NONE
9024 *==========================================================================*/
9025void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9026 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9027{
9028 available_stream_configs.add(scalar_format);
9029 available_stream_configs.add(dim.width);
9030 available_stream_configs.add(dim.height);
9031 available_stream_configs.add(config_type);
9032}
9033
9034/*===========================================================================
9035 * FUNCTION : suppportBurstCapture
9036 * FUNCTION   : supportBurstCapture
9037 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9038 *
9039 * PARAMETERS :
9040 * @cameraId : camera Id
9041 *
9042 * RETURN : true if camera supports BURST_CAPTURE
9043 * false otherwise
9044 *==========================================================================*/
9045bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9046{
9047 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9048 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9049 const int32_t highResWidth = 3264;
9050 const int32_t highResHeight = 2448;
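    // BURST_CAPTURE roughly requires >= 10 fps at full resolution and >= 20 fps at
    // >= 8 MP (3264x2448): fail if full resolution is slower than 10 fps, pass if it
    // already reaches 20 fps, otherwise check the smallest high-res size against the
    // 20 fps bound.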
9051
9052 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9053 // Maximum resolution images cannot be captured at >= 10fps
9054 // -> not supporting BURST_CAPTURE
9055 return false;
9056 }
9057
9058 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9059 // Maximum resolution images can be captured at >= 20fps
9060 // --> supporting BURST_CAPTURE
9061 return true;
9062 }
9063
9064 // Find the smallest highRes resolution, or largest resolution if there is none
9065 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9066 MAX_SIZES_CNT);
9067 size_t highRes = 0;
9068 while ((highRes + 1 < totalCnt) &&
9069 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9070 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9071 highResWidth * highResHeight)) {
9072 highRes++;
9073 }
9074 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9075 return true;
9076 } else {
9077 return false;
9078 }
9079}
9080
9081/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009082 * FUNCTION : getPDStatIndex
9083 *
9084 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9085 *
9086 * PARAMETERS :
9087 * @caps : camera capabilities
9088 *
9089 * RETURN : int32_t type
9090 * non-negative - on success
9091 * -1 - on failure
9092 *==========================================================================*/
9093int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9094 if (nullptr == caps) {
9095 return -1;
9096 }
9097
9098 uint32_t metaRawCount = caps->meta_raw_channel_count;
9099 int32_t ret = -1;
9100 for (size_t i = 0; i < metaRawCount; i++) {
9101 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9102 ret = i;
9103 break;
9104 }
9105 }
9106
9107 return ret;
9108}
9109
9110/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009111 * FUNCTION : initStaticMetadata
9112 *
9113 * DESCRIPTION: initialize the static metadata
9114 *
9115 * PARAMETERS :
9116 * @cameraId : camera Id
9117 *
9118 * RETURN : int32_t type of status
9119 * 0 -- success
9120 * non-zero failure code
9121 *==========================================================================*/
9122int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9123{
9124 int rc = 0;
9125 CameraMetadata staticInfo;
9126 size_t count = 0;
9127 bool limitedDevice = false;
9128 char prop[PROPERTY_VALUE_MAX];
9129 bool supportBurst = false;
9130
9131 supportBurst = supportBurstCapture(cameraId);
9132
9133    /* If the sensor is a YUV sensor (no raw support), if per-frame control is not
9134     * guaranteed, or if the min fps at max resolution is less than 20 fps, it is
9135     * advertised as a LIMITED device */
9136 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9137 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9138 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9139 !supportBurst;
9140
9141 uint8_t supportedHwLvl = limitedDevice ?
9142 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009143#ifndef USE_HAL_3_3
9144 // LEVEL_3 - This device will support level 3.
9145 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9146#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009147 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009148#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009149
9150 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9151 &supportedHwLvl, 1);
9152
9153 bool facingBack = false;
9154 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9155 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9156 facingBack = true;
9157 }
9158 /*HAL 3 only*/
9159 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9160 &gCamCapability[cameraId]->min_focus_distance, 1);
9161
9162 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9163 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9164
9165    /* Should be using the list of focal lengths, but the sensor doesn't provide that info yet */
9166 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9167 &gCamCapability[cameraId]->focal_length,
9168 1);
9169
9170 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9171 gCamCapability[cameraId]->apertures,
9172 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9173
9174 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9175 gCamCapability[cameraId]->filter_densities,
9176 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9177
9178
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009179 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9180 size_t mode_count =
9181 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9182 for (size_t i = 0; i < mode_count; i++) {
9183 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9184 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009185 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009186 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009187
9188 int32_t lens_shading_map_size[] = {
9189 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9190 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9191 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9192 lens_shading_map_size,
9193 sizeof(lens_shading_map_size)/sizeof(int32_t));
9194
9195 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9196 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9197
9198 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9199 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9200
9201 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9202 &gCamCapability[cameraId]->max_frame_duration, 1);
9203
9204 camera_metadata_rational baseGainFactor = {
9205 gCamCapability[cameraId]->base_gain_factor.numerator,
9206 gCamCapability[cameraId]->base_gain_factor.denominator};
9207 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9208 &baseGainFactor, 1);
9209
9210 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9211 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9212
9213 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9214 gCamCapability[cameraId]->pixel_array_size.height};
9215 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9216 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9217
9218 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9219 gCamCapability[cameraId]->active_array_size.top,
9220 gCamCapability[cameraId]->active_array_size.width,
9221 gCamCapability[cameraId]->active_array_size.height};
9222 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9223 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9224
9225 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9226 &gCamCapability[cameraId]->white_level, 1);
9227
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009228 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9229 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9230 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009231 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009232 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009233
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009234#ifndef USE_HAL_3_3
9235 bool hasBlackRegions = false;
9236 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9237 LOGW("black_region_count: %d is bounded to %d",
9238 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9239 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9240 }
9241 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9242 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9243 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9244 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9245 }
9246 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9247 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9248 hasBlackRegions = true;
9249 }
9250#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009251 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9252 &gCamCapability[cameraId]->flash_charge_duration, 1);
9253
9254 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9255 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9256
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009257 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9258 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9259 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009260 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9261 &timestampSource, 1);
9262
Thierry Strudel54dc9782017-02-15 12:12:10 -08009263 //update histogram vendor data
9264 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009265 &gCamCapability[cameraId]->histogram_size, 1);
9266
Thierry Strudel54dc9782017-02-15 12:12:10 -08009267 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009268 &gCamCapability[cameraId]->max_histogram_count, 1);
9269
Shuzhen Wang14415f52016-11-16 18:26:18 -08009270 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9271    //so that the app can request fewer bins than the maximum supported.
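    // Illustrative example (assumed values): with max_histogram_count = 256 and
    // MIN_CAM_HISTOGRAM_STATS_SIZE = 64, the advertised list would be {256, 128, 64}.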
9272 std::vector<int32_t> histBins;
9273 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9274 histBins.push_back(maxHistBins);
9275 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9276 (maxHistBins & 0x1) == 0) {
9277 histBins.push_back(maxHistBins >> 1);
9278 maxHistBins >>= 1;
9279 }
9280 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9281 histBins.data(), histBins.size());
9282
Thierry Strudel3d639192016-09-09 11:52:26 -07009283 int32_t sharpness_map_size[] = {
9284 gCamCapability[cameraId]->sharpness_map_size.width,
9285 gCamCapability[cameraId]->sharpness_map_size.height};
9286
9287 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9288 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9289
9290 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9291 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9292
Emilian Peev0f3c3162017-03-15 12:57:46 +00009293 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9294 if (0 <= indexPD) {
9295 // Advertise PD stats data as part of the Depth capabilities
9296 int32_t depthWidth =
9297 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9298 int32_t depthHeight =
9299 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009300 int32_t depthStride =
9301 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
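        // The raw PD buffer holds depthWidth * depthHeight * 2 bytes (2 bytes per pixel,
        // matching depthStride above); each advertised depth sample is assumed to span
        // 16 of those bytes, hence the division by 16 below.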
Emilian Peev0f3c3162017-03-15 12:57:46 +00009302 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9303 assert(0 < depthSamplesCount);
9304 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9305 &depthSamplesCount, 1);
9306
9307 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9308 depthHeight,
9309 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9310 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9311 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9312 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9313 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9314
9315 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9316 depthHeight, 33333333,
9317 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9318 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9319 depthMinDuration,
9320 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9321
9322 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9323 depthHeight, 0,
9324 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9325 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9326 depthStallDuration,
9327 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9328
9329 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9330 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009331
9332 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9333 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9334 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009335 }
9336
Thierry Strudel3d639192016-09-09 11:52:26 -07009337 int32_t scalar_formats[] = {
9338 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9339 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9340 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9341 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9342 HAL_PIXEL_FORMAT_RAW10,
9343 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009344 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9345 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9346 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009347
9348 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9349 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9350 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9351 count, MAX_SIZES_CNT, available_processed_sizes);
9352 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9353 available_processed_sizes, count * 2);
9354
9355 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9356 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9357 makeTable(gCamCapability[cameraId]->raw_dim,
9358 count, MAX_SIZES_CNT, available_raw_sizes);
9359 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9360 available_raw_sizes, count * 2);
9361
9362 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9363 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9364 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9365 count, MAX_SIZES_CNT, available_fps_ranges);
9366 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9367 available_fps_ranges, count * 2);
9368
9369 camera_metadata_rational exposureCompensationStep = {
9370 gCamCapability[cameraId]->exp_compensation_step.numerator,
9371 gCamCapability[cameraId]->exp_compensation_step.denominator};
9372 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9373 &exposureCompensationStep, 1);
9374
9375 Vector<uint8_t> availableVstabModes;
9376 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9377 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009378 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009379 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009380 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009381 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009382 count = IS_TYPE_MAX;
9383 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9384 for (size_t i = 0; i < count; i++) {
9385 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9386 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9387 eisSupported = true;
9388 break;
9389 }
9390 }
9391 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009392 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9393 }
9394 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9395 availableVstabModes.array(), availableVstabModes.size());
9396
9397 /*HAL 1 and HAL 3 common*/
9398 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9399 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9400 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009401 // Cap the max zoom to the max preferred value
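    // Note: maxZoomStep and minZoomStep are uint32_t, so this division truncates to an
    // integer zoom ratio before the cap against MAX_PREFERRED_ZOOM_RATIO is applied.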
9402 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009403 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9404 &maxZoom, 1);
9405
9406 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9407 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9408
9409 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9410 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9411 max3aRegions[2] = 0; /* AF not supported */
9412 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9413 max3aRegions, 3);
9414
9415 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9416 memset(prop, 0, sizeof(prop));
9417 property_get("persist.camera.facedetect", prop, "1");
9418 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9419 LOGD("Support face detection mode: %d",
9420 supportedFaceDetectMode);
9421
9422 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009423    /* supported mode should be OFF if the max number of faces is 0 */
9424 if (maxFaces <= 0) {
9425 supportedFaceDetectMode = 0;
9426 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009427 Vector<uint8_t> availableFaceDetectModes;
9428 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9429 if (supportedFaceDetectMode == 1) {
9430 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9431 } else if (supportedFaceDetectMode == 2) {
9432 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9433 } else if (supportedFaceDetectMode == 3) {
9434 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9435 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9436 } else {
9437 maxFaces = 0;
9438 }
9439 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9440 availableFaceDetectModes.array(),
9441 availableFaceDetectModes.size());
9442 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9443 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009444 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9445 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9446 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009447
9448 int32_t exposureCompensationRange[] = {
9449 gCamCapability[cameraId]->exposure_compensation_min,
9450 gCamCapability[cameraId]->exposure_compensation_max};
9451 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9452 exposureCompensationRange,
9453 sizeof(exposureCompensationRange)/sizeof(int32_t));
9454
9455 uint8_t lensFacing = (facingBack) ?
9456 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9457 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9458
9459 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9460 available_thumbnail_sizes,
9461 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9462
9463    /* all sizes will be combined into this tag */
9464 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9465 /*android.scaler.availableStreamConfigurations*/
9466 Vector<int32_t> available_stream_configs;
9467 cam_dimension_t active_array_dim;
9468 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9469 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009470
9471    /* Advertise the list of supported input dimensions based on the property below.
9472       By default, all sizes up to 5MP will be advertised.
9473       Note that the setprop resolution format should be WxH,
9474       e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9475       To list all supported sizes, set the property to "0x0" */
9476 cam_dimension_t minInputSize = {2592,1944}; //5MP
9477 memset(prop, 0, sizeof(prop));
9478 property_get("persist.camera.input.minsize", prop, "2592x1944");
9479 if (strlen(prop) > 0) {
9480 char *saveptr = NULL;
9481 char *token = strtok_r(prop, "x", &saveptr);
9482 if (token != NULL) {
9483 minInputSize.width = atoi(token);
9484 }
9485 token = strtok_r(NULL, "x", &saveptr);
9486 if (token != NULL) {
9487 minInputSize.height = atoi(token);
9488 }
9489 }
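    // With the property set to "0x0", minInputSize becomes {0, 0}, so every picture size
    // below passes the >= minInputSize check and is also advertised as an input size.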
9490
Thierry Strudel3d639192016-09-09 11:52:26 -07009491 /* Add input/output stream configurations for each scalar formats*/
9492 for (size_t j = 0; j < scalar_formats_count; j++) {
9493 switch (scalar_formats[j]) {
9494 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9495 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9496 case HAL_PIXEL_FORMAT_RAW10:
9497 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9498 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9499 addStreamConfig(available_stream_configs, scalar_formats[j],
9500 gCamCapability[cameraId]->raw_dim[i],
9501 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9502 }
9503 break;
9504 case HAL_PIXEL_FORMAT_BLOB:
9505 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9506 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9507 addStreamConfig(available_stream_configs, scalar_formats[j],
9508 gCamCapability[cameraId]->picture_sizes_tbl[i],
9509 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9510 }
9511 break;
9512 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9513 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9514 default:
9515 cam_dimension_t largest_picture_size;
9516 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9517 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9518 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9519 addStreamConfig(available_stream_configs, scalar_formats[j],
9520 gCamCapability[cameraId]->picture_sizes_tbl[i],
9521 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009522                /* For the two formats below we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009523 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9524 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009525 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9526 >= minInputSize.width) || (gCamCapability[cameraId]->
9527 picture_sizes_tbl[i].height >= minInputSize.height)) {
9528 addStreamConfig(available_stream_configs, scalar_formats[j],
9529 gCamCapability[cameraId]->picture_sizes_tbl[i],
9530 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9531 }
9532 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009533 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009534
Thierry Strudel3d639192016-09-09 11:52:26 -07009535 break;
9536 }
9537 }
9538
9539 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9540 available_stream_configs.array(), available_stream_configs.size());
9541 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9542 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9543
9544 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9545 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9546
9547 /* android.scaler.availableMinFrameDurations */
9548 Vector<int64_t> available_min_durations;
9549 for (size_t j = 0; j < scalar_formats_count; j++) {
9550 switch (scalar_formats[j]) {
9551 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9552 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9553 case HAL_PIXEL_FORMAT_RAW10:
9554 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9555 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9556 available_min_durations.add(scalar_formats[j]);
9557 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9558 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9559 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9560 }
9561 break;
9562 default:
9563 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9564 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9565 available_min_durations.add(scalar_formats[j]);
9566 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9567 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9568 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9569 }
9570 break;
9571 }
9572 }
9573 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9574 available_min_durations.array(), available_min_durations.size());
9575
9576 Vector<int32_t> available_hfr_configs;
9577 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9578 int32_t fps = 0;
9579 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9580 case CAM_HFR_MODE_60FPS:
9581 fps = 60;
9582 break;
9583 case CAM_HFR_MODE_90FPS:
9584 fps = 90;
9585 break;
9586 case CAM_HFR_MODE_120FPS:
9587 fps = 120;
9588 break;
9589 case CAM_HFR_MODE_150FPS:
9590 fps = 150;
9591 break;
9592 case CAM_HFR_MODE_180FPS:
9593 fps = 180;
9594 break;
9595 case CAM_HFR_MODE_210FPS:
9596 fps = 210;
9597 break;
9598 case CAM_HFR_MODE_240FPS:
9599 fps = 240;
9600 break;
9601 case CAM_HFR_MODE_480FPS:
9602 fps = 480;
9603 break;
9604 case CAM_HFR_MODE_OFF:
9605 case CAM_HFR_MODE_MAX:
9606 default:
9607 break;
9608 }
9609
9610 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9611 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9612 /* For each HFR frame rate, need to advertise one variable fps range
9613 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9614 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9615 * set by the app. When video recording is started, [120, 120] is
9616 * set. This way sensor configuration does not change when recording
9617 * is started */
9618
9619 /* (width, height, fps_min, fps_max, batch_size_max) */
9620 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9621 j < MAX_SIZES_CNT; j++) {
9622 available_hfr_configs.add(
9623 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9624 available_hfr_configs.add(
9625 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9626 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9627 available_hfr_configs.add(fps);
9628 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9629
9630 /* (width, height, fps_min, fps_max, batch_size_max) */
9631 available_hfr_configs.add(
9632 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9633 available_hfr_configs.add(
9634 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9635 available_hfr_configs.add(fps);
9636 available_hfr_configs.add(fps);
9637 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9638 }
9639 }
9640 }
9641 //Advertise HFR capability only if the property is set
9642 memset(prop, 0, sizeof(prop));
9643 property_get("persist.camera.hal3hfr.enable", prop, "1");
9644 uint8_t hfrEnable = (uint8_t)atoi(prop);
9645
9646 if(hfrEnable && available_hfr_configs.array()) {
9647 staticInfo.update(
9648 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9649 available_hfr_configs.array(), available_hfr_configs.size());
9650 }
9651
9652 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9653 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9654 &max_jpeg_size, 1);
9655
9656 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9657 size_t size = 0;
9658 count = CAM_EFFECT_MODE_MAX;
9659 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9660 for (size_t i = 0; i < count; i++) {
9661 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9662 gCamCapability[cameraId]->supported_effects[i]);
9663 if (NAME_NOT_FOUND != val) {
9664 avail_effects[size] = (uint8_t)val;
9665 size++;
9666 }
9667 }
9668 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9669 avail_effects,
9670 size);
9671
9672 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9673 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9674 size_t supported_scene_modes_cnt = 0;
9675 count = CAM_SCENE_MODE_MAX;
9676 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9677 for (size_t i = 0; i < count; i++) {
9678 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9679 CAM_SCENE_MODE_OFF) {
9680 int val = lookupFwkName(SCENE_MODES_MAP,
9681 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9682 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009683
Thierry Strudel3d639192016-09-09 11:52:26 -07009684 if (NAME_NOT_FOUND != val) {
9685 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9686 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9687 supported_scene_modes_cnt++;
9688 }
9689 }
9690 }
9691 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9692 avail_scene_modes,
9693 supported_scene_modes_cnt);
9694
9695 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9696 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9697 supported_scene_modes_cnt,
9698 CAM_SCENE_MODE_MAX,
9699 scene_mode_overrides,
9700 supported_indexes,
9701 cameraId);
9702
9703 if (supported_scene_modes_cnt == 0) {
9704 supported_scene_modes_cnt = 1;
9705 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9706 }
9707
9708 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9709 scene_mode_overrides, supported_scene_modes_cnt * 3);
9710
9711 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9712 ANDROID_CONTROL_MODE_AUTO,
9713 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9714 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9715 available_control_modes,
9716 3);
9717
9718 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9719 size = 0;
9720 count = CAM_ANTIBANDING_MODE_MAX;
9721 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9722 for (size_t i = 0; i < count; i++) {
9723 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9724 gCamCapability[cameraId]->supported_antibandings[i]);
9725 if (NAME_NOT_FOUND != val) {
9726 avail_antibanding_modes[size] = (uint8_t)val;
9727 size++;
9728 }
9729
9730 }
9731 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9732 avail_antibanding_modes,
9733 size);
9734
9735 uint8_t avail_abberation_modes[] = {
9736 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9737 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9738 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9739 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9740 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9741 if (0 == count) {
9742        // If no aberration correction modes are available for a device, advertise only the OFF mode
9743 size = 1;
9744 } else {
9745        // If count is non-zero then at least one of the FAST or HIGH_QUALITY modes is supported,
9746        // so advertise all 3 modes if at least one mode is supported, as per the
9747        // new M requirement
9748 size = 3;
9749 }
9750 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9751 avail_abberation_modes,
9752 size);
9753
9754 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9755 size = 0;
9756 count = CAM_FOCUS_MODE_MAX;
9757 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9758 for (size_t i = 0; i < count; i++) {
9759 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9760 gCamCapability[cameraId]->supported_focus_modes[i]);
9761 if (NAME_NOT_FOUND != val) {
9762 avail_af_modes[size] = (uint8_t)val;
9763 size++;
9764 }
9765 }
9766 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9767 avail_af_modes,
9768 size);
9769
9770 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9771 size = 0;
9772 count = CAM_WB_MODE_MAX;
9773 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9774 for (size_t i = 0; i < count; i++) {
9775 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9776 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9777 gCamCapability[cameraId]->supported_white_balances[i]);
9778 if (NAME_NOT_FOUND != val) {
9779 avail_awb_modes[size] = (uint8_t)val;
9780 size++;
9781 }
9782 }
9783 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9784 avail_awb_modes,
9785 size);
9786
9787 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9788 count = CAM_FLASH_FIRING_LEVEL_MAX;
9789 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9790 count);
9791 for (size_t i = 0; i < count; i++) {
9792 available_flash_levels[i] =
9793 gCamCapability[cameraId]->supported_firing_levels[i];
9794 }
9795 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9796 available_flash_levels, count);
9797
9798 uint8_t flashAvailable;
9799 if (gCamCapability[cameraId]->flash_available)
9800 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9801 else
9802 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9803 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9804 &flashAvailable, 1);
9805
9806 Vector<uint8_t> avail_ae_modes;
9807 count = CAM_AE_MODE_MAX;
9808 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9809 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009810 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9811 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9812 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9813 }
9814 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009815 }
9816 if (flashAvailable) {
9817 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9818 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9819 }
9820 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9821 avail_ae_modes.array(),
9822 avail_ae_modes.size());
9823
9824 int32_t sensitivity_range[2];
9825 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9826 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9827 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9828 sensitivity_range,
9829 sizeof(sensitivity_range) / sizeof(int32_t));
9830
9831 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9832 &gCamCapability[cameraId]->max_analog_sensitivity,
9833 1);
9834
9835 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9836 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9837 &sensor_orientation,
9838 1);
9839
9840 int32_t max_output_streams[] = {
9841 MAX_STALLING_STREAMS,
9842 MAX_PROCESSED_STREAMS,
9843 MAX_RAW_STREAMS};
9844 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9845 max_output_streams,
9846 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9847
9848 uint8_t avail_leds = 0;
9849 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9850 &avail_leds, 0);
9851
9852 uint8_t focus_dist_calibrated;
9853 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9854 gCamCapability[cameraId]->focus_dist_calibrated);
9855 if (NAME_NOT_FOUND != val) {
9856 focus_dist_calibrated = (uint8_t)val;
9857 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9858 &focus_dist_calibrated, 1);
9859 }
9860
9861 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9862 size = 0;
9863 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9864 MAX_TEST_PATTERN_CNT);
9865 for (size_t i = 0; i < count; i++) {
9866 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9867 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9868 if (NAME_NOT_FOUND != testpatternMode) {
9869 avail_testpattern_modes[size] = testpatternMode;
9870 size++;
9871 }
9872 }
9873 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9874 avail_testpattern_modes,
9875 size);
9876
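    // Pipeline depth reported to the framework: the maximum number of in-flight requests
    // plus the empty-pipeline and frame-skip delays.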
9877 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9878 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9879 &max_pipeline_depth,
9880 1);
9881
9882 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9883 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9884 &partial_result_count,
9885 1);
9886
9887 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9888 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9889
9890 Vector<uint8_t> available_capabilities;
9891 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9892 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9893 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9894 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9895 if (supportBurst) {
9896 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9897 }
9898 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9899 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9900 if (hfrEnable && available_hfr_configs.array()) {
9901 available_capabilities.add(
9902 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9903 }
9904
9905 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9906 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9907 }
9908 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9909 available_capabilities.array(),
9910 available_capabilities.size());
9911
9912    //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9913    //The assumption is that all Bayer cameras support MANUAL_SENSOR.
9914 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9915 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9916
9917 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9918 &aeLockAvailable, 1);
9919
9920    //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9921    //BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9922 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9923 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9924
9925 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9926 &awbLockAvailable, 1);
9927
9928 int32_t max_input_streams = 1;
9929 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9930 &max_input_streams,
9931 1);
9932
9933    /* Format of the map: input format, num_output_formats, outputFormat1, ..., outputFormatN */
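    // Decoded, the map below reads: IMPLEMENTATION_DEFINED -> {BLOB, YCbCr_420_888} and
    // YCbCr_420_888 -> {BLOB, YCbCr_420_888}, i.e. both input formats can be reprocessed
    // into JPEG (BLOB) or YUV output streams.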
9934 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9935 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9936 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9937 HAL_PIXEL_FORMAT_YCbCr_420_888};
9938 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9939 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9940
9941 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9942 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9943 &max_latency,
9944 1);
9945
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009946#ifndef USE_HAL_3_3
9947 int32_t isp_sensitivity_range[2];
9948 isp_sensitivity_range[0] =
9949 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9950 isp_sensitivity_range[1] =
9951 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9952 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9953 isp_sensitivity_range,
9954 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9955#endif
9956
Thierry Strudel3d639192016-09-09 11:52:26 -07009957 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9958 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9959 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9960 available_hot_pixel_modes,
9961 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9962
9963 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9964 ANDROID_SHADING_MODE_FAST,
9965 ANDROID_SHADING_MODE_HIGH_QUALITY};
9966 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9967 available_shading_modes,
9968 3);
9969
9970 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9971 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9972 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9973 available_lens_shading_map_modes,
9974 2);
9975
9976 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9977 ANDROID_EDGE_MODE_FAST,
9978 ANDROID_EDGE_MODE_HIGH_QUALITY,
9979 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9980 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9981 available_edge_modes,
9982 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9983
9984 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9985 ANDROID_NOISE_REDUCTION_MODE_FAST,
9986 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9987 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9988 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9989 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9990 available_noise_red_modes,
9991 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9992
9993 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9994 ANDROID_TONEMAP_MODE_FAST,
9995 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9996 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9997 available_tonemap_modes,
9998 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9999
10000 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10001 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10002 available_hot_pixel_map_modes,
10003 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10004
10005 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10006 gCamCapability[cameraId]->reference_illuminant1);
10007 if (NAME_NOT_FOUND != val) {
10008 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10009 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10010 }
10011
10012 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10013 gCamCapability[cameraId]->reference_illuminant2);
10014 if (NAME_NOT_FOUND != val) {
10015 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10016 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10017 }
10018
10019 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10020 (void *)gCamCapability[cameraId]->forward_matrix1,
10021 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10022
10023 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10024 (void *)gCamCapability[cameraId]->forward_matrix2,
10025 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10026
10027 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10028 (void *)gCamCapability[cameraId]->color_transform1,
10029 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10030
10031 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10032 (void *)gCamCapability[cameraId]->color_transform2,
10033 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10034
10035 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10036 (void *)gCamCapability[cameraId]->calibration_transform1,
10037 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10038
10039 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10040 (void *)gCamCapability[cameraId]->calibration_transform2,
10041 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10042
10043 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10044 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10045 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10046 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10047 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10048 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10049 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10050 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10051 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10052 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10053 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10054 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10055 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10056 ANDROID_JPEG_GPS_COORDINATES,
10057 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10058 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10059 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10060 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10061 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10062 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10063 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10064 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10065 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10066 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010067#ifndef USE_HAL_3_3
10068 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10069#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010070 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010071 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010072 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10073 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010074 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010075 /* DevCamDebug metadata request_keys_basic */
10076 DEVCAMDEBUG_META_ENABLE,
10077 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010078 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010079 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010080 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010081 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010082 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010083 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010084
10085 size_t request_keys_cnt =
10086 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10087 Vector<int32_t> available_request_keys;
10088 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10089 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10090 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10091 }
10092
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010093 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +000010094 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010095 }
10096
Thierry Strudel3d639192016-09-09 11:52:26 -070010097 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10098 available_request_keys.array(), available_request_keys.size());
10099
10100 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10101 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10102 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10103 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10104 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10105 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10106 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10107 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10108 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10109 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10110 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10111 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10112 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10113 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10114 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10115 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10116 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010117 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010118 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10119 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10120 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010121 ANDROID_STATISTICS_FACE_SCORES,
10122#ifndef USE_HAL_3_3
10123 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10124#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010125 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010126 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010127 // DevCamDebug metadata result_keys_basic
10128 DEVCAMDEBUG_META_ENABLE,
10129 // DevCamDebug metadata result_keys AF
10130 DEVCAMDEBUG_AF_LENS_POSITION,
10131 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10132 DEVCAMDEBUG_AF_TOF_DISTANCE,
10133 DEVCAMDEBUG_AF_LUMA,
10134 DEVCAMDEBUG_AF_HAF_STATE,
10135 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10136 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10137 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10138 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10139 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10140 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10141 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10142 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10143 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10144 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10145 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10146 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10147 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10148 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10149 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10150 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10151 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10152 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10153 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10154 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10155 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10156 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10157 // DevCamDebug metadata result_keys AEC
10158 DEVCAMDEBUG_AEC_TARGET_LUMA,
10159 DEVCAMDEBUG_AEC_COMP_LUMA,
10160 DEVCAMDEBUG_AEC_AVG_LUMA,
10161 DEVCAMDEBUG_AEC_CUR_LUMA,
10162 DEVCAMDEBUG_AEC_LINECOUNT,
10163 DEVCAMDEBUG_AEC_REAL_GAIN,
10164 DEVCAMDEBUG_AEC_EXP_INDEX,
10165 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010166 // DevCamDebug metadata result_keys zzHDR
10167 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10168 DEVCAMDEBUG_AEC_L_LINECOUNT,
10169 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10170 DEVCAMDEBUG_AEC_S_LINECOUNT,
10171 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10172 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10173 // DevCamDebug metadata result_keys ADRC
10174 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10175 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10176 DEVCAMDEBUG_AEC_GTM_RATIO,
10177 DEVCAMDEBUG_AEC_LTM_RATIO,
10178 DEVCAMDEBUG_AEC_LA_RATIO,
10179 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010180 // DevCamDebug metadata result_keys AWB
10181 DEVCAMDEBUG_AWB_R_GAIN,
10182 DEVCAMDEBUG_AWB_G_GAIN,
10183 DEVCAMDEBUG_AWB_B_GAIN,
10184 DEVCAMDEBUG_AWB_CCT,
10185 DEVCAMDEBUG_AWB_DECISION,
10186 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010187 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10188 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10189 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010190 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010191 };
10192
Thierry Strudel3d639192016-09-09 11:52:26 -070010193 size_t result_keys_cnt =
10194 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10195
10196 Vector<int32_t> available_result_keys;
10197 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10198 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10199 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10200 }
10201 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10202 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10203 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10204 }
10205 if (supportedFaceDetectMode == 1) {
10206 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10207 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10208 } else if ((supportedFaceDetectMode == 2) ||
10209 (supportedFaceDetectMode == 3)) {
10210 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10211 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10212 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010213#ifndef USE_HAL_3_3
10214 if (hasBlackRegions) {
10215 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10216 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10217 }
10218#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010219
10220 if (gExposeEnableZslKey) {
10221 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10222 }
10223
Thierry Strudel3d639192016-09-09 11:52:26 -070010224 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10225 available_result_keys.array(), available_result_keys.size());
10226
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010227 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010228 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10229 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10230 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10231 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10232 ANDROID_SCALER_CROPPING_TYPE,
10233 ANDROID_SYNC_MAX_LATENCY,
10234 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10235 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10236 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10237 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10238 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10239 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10240 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10241 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10242 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10243 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10244 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10245 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10246 ANDROID_LENS_FACING,
10247 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10248 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10249 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10250 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10251 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10252 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10253 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10254 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10255 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10256 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10257 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10258 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10259 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10260 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10261 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10262 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10263 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10264 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10265 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10266 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010267 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010268 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10269 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10270 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10271 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10272 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10273 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10274 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10275 ANDROID_CONTROL_AVAILABLE_MODES,
10276 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10277 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10278 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10279 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010280 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10281#ifndef USE_HAL_3_3
10282 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10283 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10284#endif
10285 };
10286
10287 Vector<int32_t> available_characteristics_keys;
10288 available_characteristics_keys.appendArray(characteristics_keys_basic,
10289 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10290#ifndef USE_HAL_3_3
10291 if (hasBlackRegions) {
10292 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10293 }
10294#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010295
10296 if (0 <= indexPD) {
10297 int32_t depthKeys[] = {
10298 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10299 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10300 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10301 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10302 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10303 };
10304 available_characteristics_keys.appendArray(depthKeys,
10305 sizeof(depthKeys) / sizeof(depthKeys[0]));
10306 }
10307
Thierry Strudel3d639192016-09-09 11:52:26 -070010308 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010309 available_characteristics_keys.array(),
10310 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010311
10312 /*available stall durations depend on the hw + sw and will be different for different devices */
10313 /*have to add for raw after implementation*/
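    /* Each ANDROID_SCALER_AVAILABLE_STALL_DURATIONS entry built below is a 4-tuple of
       (format, width, height, stall duration in ns): one tuple per supported JPEG
       picture size and one per supported RAW16 dimension. */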
10314 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10315 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10316
10317 Vector<int64_t> available_stall_durations;
10318 for (uint32_t j = 0; j < stall_formats_count; j++) {
10319 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10320 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10321 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10322 available_stall_durations.add(stall_formats[j]);
10323 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10324 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10325 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10326 }
10327 } else {
10328 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10329 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10330 available_stall_durations.add(stall_formats[j]);
10331 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10332 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10333 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10334 }
10335 }
10336 }
10337 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10338 available_stall_durations.array(),
10339 available_stall_durations.size());
10340
10341 //QCAMERA3_OPAQUE_RAW
10342 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10343 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10344 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10345 case LEGACY_RAW:
10346 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10347 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10348 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10349 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10350 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10351 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10352 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10353 break;
10354 case MIPI_RAW:
10355 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10356 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10357 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10358 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10359 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10360 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10361 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10362 break;
10363 default:
10364 LOGE("unknown opaque_raw_format %d",
10365 gCamCapability[cameraId]->opaque_raw_fmt);
10366 break;
10367 }
10368 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10369
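    /* QCAMERA3_OPAQUE_RAW_STRIDES is published as (width, height, stride) triplets,
       one per supported opaque RAW dimension, using the stride reported by
       mm_stream_calc_offset_raw() for the format selected above. */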
10370 Vector<int32_t> strides;
10371 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10372 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10373 cam_stream_buf_plane_info_t buf_planes;
10374 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10375 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10376 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10377 &gCamCapability[cameraId]->padding_info, &buf_planes);
10378 strides.add(buf_planes.plane_info.mp[0].stride);
10379 }
10380 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10381 strides.size());
10382
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010383 //TBD: remove the following line once backend advertises zzHDR in feature mask
10384 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010385 //Video HDR default
10386 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10387 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010388 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010389 int32_t vhdr_mode[] = {
10390 QCAMERA3_VIDEO_HDR_MODE_OFF,
10391 QCAMERA3_VIDEO_HDR_MODE_ON};
10392
10393 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10394 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10395 vhdr_mode, vhdr_mode_count);
10396 }
10397
Thierry Strudel3d639192016-09-09 11:52:26 -070010398 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10399 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10400 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10401
10402 uint8_t isMonoOnly =
10403 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10404 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10405 &isMonoOnly, 1);
10406
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010407#ifndef USE_HAL_3_3
10408 Vector<int32_t> opaque_size;
10409 for (size_t j = 0; j < scalar_formats_count; j++) {
10410 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10411 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10412 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10413 cam_stream_buf_plane_info_t buf_planes;
10414
10415 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10416 &gCamCapability[cameraId]->padding_info, &buf_planes);
10417
10418 if (rc == 0) {
10419 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10420 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10421 opaque_size.add(buf_planes.plane_info.frame_len);
10422                 } else {
10423 LOGE("raw frame calculation failed!");
10424 }
10425 }
10426 }
10427 }
10428
10429 if ((opaque_size.size() > 0) &&
10430 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10431 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10432 else
10433         LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10434#endif
10435
Thierry Strudel04e026f2016-10-10 11:27:36 -070010436 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10437 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10438 size = 0;
10439 count = CAM_IR_MODE_MAX;
10440 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10441 for (size_t i = 0; i < count; i++) {
10442 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10443 gCamCapability[cameraId]->supported_ir_modes[i]);
10444 if (NAME_NOT_FOUND != val) {
10445 avail_ir_modes[size] = (int32_t)val;
10446 size++;
10447 }
10448 }
10449 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10450 avail_ir_modes, size);
10451 }
10452
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010453 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10454 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10455 size = 0;
10456 count = CAM_AEC_CONVERGENCE_MAX;
10457 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10458 for (size_t i = 0; i < count; i++) {
10459 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10460 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10461 if (NAME_NOT_FOUND != val) {
10462 available_instant_aec_modes[size] = (int32_t)val;
10463 size++;
10464 }
10465 }
10466 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10467 available_instant_aec_modes, size);
10468 }
10469
Thierry Strudel54dc9782017-02-15 12:12:10 -080010470 int32_t sharpness_range[] = {
10471 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10472 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10473 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10474
10475 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10476 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10477 size = 0;
10478 count = CAM_BINNING_CORRECTION_MODE_MAX;
10479 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10480 for (size_t i = 0; i < count; i++) {
10481 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10482 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10483 gCamCapability[cameraId]->supported_binning_modes[i]);
10484 if (NAME_NOT_FOUND != val) {
10485 avail_binning_modes[size] = (int32_t)val;
10486 size++;
10487 }
10488 }
10489 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10490 avail_binning_modes, size);
10491 }
10492
10493 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10494 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10495 size = 0;
10496 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10497 for (size_t i = 0; i < count; i++) {
10498 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10499 gCamCapability[cameraId]->supported_aec_modes[i]);
10500 if (NAME_NOT_FOUND != val)
10501 available_aec_modes[size++] = val;
10502 }
10503 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10504 available_aec_modes, size);
10505 }
10506
10507 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10508 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10509 size = 0;
10510 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10511 for (size_t i = 0; i < count; i++) {
10512 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10513 gCamCapability[cameraId]->supported_iso_modes[i]);
10514 if (NAME_NOT_FOUND != val)
10515 available_iso_modes[size++] = val;
10516 }
10517 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10518 available_iso_modes, size);
10519 }
10520
10521 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010522 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010523 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10524 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10525 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10526
10527 int32_t available_saturation_range[4];
10528 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10529 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10530 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10531 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10532 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10533 available_saturation_range, 4);
10534
10535 uint8_t is_hdr_values[2];
10536 is_hdr_values[0] = 0;
10537 is_hdr_values[1] = 1;
10538 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10539 is_hdr_values, 2);
10540
10541 float is_hdr_confidence_range[2];
10542 is_hdr_confidence_range[0] = 0.0;
10543 is_hdr_confidence_range[1] = 1.0;
10544 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10545 is_hdr_confidence_range, 2);
10546
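    // Report the sensor EEPROM version string, appending an Easel presence marker
    // (",E:Y" if Easel is present on the device, ",E:N" otherwise) when it fits.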
Emilian Peev0a972ef2017-03-16 10:25:53 +000010547 size_t eepromLength = strnlen(
10548 reinterpret_cast<const char *>(
10549 gCamCapability[cameraId]->eeprom_version_info),
10550 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10551 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010552 char easelInfo[] = ",E:N";
10553 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10554 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10555 eepromLength += sizeof(easelInfo);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010556 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
10557 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E:Y" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010558 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010559 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010560 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10561 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10562 }
10563
Thierry Strudel3d639192016-09-09 11:52:26 -070010564 gStaticMetadata[cameraId] = staticInfo.release();
10565 return rc;
10566}
10567
10568/*===========================================================================
10569 * FUNCTION : makeTable
10570 *
10571 * DESCRIPTION: make a table of sizes
10572 *
10573 * PARAMETERS :
10574 *   @dimTable / @size : input dimension table and its valid entry count
10575 *   @max_size / @sizeTable : output capacity and flattened (width, height) output table
10576 *==========================================================================*/
10577void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10578 size_t max_size, int32_t *sizeTable)
10579{
10580 size_t j = 0;
10581 if (size > max_size) {
10582 size = max_size;
10583 }
10584 for (size_t i = 0; i < size; i++) {
10585 sizeTable[j] = dimTable[i].width;
10586 sizeTable[j+1] = dimTable[i].height;
10587 j+=2;
10588 }
10589}
10590
10591/*===========================================================================
10592 * FUNCTION : makeFPSTable
10593 *
10594 * DESCRIPTION: make a table of fps ranges
10595 *
10596 * PARAMETERS : @fpsTable / @size : input fps range table and its valid entry count
10597 *   @max_size / @fpsRangesTable : output capacity and flattened (min_fps, max_fps) output table
10598 *==========================================================================*/
10599void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10600 size_t max_size, int32_t *fpsRangesTable)
10601{
10602 size_t j = 0;
10603 if (size > max_size) {
10604 size = max_size;
10605 }
10606 for (size_t i = 0; i < size; i++) {
10607 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10608 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10609 j+=2;
10610 }
10611}
10612
10613/*===========================================================================
10614 * FUNCTION : makeOverridesList
10615 *
10616 * DESCRIPTION: make a list of scene mode overrides
10617 *
10618 * PARAMETERS :
10619 *   @overridesTable / @size / @max_size : backend scene mode override table and its bounds
10620 *   @overridesList / @supported_indexes / @camera_id : output list, supported scene mode indexes, camera id
10621 *==========================================================================*/
10622void QCamera3HardwareInterface::makeOverridesList(
10623 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10624 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10625{
10626     /* The daemon gives a list of overrides for all scene modes.
10627        However, we should send the framework only the overrides for the
10628        scene modes it supports. */
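    /* Each output entry is a 3-tuple of (AE mode, AWB mode, AF mode) overrides,
       written at offsets j, j+1 and j+2 for every supported scene mode. */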
10629 size_t j = 0;
10630 if (size > max_size) {
10631 size = max_size;
10632 }
10633 size_t focus_count = CAM_FOCUS_MODE_MAX;
10634 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10635 focus_count);
10636 for (size_t i = 0; i < size; i++) {
10637 bool supt = false;
10638 size_t index = supported_indexes[i];
10639 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10640 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10641 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10642 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10643 overridesTable[index].awb_mode);
10644 if (NAME_NOT_FOUND != val) {
10645 overridesList[j+1] = (uint8_t)val;
10646 }
10647 uint8_t focus_override = overridesTable[index].af_mode;
10648 for (size_t k = 0; k < focus_count; k++) {
10649 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10650 supt = true;
10651 break;
10652 }
10653 }
10654 if (supt) {
10655 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10656 focus_override);
10657 if (NAME_NOT_FOUND != val) {
10658 overridesList[j+2] = (uint8_t)val;
10659 }
10660 } else {
10661 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10662 }
10663 j+=3;
10664 }
10665}
10666
10667/*===========================================================================
10668 * FUNCTION : filterJpegSizes
10669 *
10670 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that are at
10671 *              least as large as the active array divided by the downscale factor
10672 *
10673 * PARAMETERS : @jpegSizes / @processedSizes : output and input flattened (w, h) size lists
10674 *   @processedSizesCnt / @maxCount / @active_array_size / @downscale_factor : bounds and scaling inputs
10675 * RETURN : length of jpegSizes array
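 * For example, with a 4000x3000 active array and a downscale_factor of 4, only
 * processed sizes of at least 1000x750 are copied into jpegSizes.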
10676 *==========================================================================*/
10677
10678size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10679 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10680 uint8_t downscale_factor)
10681{
10682 if (0 == downscale_factor) {
10683 downscale_factor = 1;
10684 }
10685
10686 int32_t min_width = active_array_size.width / downscale_factor;
10687 int32_t min_height = active_array_size.height / downscale_factor;
10688 size_t jpegSizesCnt = 0;
10689 if (processedSizesCnt > maxCount) {
10690 processedSizesCnt = maxCount;
10691 }
10692 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10693 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10694 jpegSizes[jpegSizesCnt] = processedSizes[i];
10695 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10696 jpegSizesCnt += 2;
10697 }
10698 }
10699 return jpegSizesCnt;
10700}
10701
10702/*===========================================================================
10703 * FUNCTION : computeNoiseModelEntryS
10704 *
10705 * DESCRIPTION: function to map a given sensitivity to the S noise
10706 * model parameters in the DNG noise model.
10707 *
10708 * PARAMETERS : sens : the sensor sensitivity
10709 *
10710 * RETURN : S (sensor amplification) noise
10711 *
10712 *==========================================================================*/
10713double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10714 double s = gCamCapability[mCameraId]->gradient_S * sens +
10715 gCamCapability[mCameraId]->offset_S;
10716 return ((s < 0.0) ? 0.0 : s);
10717}
10718
10719/*===========================================================================
10720 * FUNCTION : computeNoiseModelEntryO
10721 *
10722 * DESCRIPTION: function to map a given sensitivity to the O noise
10723 * model parameters in the DNG noise model.
10724 *
10725 * PARAMETERS : sens : the sensor sensitivity
10726 *
10727 * RETURN : O (sensor readout) noise
10728 *
10729 *==========================================================================*/
10730double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10731 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10732 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10733 1.0 : (1.0 * sens / max_analog_sens);
10734 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10735 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10736 return ((o < 0.0) ? 0.0 : o);
10737}
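
// Together these two helpers supply the per-channel (S, O) coefficients of the DNG
// noise model, where (per the ANDROID_SENSOR_NOISE_PROFILE definition) the standard
// deviation of a normalized pixel value x is modeled as sqrt(S * x + O). A sketch of
// how a caller might pair them up (hypothetical helper, not part of this HAL flow):
//
//   std::vector<double> profile;
//   for (int ch = 0; ch < 4; ch++) {          // one (S, O) pair per CFA channel
//       profile.push_back(computeNoiseModelEntryS(sens));
//       profile.push_back(computeNoiseModelEntryO(sens));
//   }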
10738
10739/*===========================================================================
10740 * FUNCTION : getSensorSensitivity
10741 *
10742 * DESCRIPTION: convert iso_mode to an integer value
10743 *
10744 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10745 *
10746 * RETURN : sensitivity supported by sensor
10747 *
10748 *==========================================================================*/
10749int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10750{
10751 int32_t sensitivity;
10752
10753 switch (iso_mode) {
10754 case CAM_ISO_MODE_100:
10755 sensitivity = 100;
10756 break;
10757 case CAM_ISO_MODE_200:
10758 sensitivity = 200;
10759 break;
10760 case CAM_ISO_MODE_400:
10761 sensitivity = 400;
10762 break;
10763 case CAM_ISO_MODE_800:
10764 sensitivity = 800;
10765 break;
10766 case CAM_ISO_MODE_1600:
10767 sensitivity = 1600;
10768 break;
10769 default:
10770 sensitivity = -1;
10771 break;
10772 }
10773 return sensitivity;
10774}
10775
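/*===========================================================================
 * FUNCTION   : initHdrPlusClientLocked
 *
 * DESCRIPTION: Lazily creates the global Easel manager client and, when Easel is
 *              present and camera.hdrplus.donotpoweroneasel is not set, opens it,
 *              suspends it immediately, and caches the HDR+ related properties
 *              (bypass mode, profiling, enableZsl key exposure). Expected to be
 *              called with gHdrPlusClientLock held (callers such as getCamInfo
 *              take it).
 *
 * RETURN     : OK on success, error code otherwise
 *==========================================================================*/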
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010776int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010777 if (gEaselManagerClient == nullptr) {
10778 gEaselManagerClient = EaselManagerClient::create();
10779 if (gEaselManagerClient == nullptr) {
10780 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10781 return -ENODEV;
10782 }
10783 }
10784
10785 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010786 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10787 // to connect to Easel.
10788         bool doNotPowerOnEasel =
10789 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10790
10791         if (doNotPowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010792 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10793 return OK;
10794 }
10795
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010796 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010797 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010798 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010799 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010800 return res;
10801 }
10802
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010803 EaselManagerClientOpened = true;
10804
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010805 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010806 if (res != OK) {
10807 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10808 }
10809
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010810 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010811 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010812
10813 // Expose enableZsl key only when HDR+ mode is enabled.
10814 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010815 }
10816
10817 return OK;
10818}
10819
Thierry Strudel3d639192016-09-09 11:52:26 -070010820/*===========================================================================
10821 * FUNCTION : getCamInfo
10822 *
10823 * DESCRIPTION: query camera capabilities
10824 *
10825 * PARAMETERS :
10826 * @cameraId : camera Id
10827 * @info : camera info struct to be filled in with camera capabilities
10828 *
10829 * RETURN : int type of status
10830 * NO_ERROR -- success
10831 * non-zero failure code
10832 *==========================================================================*/
10833int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10834 struct camera_info *info)
10835{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010836 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010837 int rc = 0;
10838
10839 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010840
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010841 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070010842 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010843 rc = initHdrPlusClientLocked();
10844 if (rc != OK) {
10845 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10846 pthread_mutex_unlock(&gCamLock);
10847 return rc;
10848 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010849 }
10850
Thierry Strudel3d639192016-09-09 11:52:26 -070010851 if (NULL == gCamCapability[cameraId]) {
10852 rc = initCapabilities(cameraId);
10853 if (rc < 0) {
10854 pthread_mutex_unlock(&gCamLock);
10855 return rc;
10856 }
10857 }
10858
10859 if (NULL == gStaticMetadata[cameraId]) {
10860 rc = initStaticMetadata(cameraId);
10861 if (rc < 0) {
10862 pthread_mutex_unlock(&gCamLock);
10863 return rc;
10864 }
10865 }
10866
10867 switch(gCamCapability[cameraId]->position) {
10868 case CAM_POSITION_BACK:
10869 case CAM_POSITION_BACK_AUX:
10870 info->facing = CAMERA_FACING_BACK;
10871 break;
10872
10873 case CAM_POSITION_FRONT:
10874 case CAM_POSITION_FRONT_AUX:
10875 info->facing = CAMERA_FACING_FRONT;
10876 break;
10877
10878 default:
10879 LOGE("Unknown position type %d for camera id:%d",
10880 gCamCapability[cameraId]->position, cameraId);
10881 rc = -1;
10882 break;
10883 }
10884
10885
10886 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010887#ifndef USE_HAL_3_3
10888 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10889#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010890 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010891#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010892 info->static_camera_characteristics = gStaticMetadata[cameraId];
10893
10894 //For now assume both cameras can operate independently.
10895 info->conflicting_devices = NULL;
10896 info->conflicting_devices_length = 0;
10897
10898 //resource cost is 100 * MIN(1.0, m/M),
10899     //where m is the throughput requirement with the maximum stream configuration
10900 //and M is CPP maximum throughput.
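    // For instance (hypothetical numbers), a 12MP active array with a 30fps maximum
    // and MAX_PROCESSED_STREAMS = 3 gives m = 3 * 12e6 * 30 pixels/sec, and the
    // reported cost is 100 * MIN(1.0, m / max_pixel_bandwidth).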
10901 float max_fps = 0.0;
10902 for (uint32_t i = 0;
10903 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10904 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10905 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10906 }
10907 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10908 gCamCapability[cameraId]->active_array_size.width *
10909 gCamCapability[cameraId]->active_array_size.height * max_fps /
10910 gCamCapability[cameraId]->max_pixel_bandwidth;
10911 info->resource_cost = 100 * MIN(1.0, ratio);
10912 LOGI("camera %d resource cost is %d", cameraId,
10913 info->resource_cost);
10914
10915 pthread_mutex_unlock(&gCamLock);
10916 return rc;
10917}
10918
10919/*===========================================================================
10920 * FUNCTION : translateCapabilityToMetadata
10921 *
10922 * DESCRIPTION: translate the capability into camera_metadata_t
10923 *
10924 * PARAMETERS : @type : capture request template type (CAMERA3_TEMPLATE_*)
10925 *
10926 *
10927 * RETURN : success: camera_metadata_t*
10928 * failure: NULL
10929 *
10930 *==========================================================================*/
10931camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10932{
10933 if (mDefaultMetadata[type] != NULL) {
10934 return mDefaultMetadata[type];
10935 }
10936 //first time we are handling this request
10937 //fill up the metadata structure using the wrapper class
10938 CameraMetadata settings;
10939 //translate from cam_capability_t to camera_metadata_tag_t
10940 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10941 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10942 int32_t defaultRequestID = 0;
10943 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10944
10945 /* OIS disable */
10946 char ois_prop[PROPERTY_VALUE_MAX];
10947 memset(ois_prop, 0, sizeof(ois_prop));
10948 property_get("persist.camera.ois.disable", ois_prop, "0");
10949 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10950
10951 /* Force video to use OIS */
10952 char videoOisProp[PROPERTY_VALUE_MAX];
10953 memset(videoOisProp, 0, sizeof(videoOisProp));
10954 property_get("persist.camera.ois.video", videoOisProp, "1");
10955 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010956
10957 // Hybrid AE enable/disable
10958 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10959 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10960 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10961 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10962
Thierry Strudel3d639192016-09-09 11:52:26 -070010963 uint8_t controlIntent = 0;
10964 uint8_t focusMode;
10965 uint8_t vsMode;
10966 uint8_t optStabMode;
10967 uint8_t cacMode;
10968 uint8_t edge_mode;
10969 uint8_t noise_red_mode;
10970 uint8_t tonemap_mode;
10971 bool highQualityModeEntryAvailable = FALSE;
10972 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010973 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010974 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10975 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010976 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010977 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010978 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010979
Thierry Strudel3d639192016-09-09 11:52:26 -070010980 switch (type) {
10981 case CAMERA3_TEMPLATE_PREVIEW:
10982 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10983 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10984 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10985 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10986 edge_mode = ANDROID_EDGE_MODE_FAST;
10987 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10988 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10989 break;
10990 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10991 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10992 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10993 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10994 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10995 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10996 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10997 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10998 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10999 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11000 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11001 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11002 highQualityModeEntryAvailable = TRUE;
11003 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11004 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11005 fastModeEntryAvailable = TRUE;
11006 }
11007 }
11008 if (highQualityModeEntryAvailable) {
11009 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11010 } else if (fastModeEntryAvailable) {
11011 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11012 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011013 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11014 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11015 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011016 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011017 break;
11018 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11019 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11020 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11021 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011022 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11023 edge_mode = ANDROID_EDGE_MODE_FAST;
11024 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11025 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11026 if (forceVideoOis)
11027 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11028 break;
11029 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11030 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11031 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11032 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011033 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11034 edge_mode = ANDROID_EDGE_MODE_FAST;
11035 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11036 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11037 if (forceVideoOis)
11038 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11039 break;
11040 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11041 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11042 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11043 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11044 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11045 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11046 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11047 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11048 break;
11049 case CAMERA3_TEMPLATE_MANUAL:
11050 edge_mode = ANDROID_EDGE_MODE_FAST;
11051 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11052 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11053 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11054 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11055 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11056 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11057 break;
11058 default:
11059 edge_mode = ANDROID_EDGE_MODE_FAST;
11060 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11061 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11062 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11063 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11064 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11065 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11066 break;
11067 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011068     // Set CAC to OFF if the underlying device doesn't support it
11069 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11070 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11071 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011072 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11073 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11074 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11075 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11076 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11077 }
11078 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011079 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011080 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011081
11082 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11083 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11084 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11085 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11086 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11087 || ois_disable)
11088 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11089 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011090 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011091
11092 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11093 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11094
11095 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11096 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11097
11098 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11099 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11100
11101 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11102 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11103
11104 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11105 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11106
11107 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11108 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11109
11110 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11111 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11112
11113 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11114 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11115
11116 /*flash*/
11117 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11118 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11119
11120 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11121 settings.update(ANDROID_FLASH_FIRING_POWER,
11122 &flashFiringLevel, 1);
11123
11124 /* lens */
11125 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11126 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11127
11128 if (gCamCapability[mCameraId]->filter_densities_count) {
11129 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11130 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11131 gCamCapability[mCameraId]->filter_densities_count);
11132 }
11133
11134 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11135 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11136
Thierry Strudel3d639192016-09-09 11:52:26 -070011137 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11138 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11139
11140 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11141 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11142
11143 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11144 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11145
11146 /* face detection (default to OFF) */
11147 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11148 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11149
Thierry Strudel54dc9782017-02-15 12:12:10 -080011150 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11151 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011152
11153 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11154 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11155
11156 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11157 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11158
Thierry Strudel3d639192016-09-09 11:52:26 -070011159
11160 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11161 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11162
11163     /* Exposure time (default to the minimum of the supported range) */
11164 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11165 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11166
11167 /* frame duration */
11168 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11169 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11170
11171 /* sensitivity */
11172 static const int32_t default_sensitivity = 100;
11173 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011174#ifndef USE_HAL_3_3
11175 static const int32_t default_isp_sensitivity =
11176 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11177 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11178#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011179
11180 /*edge mode*/
11181 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11182
11183 /*noise reduction mode*/
11184 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11185
11186 /*color correction mode*/
11187 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11188 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11189
11190 /*transform matrix mode*/
11191 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11192
11193 int32_t scaler_crop_region[4];
11194 scaler_crop_region[0] = 0;
11195 scaler_crop_region[1] = 0;
11196 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11197 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11198 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11199
11200 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11201 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11202
11203 /*focus distance*/
11204 float focus_distance = 0.0;
11205 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11206
11207 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011208 /* Restrict template max_fps to 30 */
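    /* For example (hypothetical table), given ranges [15, 30] and [30, 30]:
       preview/still/ZSL templates pick [15, 30] (the widest range), while the
       remaining templates pick [30, 30] (the highest fixed-fps range). */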
Thierry Strudel3d639192016-09-09 11:52:26 -070011209 float max_range = 0.0;
11210 float max_fixed_fps = 0.0;
11211 int32_t fps_range[2] = {0, 0};
11212 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11213 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011214 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11215 TEMPLATE_MAX_PREVIEW_FPS) {
11216 continue;
11217 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011218 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11219 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11220 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11221 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11222 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11223 if (range > max_range) {
11224 fps_range[0] =
11225 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11226 fps_range[1] =
11227 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11228 max_range = range;
11229 }
11230 } else {
11231 if (range < 0.01 && max_fixed_fps <
11232 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11233 fps_range[0] =
11234 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11235 fps_range[1] =
11236 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11237 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11238 }
11239 }
11240 }
11241 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11242
11243 /*precapture trigger*/
11244 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11245 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11246
11247 /*af trigger*/
11248 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11249 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11250
11251 /* ae & af regions */
11252 int32_t active_region[] = {
11253 gCamCapability[mCameraId]->active_array_size.left,
11254 gCamCapability[mCameraId]->active_array_size.top,
11255 gCamCapability[mCameraId]->active_array_size.left +
11256 gCamCapability[mCameraId]->active_array_size.width,
11257 gCamCapability[mCameraId]->active_array_size.top +
11258 gCamCapability[mCameraId]->active_array_size.height,
11259 0};
11260 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11261 sizeof(active_region) / sizeof(active_region[0]));
11262 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11263 sizeof(active_region) / sizeof(active_region[0]));
11264
11265 /* black level lock */
11266 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11267 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11268
Thierry Strudel3d639192016-09-09 11:52:26 -070011269 //special defaults for manual template
11270 if (type == CAMERA3_TEMPLATE_MANUAL) {
11271 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11272 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11273
11274 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11275 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11276
11277 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11278 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11279
11280 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11281 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11282
11283 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11284 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11285
11286 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11287 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11288 }
11289
11290
11291 /* TNR
11292      * We'll use this location to determine for which templates TNR will be set.
11293      * We will enable TNR if either the preview or the video stream requires TNR.
11294      * This is not to be confused with linking on a per-stream basis; that decision
11295      * is still made per session and is handled as part of stream configuration.
11296 */
11297 uint8_t tnr_enable = 0;
11298
11299 if (m_bTnrPreview || m_bTnrVideo) {
11300
11301 switch (type) {
11302 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11303 tnr_enable = 1;
11304 break;
11305
11306 default:
11307 tnr_enable = 0;
11308 break;
11309 }
11310
11311 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11312 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11313 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11314
11315 LOGD("TNR:%d with process plate %d for template:%d",
11316 tnr_enable, tnr_process_type, type);
11317 }
11318
11319 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011320 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011321 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11322
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011323 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011324 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11325
Shuzhen Wang920ea402017-05-03 08:49:39 -070011326 uint8_t related_camera_id = mCameraId;
11327 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011328
11329 /* CDS default */
11330 char prop[PROPERTY_VALUE_MAX];
11331 memset(prop, 0, sizeof(prop));
11332 property_get("persist.camera.CDS", prop, "Auto");
11333 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11334 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11335 if (CAM_CDS_MODE_MAX == cds_mode) {
11336 cds_mode = CAM_CDS_MODE_AUTO;
11337 }
11338
11339 /* Disabling CDS in templates which have TNR enabled*/
11340 if (tnr_enable)
11341 cds_mode = CAM_CDS_MODE_OFF;
11342
11343 int32_t mode = cds_mode;
11344 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011345
Thierry Strudel269c81a2016-10-12 12:13:59 -070011346 /* Manual Convergence AEC Speed is disabled by default*/
11347 float default_aec_speed = 0;
11348 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11349
11350 /* Manual Convergence AWB Speed is disabled by default*/
11351 float default_awb_speed = 0;
11352 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11353
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011354 // Set instant AEC to normal convergence by default
11355 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11356 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11357
Shuzhen Wang19463d72016-03-08 11:09:52 -080011358 /* hybrid ae */
11359 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11360
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011361 if (gExposeEnableZslKey) {
11362 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11363 }
11364
Thierry Strudel3d639192016-09-09 11:52:26 -070011365 mDefaultMetadata[type] = settings.release();
11366
11367 return mDefaultMetadata[type];
11368}
11369
11370/*===========================================================================
11371 * FUNCTION : setFrameParameters
11372 *
11373 * DESCRIPTION: set parameters per frame as requested in the metadata from
11374 * framework
11375 *
11376 * PARAMETERS :
11377 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011378 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011379 * @blob_request: Whether this request is a blob request or not
11380 *
11381 * RETURN : success: NO_ERROR
11382 * failure:
11383 *==========================================================================*/
11384int QCamera3HardwareInterface::setFrameParameters(
11385 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011386 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011387 int blob_request,
11388 uint32_t snapshotStreamId)
11389{
11390 /*translate from camera_metadata_t type to parm_type_t*/
11391 int rc = 0;
11392 int32_t hal_version = CAM_HAL_V3;
11393
11394 clear_metadata_buffer(mParameters);
11395 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11396 LOGE("Failed to set hal version in the parameters");
11397 return BAD_VALUE;
11398 }
11399
11400 /*we need to update the frame number in the parameters*/
11401 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11402 request->frame_number)) {
11403 LOGE("Failed to set the frame number in the parameters");
11404 return BAD_VALUE;
11405 }
11406
11407 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011408 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011409 LOGE("Failed to set stream type mask in the parameters");
11410 return BAD_VALUE;
11411 }
11412
11413 if (mUpdateDebugLevel) {
11414 uint32_t dummyDebugLevel = 0;
11415         /* The value of dummyDebugLevel is irrelevant. On
11416 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11417 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11418 dummyDebugLevel)) {
11419 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11420 return BAD_VALUE;
11421 }
11422 mUpdateDebugLevel = false;
11423 }
11424
11425 if(request->settings != NULL){
11426 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11427 if (blob_request)
11428 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11429 }
11430
11431 return rc;
11432}
11433
11434/*===========================================================================
11435 * FUNCTION : setReprocParameters
11436 *
11437 * DESCRIPTION: Translate framework metadata to the HAL metadata structure, and
11438 * return it.
11439 *
11440 * PARAMETERS :
11441 * @request : request that needs to be serviced
11442 *
11443 * RETURN : success: NO_ERROR
11444 * failure:
11445 *==========================================================================*/
11446int32_t QCamera3HardwareInterface::setReprocParameters(
11447 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11448 uint32_t snapshotStreamId)
11449{
11450 /*translate from camera_metadata_t type to parm_type_t*/
11451 int rc = 0;
11452
11453 if (NULL == request->settings){
11454 LOGE("Reprocess settings cannot be NULL");
11455 return BAD_VALUE;
11456 }
11457
11458 if (NULL == reprocParam) {
11459 LOGE("Invalid reprocessing metadata buffer");
11460 return BAD_VALUE;
11461 }
11462 clear_metadata_buffer(reprocParam);
11463
11464 /*we need to update the frame number in the parameters*/
11465 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11466 request->frame_number)) {
11467 LOGE("Failed to set the frame number in the parameters");
11468 return BAD_VALUE;
11469 }
11470
11471 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11472 if (rc < 0) {
11473 LOGE("Failed to translate reproc request");
11474 return rc;
11475 }
11476
11477 CameraMetadata frame_settings;
11478 frame_settings = request->settings;
11479 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11480 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11481 int32_t *crop_count =
11482 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11483 int32_t *crop_data =
11484 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11485 int32_t *roi_map =
11486 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11487 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11488 cam_crop_data_t crop_meta;
11489 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11490 crop_meta.num_of_streams = 1;
11491 crop_meta.crop_info[0].crop.left = crop_data[0];
11492 crop_meta.crop_info[0].crop.top = crop_data[1];
11493 crop_meta.crop_info[0].crop.width = crop_data[2];
11494 crop_meta.crop_info[0].crop.height = crop_data[3];
11495
11496 crop_meta.crop_info[0].roi_map.left =
11497 roi_map[0];
11498 crop_meta.crop_info[0].roi_map.top =
11499 roi_map[1];
11500 crop_meta.crop_info[0].roi_map.width =
11501 roi_map[2];
11502 crop_meta.crop_info[0].roi_map.height =
11503 roi_map[3];
11504
11505 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11506 rc = BAD_VALUE;
11507 }
11508 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11509 request->input_buffer->stream,
11510 crop_meta.crop_info[0].crop.left,
11511 crop_meta.crop_info[0].crop.top,
11512 crop_meta.crop_info[0].crop.width,
11513 crop_meta.crop_info[0].crop.height);
11514 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11515 request->input_buffer->stream,
11516 crop_meta.crop_info[0].roi_map.left,
11517 crop_meta.crop_info[0].roi_map.top,
11518 crop_meta.crop_info[0].roi_map.width,
11519 crop_meta.crop_info[0].roi_map.height);
11520 } else {
11521 LOGE("Invalid reprocess crop count %d!", *crop_count);
11522 }
11523 } else {
11524 LOGE("No crop data from matching output stream");
11525 }
11526
11527 /* These settings are not needed for regular requests so handle them specially for
11528 reprocess requests; information needed for EXIF tags */
11529 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11530 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11531 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11532 if (NAME_NOT_FOUND != val) {
11533 uint32_t flashMode = (uint32_t)val;
11534 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11535 rc = BAD_VALUE;
11536 }
11537 } else {
11538 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11539 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11540 }
11541 } else {
11542 LOGH("No flash mode in reprocess settings");
11543 }
11544
11545 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11546 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11547 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11548 rc = BAD_VALUE;
11549 }
11550 } else {
11551 LOGH("No flash state in reprocess settings");
11552 }
11553
11554 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11555 uint8_t *reprocessFlags =
11556 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11557 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11558 *reprocessFlags)) {
11559 rc = BAD_VALUE;
11560 }
11561 }
11562
Thierry Strudel54dc9782017-02-15 12:12:10 -080011563 // Add exif debug data to internal metadata
11564 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11565 mm_jpeg_debug_exif_params_t *debug_params =
11566 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11567 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11568 // AE
11569 if (debug_params->ae_debug_params_valid == TRUE) {
11570 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11571 debug_params->ae_debug_params);
11572 }
11573 // AWB
11574 if (debug_params->awb_debug_params_valid == TRUE) {
11575 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11576 debug_params->awb_debug_params);
11577 }
11578 // AF
11579 if (debug_params->af_debug_params_valid == TRUE) {
11580 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11581 debug_params->af_debug_params);
11582 }
11583 // ASD
11584 if (debug_params->asd_debug_params_valid == TRUE) {
11585 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11586 debug_params->asd_debug_params);
11587 }
11588 // Stats
11589 if (debug_params->stats_debug_params_valid == TRUE) {
11590 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11591 debug_params->stats_debug_params);
11592 }
11593 // BE Stats
11594 if (debug_params->bestats_debug_params_valid == TRUE) {
11595 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11596 debug_params->bestats_debug_params);
11597 }
11598 // BHIST
11599 if (debug_params->bhist_debug_params_valid == TRUE) {
11600 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11601 debug_params->bhist_debug_params);
11602 }
11603 // 3A Tuning
11604 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11605 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11606 debug_params->q3a_tuning_debug_params);
11607 }
11608 }
11609
11610    // Add metadata which reprocess needs
11611 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11612 cam_reprocess_info_t *repro_info =
11613 (cam_reprocess_info_t *)frame_settings.find
11614 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
11615        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
11616                repro_info->sensor_crop_info);
11617        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
11618                repro_info->camif_crop_info);
11619        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
11620                repro_info->isp_crop_info);
11621        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
11622                repro_info->cpp_crop_info);
11623        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
11624                repro_info->af_focal_length_ratio);
11625        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
11626                repro_info->pipeline_flip);
11627 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11628 repro_info->af_roi);
11629 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11630 repro_info->dyn_mask);
11631        /* If there is ANDROID_JPEG_ORIENTATION in the frame settings, the
11632           CAM_INTF_PARM_ROTATION metadata has already been added in
11633           translateToHalMetadata. HAL needs to keep this new rotation
11634           metadata; otherwise, the old rotation info saved in the vendor tag
11635           would be used. */
11636        IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11637                CAM_INTF_PARM_ROTATION, reprocParam) {
11638            LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11639        } else {
11640            ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
11641                    repro_info->rotation_info);
11642        }
11643    }
11644
11645    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11646       to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding;
11647       roi.width and roi.height are the final JPEG size. For now, HAL only checks
11648       this for reprocess requests (see the illustrative sketch after this function). */
11649 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11650 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11651 uint8_t *enable =
11652 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11653 if (*enable == TRUE) {
11654 int32_t *crop_data =
11655 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11656 cam_stream_crop_info_t crop_meta;
11657 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11658 crop_meta.stream_id = 0;
11659 crop_meta.crop.left = crop_data[0];
11660 crop_meta.crop.top = crop_data[1];
11661 crop_meta.crop.width = crop_data[2];
11662 crop_meta.crop.height = crop_data[3];
11663            // The JPEG crop roi should match cpp output size
11664 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11665 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11666 crop_meta.roi_map.left = 0;
11667 crop_meta.roi_map.top = 0;
11668 crop_meta.roi_map.width = cpp_crop->crop.width;
11669 crop_meta.roi_map.height = cpp_crop->crop.height;
11670            }
11671 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11672 crop_meta);
11673            LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
11674                    crop_meta.crop.left, crop_meta.crop.top,
11675                    crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11676            LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
11677                    crop_meta.roi_map.left, crop_meta.roi_map.top,
11678                    crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11679
11680 // Add JPEG scale information
11681 cam_dimension_t scale_dim;
11682 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11683 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11684 int32_t *roi =
11685 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11686 scale_dim.width = roi[2];
11687 scale_dim.height = roi[3];
11688 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11689 scale_dim);
11690 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11691 scale_dim.width, scale_dim.height, mCameraId);
11692 }
11693        }
11694 }
11695
11696 return rc;
11697}
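// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the original HAL: how a client might
// populate the JPEG encode-crop vendor tags consumed by the reprocess path
// above. Only the tag names and the int32[4] {left, top, width, height} layout
// come from the code above; the helper name and the example geometry are
// hypothetical.
// ---------------------------------------------------------------------------
#if 0
static void sketchAddJpegEncodeCrop(CameraMetadata &settings)
{
    uint8_t enable = 1;                          // QCAMERA3_JPEG_ENCODE_CROP_ENABLE
    int32_t cropRect[4] = {0, 0, 3264, 2448};    // left, top, width, height
    int32_t scaleRoi[4] = {0, 0, 1920, 1440};    // roi[2]/roi[3] = final JPEG size

    settings.update(QCAMERA3_JPEG_ENCODE_CROP_ENABLE, &enable, 1);
    settings.update(QCAMERA3_JPEG_ENCODE_CROP_RECT, cropRect, 4);
    settings.update(QCAMERA3_JPEG_ENCODE_CROP_ROI, scaleRoi, 4);
}
#endif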
11698
11699/*===========================================================================
11700 * FUNCTION : saveRequestSettings
11701 *
11702 * DESCRIPTION: Add any settings that might have changed to the request settings
11703 * and save the settings to be applied on the frame
11704 *
11705 * PARAMETERS :
11706 * @jpegMetadata : the extracted and/or modified jpeg metadata
11707 * @request : request with initial settings
11708 *
11709 * RETURN :
11710 * camera_metadata_t* : pointer to the saved request settings
11711 *==========================================================================*/
11712camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11713 const CameraMetadata &jpegMetadata,
11714 camera3_capture_request_t *request)
11715{
11716 camera_metadata_t *resultMetadata;
11717 CameraMetadata camMetadata;
11718 camMetadata = request->settings;
11719
11720 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11721 int32_t thumbnail_size[2];
11722 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11723 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11724 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11725 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11726 }
11727
11728 if (request->input_buffer != NULL) {
11729 uint8_t reprocessFlags = 1;
11730 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11731 (uint8_t*)&reprocessFlags,
11732 sizeof(reprocessFlags));
11733 }
11734
11735 resultMetadata = camMetadata.release();
11736 return resultMetadata;
11737}
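// Illustrative sketch, not part of the original HAL: saveRequestSettings()
// returns ownership of a cloned camera_metadata_t (via CameraMetadata::release()),
// so whoever stores the pointer must eventually free it. A minimal, hypothetical
// RAII holder makes that ownership rule explicit.
#if 0
struct SavedRequestSettings {
    camera_metadata_t *meta;
    explicit SavedRequestSettings(camera_metadata_t *m) : meta(m) {}
    ~SavedRequestSettings() { if (meta != NULL) free_camera_metadata(meta); }
};
// Usage: SavedRequestSettings saved(saveRequestSettings(jpegMetadata, request));
#endif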
11738
11739/*===========================================================================
11740 * FUNCTION : setHalFpsRange
11741 *
11742 * DESCRIPTION: set FPS range parameter
11743 *
11744 *
11745 * PARAMETERS :
11746 * @settings : Metadata from framework
11747 * @hal_metadata: Metadata buffer
11748 *
11749 *
11750 * RETURN : success: NO_ERROR
11751 *              failure: BAD_VALUE
11752 *==========================================================================*/
11753int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11754 metadata_buffer_t *hal_metadata)
11755{
11756 int32_t rc = NO_ERROR;
11757 cam_fps_range_t fps_range;
11758 fps_range.min_fps = (float)
11759 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11760 fps_range.max_fps = (float)
11761 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11762 fps_range.video_min_fps = fps_range.min_fps;
11763 fps_range.video_max_fps = fps_range.max_fps;
11764
11765 LOGD("aeTargetFpsRange fps: [%f %f]",
11766 fps_range.min_fps, fps_range.max_fps);
11767 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11768 * follows:
11769 * ---------------------------------------------------------------|
11770 * Video stream is absent in configure_streams |
11771 * (Camcorder preview before the first video record) |
11772 * ---------------------------------------------------------------|
11773 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11774 * | | | vid_min/max_fps|
11775 * ---------------------------------------------------------------|
11776 * NO | [ 30, 240] | 240 | [240, 240] |
11777 * |-------------|-------------|----------------|
11778 * | [240, 240] | 240 | [240, 240] |
11779 * ---------------------------------------------------------------|
11780 * Video stream is present in configure_streams |
11781 * ---------------------------------------------------------------|
11782 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11783 * | | | vid_min/max_fps|
11784 * ---------------------------------------------------------------|
11785 * NO | [ 30, 240] | 240 | [240, 240] |
11786 * (camcorder prev |-------------|-------------|----------------|
11787 * after video rec | [240, 240] | 240 | [240, 240] |
11788 * is stopped) | | | |
11789 * ---------------------------------------------------------------|
11790 * YES | [ 30, 240] | 240 | [240, 240] |
11791 * |-------------|-------------|----------------|
11792 * | [240, 240] | 240 | [240, 240] |
11793 * ---------------------------------------------------------------|
11794 * When Video stream is absent in configure_streams,
11795 * preview fps = sensor_fps / batchsize
11796 * Eg: for 240fps at batchSize 4, preview = 60fps
11797 * for 120fps at batchSize 4, preview = 30fps
11798 *
11799 * When video stream is present in configure_streams, preview fps is as per
11800 * the ratio of preview buffers to video buffers requested in process
11801 * capture request
11802 */
11803 mBatchSize = 0;
11804 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11805 fps_range.min_fps = fps_range.video_max_fps;
11806 fps_range.video_min_fps = fps_range.video_max_fps;
11807 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11808 fps_range.max_fps);
11809 if (NAME_NOT_FOUND != val) {
11810 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11811 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11812 return BAD_VALUE;
11813 }
11814
11815 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11816 /* If batchmode is currently in progress and the fps changes,
11817 * set the flag to restart the sensor */
11818 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11819 (mHFRVideoFps != fps_range.max_fps)) {
11820 mNeedSensorRestart = true;
11821 }
11822 mHFRVideoFps = fps_range.max_fps;
11823 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11824 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11825 mBatchSize = MAX_HFR_BATCH_SIZE;
11826 }
11827 }
11828 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11829
11830 }
11831 } else {
11832 /* HFR mode is session param in backend/ISP. This should be reset when
11833 * in non-HFR mode */
11834 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11835 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11836 return BAD_VALUE;
11837 }
11838 }
11839 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11840 return BAD_VALUE;
11841 }
11842 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11843 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11844 return rc;
11845}
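// Illustrative sketch, not part of the original HAL: the relationship described
// in the HFR table above. In constrained high-speed mode the sensor runs at the
// requested max fps, and when no video stream is configured the preview rate is
// the sensor fps divided by the batch size. The helper names and the parameters
// (previewFpsForHfr, maxBatch) are hypothetical stand-ins for the HAL's private
// constants.
#if 0
static uint32_t sketchHfrBatchSize(float hfrFps, float previewFpsForHfr, uint32_t maxBatch)
{
    uint32_t batch = (uint32_t)(hfrFps / previewFpsForHfr);   // e.g. 240 / 30 = 8
    return (batch > maxBatch) ? maxBatch : batch;
}

static float sketchPreviewFps(float sensorFps, uint32_t batchSize)
{
    // 240 fps with batch size 4 -> 60 fps preview; 120 fps with batch size 4 -> 30 fps.
    return (batchSize > 0) ? sensorFps / (float)batchSize : sensorFps;
}
#endif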
11846
11847/*===========================================================================
11848 * FUNCTION : translateToHalMetadata
11849 *
11850 * DESCRIPTION: read the framework camera_metadata_t settings and translate
11851 *              them into HAL metadata_buffer_t entries
11852 *
11853 * PARAMETERS :
11854 *   @request          : request sent from framework
11855 *   @hal_metadata     : HAL metadata buffer to populate
11856 *   @snapshotStreamId : stream ID of the snapshot stream
11857 * RETURN : success: NO_ERROR
11858 *          failure: BAD_VALUE
11859 *==========================================================================*/
11860int QCamera3HardwareInterface::translateToHalMetadata
11861 (const camera3_capture_request_t *request,
11862 metadata_buffer_t *hal_metadata,
11863        uint32_t snapshotStreamId) {
11864 if (request == nullptr || hal_metadata == nullptr) {
11865 return BAD_VALUE;
11866 }
11867
11868 int64_t minFrameDuration = getMinFrameDuration(request);
11869
11870 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11871 minFrameDuration);
11872}
11873
11874int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11875 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11876 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11877
11878    int rc = 0;
11879    CameraMetadata frame_settings;
11880    frame_settings = frameworkMetadata;
11881
11882 /* Do not change the order of the following list unless you know what you are
11883 * doing.
11884 * The order is laid out in such a way that parameters in the front of the table
11885 * may be used to override the parameters later in the table. Examples are:
11886 * 1. META_MODE should precede AEC/AWB/AF MODE
11887 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11888 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11889 * 4. Any mode should precede its corresponding settings
11890 */
11891 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11892 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11893 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11894 rc = BAD_VALUE;
11895 }
11896 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11897 if (rc != NO_ERROR) {
11898 LOGE("extractSceneMode failed");
11899 }
11900 }
11901
11902 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11903 uint8_t fwk_aeMode =
11904 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11905 uint8_t aeMode;
11906 int32_t redeye;
11907
11908 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11909 aeMode = CAM_AE_MODE_OFF;
11910        } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11911            aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
11912        } else {
11913 aeMode = CAM_AE_MODE_ON;
11914 }
11915 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11916 redeye = 1;
11917 } else {
11918 redeye = 0;
11919 }
11920
11921 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11922 fwk_aeMode);
11923 if (NAME_NOT_FOUND != val) {
11924 int32_t flashMode = (int32_t)val;
11925 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11926 }
11927
11928 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11929 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11930 rc = BAD_VALUE;
11931 }
11932 }
11933
11934 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11935 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11936 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11937 fwk_whiteLevel);
11938 if (NAME_NOT_FOUND != val) {
11939 uint8_t whiteLevel = (uint8_t)val;
11940 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11941 rc = BAD_VALUE;
11942 }
11943 }
11944 }
11945
11946 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11947 uint8_t fwk_cacMode =
11948 frame_settings.find(
11949 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11950 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11951 fwk_cacMode);
11952 if (NAME_NOT_FOUND != val) {
11953 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11954 bool entryAvailable = FALSE;
11955 // Check whether Frameworks set CAC mode is supported in device or not
11956 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11957 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11958 entryAvailable = TRUE;
11959 break;
11960 }
11961 }
11962 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11963 // If entry not found then set the device supported mode instead of frameworks mode i.e,
11964 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11965 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11966 if (entryAvailable == FALSE) {
11967 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11968 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11969 } else {
11970 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11971                        // High is not supported, so fall back to FAST; the spec says the
11972                        // underlying device implementation can be the same for both modes.
11973 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11974 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11975 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
11976 // in order to avoid the fps drop due to high quality
11977 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11978 } else {
11979 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11980 }
11981 }
11982 }
11983 LOGD("Final cacMode is %d", cacMode);
11984 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11985 rc = BAD_VALUE;
11986 }
11987 } else {
11988 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11989 }
11990 }
11991
11992    char af_value[PROPERTY_VALUE_MAX];
11993    property_get("persist.camera.af.infinity", af_value, "0");
11994
11995    uint8_t fwk_focusMode = 0;
11996    if (atoi(af_value) == 0) {
11997        if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
11998            fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
11999            int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12000 fwk_focusMode);
12001 if (NAME_NOT_FOUND != val) {
12002 uint8_t focusMode = (uint8_t)val;
12003 LOGD("set focus mode %d", focusMode);
12004 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12005 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12006 rc = BAD_VALUE;
12007 }
12008            }
12009        }
12010    } else {
12011 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12012 LOGE("Focus forced to infinity %d", focusMode);
12013 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12014 rc = BAD_VALUE;
12015 }
12016    }
12017
12018    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12019            fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
12020        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12021 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12022 focalDistance)) {
12023 rc = BAD_VALUE;
12024 }
12025 }
12026
12027 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12028 uint8_t fwk_antibandingMode =
12029 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12030 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12031 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12032 if (NAME_NOT_FOUND != val) {
12033 uint32_t hal_antibandingMode = (uint32_t)val;
12034        if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12035            if (m60HzZone) {
12036                hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12037            } else {
12038                hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12039            }
12040        }
12041        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12042 hal_antibandingMode)) {
12043 rc = BAD_VALUE;
12044 }
12045 }
12046 }
12047
12048 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12049 int32_t expCompensation = frame_settings.find(
12050 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12051 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12052 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12053 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12054 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
12055        LOGD("Setting compensation:%d", expCompensation);
12056        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12057 expCompensation)) {
12058 rc = BAD_VALUE;
12059 }
12060 }
12061
12062 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12063 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12064 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12065 rc = BAD_VALUE;
12066 }
12067 }
12068 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12069 rc = setHalFpsRange(frame_settings, hal_metadata);
12070 if (rc != NO_ERROR) {
12071 LOGE("setHalFpsRange failed");
12072 }
12073 }
12074
12075 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12076 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12077 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12078 rc = BAD_VALUE;
12079 }
12080 }
12081
12082 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12083 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12084 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12085 fwk_effectMode);
12086 if (NAME_NOT_FOUND != val) {
12087 uint8_t effectMode = (uint8_t)val;
12088 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12089 rc = BAD_VALUE;
12090 }
12091 }
12092 }
12093
12094 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12095 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12096 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12097 colorCorrectMode)) {
12098 rc = BAD_VALUE;
12099 }
12100 }
12101
12102 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12103 cam_color_correct_gains_t colorCorrectGains;
12104 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12105 colorCorrectGains.gains[i] =
12106 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12107 }
12108 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12109 colorCorrectGains)) {
12110 rc = BAD_VALUE;
12111 }
12112 }
12113
12114 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12115 cam_color_correct_matrix_t colorCorrectTransform;
12116 cam_rational_type_t transform_elem;
12117 size_t num = 0;
12118 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12119 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12120 transform_elem.numerator =
12121 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12122 transform_elem.denominator =
12123 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12124 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12125 num++;
12126 }
12127 }
12128 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12129 colorCorrectTransform)) {
12130 rc = BAD_VALUE;
12131 }
12132 }
12133
12134 cam_trigger_t aecTrigger;
12135 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12136 aecTrigger.trigger_id = -1;
12137 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12138 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12139 aecTrigger.trigger =
12140 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12141 aecTrigger.trigger_id =
12142 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12143 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12144 aecTrigger)) {
12145 rc = BAD_VALUE;
12146 }
12147 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12148 aecTrigger.trigger, aecTrigger.trigger_id);
12149 }
12150
12151 /*af_trigger must come with a trigger id*/
12152 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12153 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12154 cam_trigger_t af_trigger;
12155 af_trigger.trigger =
12156 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12157 af_trigger.trigger_id =
12158 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12159 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12160 rc = BAD_VALUE;
12161 }
12162 LOGD("AfTrigger: %d AfTriggerID: %d",
12163 af_trigger.trigger, af_trigger.trigger_id);
12164 }
12165
12166 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12167 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12168 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12169 rc = BAD_VALUE;
12170 }
12171 }
12172 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12173 cam_edge_application_t edge_application;
12174 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
12175
12176        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12177            edge_application.sharpness = 0;
12178        } else {
12179            edge_application.sharpness =
12180 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12181 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12182 int32_t sharpness =
12183 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12184 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12185 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12186 LOGD("Setting edge mode sharpness %d", sharpness);
12187 edge_application.sharpness = sharpness;
12188 }
12189 }
12190        }
12191 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12192 rc = BAD_VALUE;
12193 }
12194 }
12195
12196 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12197 int32_t respectFlashMode = 1;
12198 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12199 uint8_t fwk_aeMode =
12200 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12201            if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12202                    fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12203                    fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12204                respectFlashMode = 0;
12205 LOGH("AE Mode controls flash, ignore android.flash.mode");
12206 }
12207 }
12208 if (respectFlashMode) {
12209 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12210 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12211 LOGH("flash mode after mapping %d", val);
12212 // To check: CAM_INTF_META_FLASH_MODE usage
12213 if (NAME_NOT_FOUND != val) {
12214 uint8_t flashMode = (uint8_t)val;
12215 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12216 rc = BAD_VALUE;
12217 }
12218 }
12219 }
12220 }
12221
12222 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12223 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12224 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12225 rc = BAD_VALUE;
12226 }
12227 }
12228
12229 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12230 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12231 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12232 flashFiringTime)) {
12233 rc = BAD_VALUE;
12234 }
12235 }
12236
12237 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12238 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12239 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12240 hotPixelMode)) {
12241 rc = BAD_VALUE;
12242 }
12243 }
12244
12245 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12246 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12247 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12248 lensAperture)) {
12249 rc = BAD_VALUE;
12250 }
12251 }
12252
12253 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12254 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12255 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12256 filterDensity)) {
12257 rc = BAD_VALUE;
12258 }
12259 }
12260
12261 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12262 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12263 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12264 focalLength)) {
12265 rc = BAD_VALUE;
12266 }
12267 }
12268
12269 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12270 uint8_t optStabMode =
12271 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12272 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12273 optStabMode)) {
12274 rc = BAD_VALUE;
12275 }
12276 }
12277
12278 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12279 uint8_t videoStabMode =
12280 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12281 LOGD("videoStabMode from APP = %d", videoStabMode);
12282 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12283 videoStabMode)) {
12284 rc = BAD_VALUE;
12285 }
12286 }
12287
12288
12289 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12290 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12291 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12292 noiseRedMode)) {
12293 rc = BAD_VALUE;
12294 }
12295 }
12296
12297 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12298 float reprocessEffectiveExposureFactor =
12299 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12300 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12301 reprocessEffectiveExposureFactor)) {
12302 rc = BAD_VALUE;
12303 }
12304 }
12305
12306 cam_crop_region_t scalerCropRegion;
12307 bool scalerCropSet = false;
12308 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12309 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12310 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12311 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12312 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12313
12314 // Map coordinate system from active array to sensor output.
12315 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12316 scalerCropRegion.width, scalerCropRegion.height);
12317
12318 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12319 scalerCropRegion)) {
12320 rc = BAD_VALUE;
12321 }
12322 scalerCropSet = true;
12323 }
12324
12325 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12326 int64_t sensorExpTime =
12327 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12328 LOGD("setting sensorExpTime %lld", sensorExpTime);
12329 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12330 sensorExpTime)) {
12331 rc = BAD_VALUE;
12332 }
12333 }
12334
12335 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12336 int64_t sensorFrameDuration =
12337 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
12338        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12339 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12340 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12341 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12342 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12343 sensorFrameDuration)) {
12344 rc = BAD_VALUE;
12345 }
12346 }
12347
12348 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12349 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12350 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12351 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12352 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12353 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12354 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12355 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12356 sensorSensitivity)) {
12357 rc = BAD_VALUE;
12358 }
12359 }
12360
12361#ifndef USE_HAL_3_3
12362 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12363 int32_t ispSensitivity =
12364 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12365 if (ispSensitivity <
12366 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12367 ispSensitivity =
12368 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12369 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12370 }
12371 if (ispSensitivity >
12372 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12373 ispSensitivity =
12374 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12375 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12376 }
12377 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12378 ispSensitivity)) {
12379 rc = BAD_VALUE;
12380 }
12381 }
12382#endif
12383
12384    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12385 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12386 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12387 rc = BAD_VALUE;
12388 }
12389 }
12390
12391 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12392 uint8_t fwk_facedetectMode =
12393 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12394
12395 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12396 fwk_facedetectMode);
12397
12398 if (NAME_NOT_FOUND != val) {
12399 uint8_t facedetectMode = (uint8_t)val;
12400 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12401 facedetectMode)) {
12402 rc = BAD_VALUE;
12403 }
12404 }
12405 }
12406
12407    if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
12408        uint8_t histogramMode =
12409                frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
12410        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12411 histogramMode)) {
12412 rc = BAD_VALUE;
12413 }
12414 }
12415
12416 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12417 uint8_t sharpnessMapMode =
12418 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12419 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12420 sharpnessMapMode)) {
12421 rc = BAD_VALUE;
12422 }
12423 }
12424
12425 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12426 uint8_t tonemapMode =
12427 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12428 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12429 rc = BAD_VALUE;
12430 }
12431 }
12432 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12433 /*All tonemap channels will have the same number of points*/
12434 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12435 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12436 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12437 cam_rgb_tonemap_curves tonemapCurves;
12438 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12439 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12440 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12441 tonemapCurves.tonemap_points_cnt,
12442 CAM_MAX_TONEMAP_CURVE_SIZE);
12443 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12444 }
12445
12446 /* ch0 = G*/
12447 size_t point = 0;
12448 cam_tonemap_curve_t tonemapCurveGreen;
12449 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12450 for (size_t j = 0; j < 2; j++) {
12451 tonemapCurveGreen.tonemap_points[i][j] =
12452 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12453 point++;
12454 }
12455 }
12456 tonemapCurves.curves[0] = tonemapCurveGreen;
12457
12458 /* ch 1 = B */
12459 point = 0;
12460 cam_tonemap_curve_t tonemapCurveBlue;
12461 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12462 for (size_t j = 0; j < 2; j++) {
12463 tonemapCurveBlue.tonemap_points[i][j] =
12464 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12465 point++;
12466 }
12467 }
12468 tonemapCurves.curves[1] = tonemapCurveBlue;
12469
12470 /* ch 2 = R */
12471 point = 0;
12472 cam_tonemap_curve_t tonemapCurveRed;
12473 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12474 for (size_t j = 0; j < 2; j++) {
12475 tonemapCurveRed.tonemap_points[i][j] =
12476 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12477 point++;
12478 }
12479 }
12480 tonemapCurves.curves[2] = tonemapCurveRed;
12481
12482 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12483 tonemapCurves)) {
12484 rc = BAD_VALUE;
12485 }
12486 }
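    // Note (clarification): each ANDROID_TONEMAP_CURVE_* entry above is a flat
    // float array of interleaved (Pin, Pout) pairs, which is why the point count
    // is entry.count / 2 and the inner loops copy two floats per point into
    // tonemap_points[i][0..1].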
12487
12488 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12489 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12490 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12491 captureIntent)) {
12492 rc = BAD_VALUE;
12493 }
12494 }
12495
12496 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12497 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12498 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12499 blackLevelLock)) {
12500 rc = BAD_VALUE;
12501 }
12502 }
12503
12504 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12505 uint8_t lensShadingMapMode =
12506 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12507 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12508 lensShadingMapMode)) {
12509 rc = BAD_VALUE;
12510 }
12511 }
12512
12513 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12514 cam_area_t roi;
12515 bool reset = true;
12516        convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
12517
12518 // Map coordinate system from active array to sensor output.
12519 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12520 roi.rect.height);
12521
12522 if (scalerCropSet) {
12523 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12524 }
12525 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12526 rc = BAD_VALUE;
12527 }
12528 }
12529
12530 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12531 cam_area_t roi;
12532 bool reset = true;
12533        convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
12534
12535 // Map coordinate system from active array to sensor output.
12536 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12537 roi.rect.height);
12538
12539 if (scalerCropSet) {
12540 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12541 }
12542 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12543 rc = BAD_VALUE;
12544 }
12545 }
12546
12547 // CDS for non-HFR non-video mode
12548 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12549 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12550 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12551 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12552 LOGE("Invalid CDS mode %d!", *fwk_cds);
12553 } else {
12554 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12555 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12556 rc = BAD_VALUE;
12557 }
12558 }
12559 }
12560
12561    // Video HDR
12562    cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
12563    if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
12564        vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12565    }
12566    if (m_bVideoHdrEnabled)
12567        vhdr = CAM_VIDEO_HDR_MODE_ON;
12568
12569    int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12570
12571    if (vhdr != curr_hdr_state)
12572        LOGH("PROFILE_SET_HDR_MODE %d", vhdr);
12573
12574    rc = setVideoHdrMode(mParameters, vhdr);
12575    if (rc != NO_ERROR) {
12576        LOGE("setVideoHdrMode failed");
12577    }
12578
12579 //IR
12580 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12581 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12582 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
12583        uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12584        uint8_t isIRon = 0;
12585
12586        isIRon = (fwk_ir > 0) ? 1 : 0;
12587        if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12588            LOGE("Invalid IR mode %d!", fwk_ir);
12589        } else {
12590            if (isIRon != curr_ir_state)
12591                LOGH("PROFILE_SET_IR_MODE %d", isIRon);
12592
12593            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12594 CAM_INTF_META_IR_MODE, fwk_ir)) {
12595 rc = BAD_VALUE;
12596 }
12597 }
12598 }
12599
12600    // Binning Correction Mode
12601 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12602 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12603 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12604 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12605 || (0 > fwk_binning_correction)) {
12606 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12607 } else {
12608 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12609 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12610 rc = BAD_VALUE;
12611 }
12612 }
12613 }
12614
12615    if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12616 float aec_speed;
12617 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12618 LOGD("AEC Speed :%f", aec_speed);
12619        if (aec_speed < 0) {
12620            LOGE("Invalid AEC convergence speed %f!", aec_speed);
12621 } else {
12622 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12623 aec_speed)) {
12624 rc = BAD_VALUE;
12625 }
12626 }
12627 }
12628
12629 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12630 float awb_speed;
12631 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12632 LOGD("AWB Speed :%f", awb_speed);
12633        if (awb_speed < 0) {
12634            LOGE("Invalid AWB convergence speed %f!", awb_speed);
12635 } else {
12636 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12637 awb_speed)) {
12638 rc = BAD_VALUE;
12639 }
12640 }
12641 }
12642
12643    // TNR
12644    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12645            frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12646        uint8_t b_TnrRequested = 0;
12647        uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
12648        cam_denoise_param_t tnr;
12649        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12650        tnr.process_plates =
12651                (cam_denoise_process_type_t)frame_settings.find(
12652                QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12653        b_TnrRequested = tnr.denoise_enable;
12654
12655        if (b_TnrRequested != curr_tnr_state)
12656            LOGH("PROFILE_SET_TNR_MODE %d", b_TnrRequested);
12657
12658        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12659 rc = BAD_VALUE;
12660 }
12661 }
12662
12663    if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
12664        int32_t* exposure_metering_mode =
12665                frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
12666        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12667 *exposure_metering_mode)) {
12668 rc = BAD_VALUE;
12669 }
12670 }
12671
12672    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12673 int32_t fwk_testPatternMode =
12674 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12675 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12676 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12677
12678 if (NAME_NOT_FOUND != testPatternMode) {
12679 cam_test_pattern_data_t testPatternData;
12680 memset(&testPatternData, 0, sizeof(testPatternData));
12681 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12682 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12683 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12684 int32_t *fwk_testPatternData =
12685 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12686 testPatternData.r = fwk_testPatternData[0];
12687 testPatternData.b = fwk_testPatternData[3];
12688 switch (gCamCapability[mCameraId]->color_arrangement) {
12689 case CAM_FILTER_ARRANGEMENT_RGGB:
12690 case CAM_FILTER_ARRANGEMENT_GRBG:
12691 testPatternData.gr = fwk_testPatternData[1];
12692 testPatternData.gb = fwk_testPatternData[2];
12693 break;
12694 case CAM_FILTER_ARRANGEMENT_GBRG:
12695 case CAM_FILTER_ARRANGEMENT_BGGR:
12696 testPatternData.gr = fwk_testPatternData[2];
12697 testPatternData.gb = fwk_testPatternData[1];
12698 break;
12699 default:
12700 LOGE("color arrangement %d is not supported",
12701 gCamCapability[mCameraId]->color_arrangement);
12702 break;
12703 }
12704 }
12705 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12706 testPatternData)) {
12707 rc = BAD_VALUE;
12708 }
12709 } else {
12710 LOGE("Invalid framework sensor test pattern mode %d",
12711 fwk_testPatternMode);
12712 }
12713 }
12714
12715 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12716 size_t count = 0;
12717 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12718 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12719 gps_coords.data.d, gps_coords.count, count);
12720 if (gps_coords.count != count) {
12721 rc = BAD_VALUE;
12722 }
12723 }
12724
12725 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12726 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12727 size_t count = 0;
12728 const char *gps_methods_src = (const char *)
12729 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12730 memset(gps_methods, '\0', sizeof(gps_methods));
12731 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12732 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12733 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12734 if (GPS_PROCESSING_METHOD_SIZE != count) {
12735 rc = BAD_VALUE;
12736 }
12737 }
12738
12739 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12740 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12741 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12742 gps_timestamp)) {
12743 rc = BAD_VALUE;
12744 }
12745 }
12746
12747 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12748 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12749 cam_rotation_info_t rotation_info;
12750 if (orientation == 0) {
12751 rotation_info.rotation = ROTATE_0;
12752 } else if (orientation == 90) {
12753 rotation_info.rotation = ROTATE_90;
12754 } else if (orientation == 180) {
12755 rotation_info.rotation = ROTATE_180;
12756 } else if (orientation == 270) {
12757 rotation_info.rotation = ROTATE_270;
12758 }
12759        rotation_info.device_rotation = ROTATE_0;
12760        rotation_info.streamId = snapshotStreamId;
12761 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12762 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12763 rc = BAD_VALUE;
12764 }
12765 }
12766
12767 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12768 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12769 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12770 rc = BAD_VALUE;
12771 }
12772 }
12773
12774 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12775 uint32_t thumb_quality = (uint32_t)
12776 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12777 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12778 thumb_quality)) {
12779 rc = BAD_VALUE;
12780 }
12781 }
12782
12783 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12784 cam_dimension_t dim;
12785 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12786 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12787 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12788 rc = BAD_VALUE;
12789 }
12790 }
12791
12792 // Internal metadata
12793 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12794 size_t count = 0;
12795 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12796 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12797 privatedata.data.i32, privatedata.count, count);
12798 if (privatedata.count != count) {
12799 rc = BAD_VALUE;
12800 }
12801 }
12802
12803    // ISO/Exposure Priority
12804 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12805 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12806 cam_priority_mode_t mode =
12807 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12808 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12809 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12810 use_iso_exp_pty.previewOnly = FALSE;
12811 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12812 use_iso_exp_pty.value = *ptr;
12813
12814 if(CAM_ISO_PRIORITY == mode) {
12815 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12816 use_iso_exp_pty)) {
12817 rc = BAD_VALUE;
12818 }
12819 }
12820 else {
12821 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12822 use_iso_exp_pty)) {
12823 rc = BAD_VALUE;
12824 }
12825 }
12826
12827 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12828 rc = BAD_VALUE;
12829 }
12830 }
12831 } else {
12832 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12833 rc = BAD_VALUE;
12834        }
12835 }
12836
12837 // Saturation
12838 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12839 int32_t* use_saturation =
12840 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12841 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12842 rc = BAD_VALUE;
12843 }
12844 }
12845
12846    // EV step
12847 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12848 gCamCapability[mCameraId]->exp_compensation_step)) {
12849 rc = BAD_VALUE;
12850 }
12851
12852 // CDS info
12853 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12854 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12855 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12856
12857 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12858 CAM_INTF_META_CDS_DATA, *cdsData)) {
12859 rc = BAD_VALUE;
12860 }
12861 }
12862
12863    // Hybrid AE
12864 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12865 uint8_t *hybrid_ae = (uint8_t *)
12866 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12867
12868 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12869 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12870 rc = BAD_VALUE;
12871 }
12872 }
12873
12874    // Histogram
12875 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12876 uint8_t histogramMode =
12877 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12878 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12879 histogramMode)) {
12880 rc = BAD_VALUE;
12881 }
12882 }
12883
12884 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12885 int32_t histogramBins =
12886 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12887 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12888 histogramBins)) {
12889 rc = BAD_VALUE;
12890 }
12891 }
12892
12893    // Tracking AF
12894 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12895 uint8_t trackingAfTrigger =
12896 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12897 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12898 trackingAfTrigger)) {
12899 rc = BAD_VALUE;
12900 }
12901 }
12902
Thierry Strudel3d639192016-09-09 11:52:26 -070012903 return rc;
12904}
12905
12906/*===========================================================================
12907 * FUNCTION : captureResultCb
12908 *
12909 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12910 *
12911 * PARAMETERS :
12912 *   @metadata : metadata super buffer from mm-camera-interface
12913 *   @buffer   : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12914 *   @userdata : opaque pointer to the owning QCamera3HardwareInterface instance
12915 *
12916 * RETURN : NONE
12917 *==========================================================================*/
12918void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12919 camera3_stream_buffer_t *buffer,
12920 uint32_t frame_number, bool isInputBuffer, void *userdata)
12921{
12922 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12923 if (hw == NULL) {
12924 LOGE("Invalid hw %p", hw);
12925 return;
12926 }
12927
12928 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12929 return;
12930}
12931
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012932/*===========================================================================
12933 * FUNCTION : setBufferErrorStatus
12934 *
12935 * DESCRIPTION: Callback handler for channels to report any buffer errors
12936 *
12937 * PARAMETERS :
12938 *   @ch : channel reporting the buffer error
12939 * @frame_number : frame number on which buffer error is reported on
12940 * @buffer_status : buffer error status
12941 * @userdata: userdata
12942 *
12943 * RETURN : NONE
12944 *==========================================================================*/
12945void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12946 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12947{
12948 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12949 if (hw == NULL) {
12950 LOGE("Invalid hw %p", hw);
12951 return;
12952 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012953
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012954 hw->setBufferErrorStatus(ch, frame_number, err);
12955 return;
12956}
12957
12958void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12959 uint32_t frameNumber, camera3_buffer_status_t err)
12960{
12961 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12962 pthread_mutex_lock(&mMutex);
12963
12964 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12965 if (req.frame_number != frameNumber)
12966 continue;
12967 for (auto& k : req.mPendingBufferList) {
12968 if(k.stream->priv == ch) {
12969 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12970 }
12971 }
12972 }
12973
12974 pthread_mutex_unlock(&mMutex);
12975 return;
12976}
Thierry Strudel3d639192016-09-09 11:52:26 -070012977/*===========================================================================
12978 * FUNCTION : initialize
12979 *
12980 * DESCRIPTION: Pass framework callback pointers to HAL
12981 *
12982 * PARAMETERS :
12983 *   @device       : camera3 device handle
12984 *   @callback_ops : framework callback function pointers
12985 * RETURN : Success : 0
12986 * Failure: -ENODEV
12987 *==========================================================================*/
12988
12989int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12990 const camera3_callback_ops_t *callback_ops)
12991{
12992 LOGD("E");
12993 QCamera3HardwareInterface *hw =
12994 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12995 if (!hw) {
12996 LOGE("NULL camera device");
12997 return -ENODEV;
12998 }
12999
13000 int rc = hw->initialize(callback_ops);
13001 LOGD("X");
13002 return rc;
13003}
13004
13005/*===========================================================================
13006 * FUNCTION : configure_streams
13007 *
13008 * DESCRIPTION: Validate the device handle and configure the requested streams
13009 *
13010 * PARAMETERS :
13011 *   @device      : camera3 device handle
13012 *   @stream_list : streams to be configured
13013 * RETURN : Success: 0
13014 * Failure: -EINVAL (if stream configuration is invalid)
13015 * -ENODEV (fatal error)
13016 *==========================================================================*/
13017
13018int QCamera3HardwareInterface::configure_streams(
13019 const struct camera3_device *device,
13020 camera3_stream_configuration_t *stream_list)
13021{
13022 LOGD("E");
13023 QCamera3HardwareInterface *hw =
13024 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13025 if (!hw) {
13026 LOGE("NULL camera device");
13027 return -ENODEV;
13028 }
13029 int rc = hw->configureStreams(stream_list);
13030 LOGD("X");
13031 return rc;
13032}
13033
13034/*===========================================================================
13035 * FUNCTION : construct_default_request_settings
13036 *
13037 * DESCRIPTION: Configure a settings buffer to meet the required use case
13038 *
13039 * PARAMETERS :
13040 *   @device : camera3 device handle
13041 *   @type   : request template type (e.g. CAMERA3_TEMPLATE_PREVIEW)
13042 * RETURN : Success: Return valid metadata
13043 * Failure: Return NULL
13044 *==========================================================================*/
13045const camera_metadata_t* QCamera3HardwareInterface::
13046 construct_default_request_settings(const struct camera3_device *device,
13047 int type)
13048{
13049
13050 LOGD("E");
13051 camera_metadata_t* fwk_metadata = NULL;
13052 QCamera3HardwareInterface *hw =
13053 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13054 if (!hw) {
13055 LOGE("NULL camera device");
13056 return NULL;
13057 }
13058
13059 fwk_metadata = hw->translateCapabilityToMetadata(type);
13060
13061 LOGD("X");
13062 return fwk_metadata;
13063}
13064
13065/*===========================================================================
13066 * FUNCTION : process_capture_request
13067 *
13068 * DESCRIPTION: Submit a capture request from the framework to the HAL
13069 *
13070 * PARAMETERS :
13071 *   @device  : camera3 device handle
13072 *   @request : capture request to process
13073 * RETURN     : 0 on success; negative error code on failure
13074 *==========================================================================*/
13075int QCamera3HardwareInterface::process_capture_request(
13076 const struct camera3_device *device,
13077 camera3_capture_request_t *request)
13078{
13079 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013080 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013081 QCamera3HardwareInterface *hw =
13082 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13083 if (!hw) {
13084 LOGE("NULL camera device");
13085 return -EINVAL;
13086 }
13087
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013088 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013089 LOGD("X");
13090 return rc;
13091}
13092
13093/*===========================================================================
13094 * FUNCTION : dump
13095 *
13096 * DESCRIPTION: Dump HAL state to the given file descriptor (invoked by dumpsys)
13097 *
13098 * PARAMETERS :
13099 *   @device : camera3 device handle
13100 *   @fd     : file descriptor to write the dump into
13101 * RETURN     : None
13102 *==========================================================================*/
13103
13104void QCamera3HardwareInterface::dump(
13105 const struct camera3_device *device, int fd)
13106{
13107 /* Log level property is read when "adb shell dumpsys media.camera" is
13108 called so that the log level can be controlled without restarting
13109 the media server */
13110 getLogLevel();
13111
13112 LOGD("E");
13113 QCamera3HardwareInterface *hw =
13114 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13115 if (!hw) {
13116 LOGE("NULL camera device");
13117 return;
13118 }
13119
13120 hw->dump(fd);
13121 LOGD("X");
13122 return;
13123}
13124
13125/*===========================================================================
13126 * FUNCTION : flush
13127 *
13128 * DESCRIPTION: Flush all in-flight capture requests; a no-op unless the
13129 *              device is in the STARTED state
13130 * PARAMETERS :
13131 *   @device : camera3 device handle
13132 *
13133 * RETURN     : 0 on success; -ENODEV on fatal device error
13134 *==========================================================================*/
13135
13136int QCamera3HardwareInterface::flush(
13137 const struct camera3_device *device)
13138{
13139 int rc;
13140 LOGD("E");
13141 QCamera3HardwareInterface *hw =
13142 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13143 if (!hw) {
13144 LOGE("NULL camera device");
13145 return -EINVAL;
13146 }
13147
13148 pthread_mutex_lock(&hw->mMutex);
13149 // Validate current state
13150 switch (hw->mState) {
13151 case STARTED:
13152 /* valid state */
13153 break;
13154
13155 case ERROR:
13156 pthread_mutex_unlock(&hw->mMutex);
13157 hw->handleCameraDeviceError();
13158 return -ENODEV;
13159
13160 default:
13161 LOGI("Flush returned during state %d", hw->mState);
13162 pthread_mutex_unlock(&hw->mMutex);
13163 return 0;
13164 }
13165 pthread_mutex_unlock(&hw->mMutex);
13166
13167 rc = hw->flush(true /* restart channels */ );
13168 LOGD("X");
13169 return rc;
13170}
13171
13172/*===========================================================================
13173 * FUNCTION : close_camera_device
13174 *
13175 * DESCRIPTION: Close the camera device and free the HAL instance
13176 *
13177 * PARAMETERS :
13178 *   @device : camera device handle to close
13179 *
13180 * RETURN     : NO_ERROR on success; BAD_VALUE if the device handle is NULL
13181 *==========================================================================*/
13182int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13183{
13184 int ret = NO_ERROR;
13185 QCamera3HardwareInterface *hw =
13186 reinterpret_cast<QCamera3HardwareInterface *>(
13187 reinterpret_cast<camera3_device_t *>(device)->priv);
13188 if (!hw) {
13189 LOGE("NULL camera device");
13190 return BAD_VALUE;
13191 }
13192
13193 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13194 delete hw;
13195 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013196 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013197 return ret;
13198}
13199
13200/*===========================================================================
13201 * FUNCTION : getWaveletDenoiseProcessPlate
13202 *
13203 * DESCRIPTION: query wavelet denoise process plate
13204 *
13205 * PARAMETERS : None
13206 *
13207 * RETURN     : WNR process plate value
13208 *==========================================================================*/
13209cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13210{
13211 char prop[PROPERTY_VALUE_MAX];
13212 memset(prop, 0, sizeof(prop));
13213 property_get("persist.denoise.process.plates", prop, "0");
13214 int processPlate = atoi(prop);
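    // persist.denoise.process.plates selects the WNR plate: 0 = YCbCr plane,
    // 1 = CbCr only, 2 = streamlined YCbCr (default), 3 = streamlined CbCr.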
13215 switch(processPlate) {
13216 case 0:
13217 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13218 case 1:
13219 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13220 case 2:
13221 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13222 case 3:
13223 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13224 default:
13225 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13226 }
13227}
13228
13229
13230/*===========================================================================
13231 * FUNCTION : getTemporalDenoiseProcessPlate
13232 *
13233 * DESCRIPTION: query temporal denoise process plate
13234 *
13235 * PARAMETERS : None
13236 *
13237 * RETURN     : TNR process plate value
13238 *==========================================================================*/
13239cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13240{
13241 char prop[PROPERTY_VALUE_MAX];
13242 memset(prop, 0, sizeof(prop));
13243 property_get("persist.tnr.process.plates", prop, "0");
13244 int processPlate = atoi(prop);
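    // persist.tnr.process.plates uses the same value mapping as the WNR property
    // above; the TNR plate is expressed with the wavelet denoise enum.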
13245 switch(processPlate) {
13246 case 0:
13247 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13248 case 1:
13249 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13250 case 2:
13251 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13252 case 3:
13253 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13254 default:
13255 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13256 }
13257}
13258
13259
13260/*===========================================================================
13261 * FUNCTION : extractSceneMode
13262 *
13263 * DESCRIPTION: Extract scene mode from framework-supplied metadata
13264 *
13265 * PARAMETERS :
13266 * @frame_settings: CameraMetadata reference
13267 *   @metaMode: ANDROID_CONTROL_MODE value
13268 * @hal_metadata: hal metadata structure
13269 *
13270 * RETURN     : NO_ERROR on success; error code on failure
13271 *==========================================================================*/
13272int32_t QCamera3HardwareInterface::extractSceneMode(
13273 const CameraMetadata &frame_settings, uint8_t metaMode,
13274 metadata_buffer_t *hal_metadata)
13275{
13276 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013277 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13278
13279 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13280 LOGD("Ignoring control mode OFF_KEEP_STATE");
13281 return NO_ERROR;
13282 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013283
13284 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13285 camera_metadata_ro_entry entry =
13286 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13287 if (0 == entry.count)
13288 return rc;
13289
13290 uint8_t fwk_sceneMode = entry.data.u8[0];
13291
13292 int val = lookupHalName(SCENE_MODES_MAP,
13293 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13294 fwk_sceneMode);
13295 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013296 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013297 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013298 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013299 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013300
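    // Prefer sensor HDR when the scene mode requests HDR (or to turn it back off);
    // fall back to multi-frame HDR bracketing only when sensor HDR is not engaged.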
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013301 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13302 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13303 }
13304
13305 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13306 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013307 cam_hdr_param_t hdr_params;
13308 hdr_params.hdr_enable = 1;
13309 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13310 hdr_params.hdr_need_1x = false;
13311 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13312 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13313 rc = BAD_VALUE;
13314 }
13315 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013316
Thierry Strudel3d639192016-09-09 11:52:26 -070013317 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13318 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13319 rc = BAD_VALUE;
13320 }
13321 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013322
13323 if (mForceHdrSnapshot) {
13324 cam_hdr_param_t hdr_params;
13325 hdr_params.hdr_enable = 1;
13326 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13327 hdr_params.hdr_need_1x = false;
13328 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13329 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13330 rc = BAD_VALUE;
13331 }
13332 }
13333
Thierry Strudel3d639192016-09-09 11:52:26 -070013334 return rc;
13335}
13336
13337/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013338 * FUNCTION : setVideoHdrMode
13339 *
13340 * DESCRIPTION: Set video HDR mode from framework-supplied metadata
13341 *
13342 * PARAMETERS :
13343 * @hal_metadata: hal metadata structure
13344 *   @vhdr: requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE)
13345 *
13346 * RETURN     : NO_ERROR on success; BAD_VALUE for an invalid mode
13347 *==========================================================================*/
13348int32_t QCamera3HardwareInterface::setVideoHdrMode(
13349 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13350{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013351 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13352 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13353 }
13354
13355 LOGE("Invalid Video HDR mode %d!", vhdr);
13356 return BAD_VALUE;
13357}
13358
13359/*===========================================================================
13360 * FUNCTION : setSensorHDR
13361 *
13362 * DESCRIPTION: Enable/disable sensor HDR.
13363 *
13364 * PARAMETERS :
13365 * @hal_metadata: hal metadata structure
13366 *   @enable: whether to enable or disable sensor HDR
13367 *   @isVideoHdrEnable: true when called for video HDR (m_bSensorHDREnabled not updated)
13368 * RETURN     : NO_ERROR on success; BAD_VALUE on unsupported HDR mode
13369 *==========================================================================*/
13370int32_t QCamera3HardwareInterface::setSensorHDR(
13371 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13372{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013373 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013374 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13375
13376 if (enable) {
13377 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13378 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13379 #ifdef _LE_CAMERA_
13380 //Default to staggered HDR for IOT
13381 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13382 #else
13383 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13384 #endif
13385 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13386 }
13387
13388 bool isSupported = false;
13389 switch (sensor_hdr) {
13390 case CAM_SENSOR_HDR_IN_SENSOR:
13391 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13392 CAM_QCOM_FEATURE_SENSOR_HDR) {
13393 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013394 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013395 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013396 break;
13397 case CAM_SENSOR_HDR_ZIGZAG:
13398 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13399 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13400 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013401 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013402 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013403 break;
13404 case CAM_SENSOR_HDR_STAGGERED:
13405 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13406 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13407 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013408 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013409 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013410 break;
13411 case CAM_SENSOR_HDR_OFF:
13412 isSupported = true;
13413 LOGD("Turning off sensor HDR");
13414 break;
13415 default:
13416 LOGE("HDR mode %d not supported", sensor_hdr);
13417 rc = BAD_VALUE;
13418 break;
13419 }
13420
13421 if(isSupported) {
13422 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13423 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13424 rc = BAD_VALUE;
13425 } else {
13426 if(!isVideoHdrEnable)
13427 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013428 }
13429 }
13430 return rc;
13431}
13432
13433/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013434 * FUNCTION : needRotationReprocess
13435 *
13436 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13437 *
13438 * PARAMETERS : none
13439 *
13440 * RETURN : true: needed
13441 * false: no need
13442 *==========================================================================*/
13443bool QCamera3HardwareInterface::needRotationReprocess()
13444{
13445 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13446        // pp has the capability to process rotation, so route rotation through reprocess
13447 LOGH("need do reprocess for rotation");
13448 return true;
13449 }
13450
13451 return false;
13452}
13453
13454/*===========================================================================
13455 * FUNCTION : needReprocess
13456 *
13457 * DESCRIPTION: if reprocess is needed
13458 *
13459 * PARAMETERS : none
13460 *
13461 * RETURN : true: needed
13462 * false: no need
13463 *==========================================================================*/
13464bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13465{
13466 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13467 // TODO: add for ZSL HDR later
13468 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13469 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13470 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13471 return true;
13472 } else {
13473 LOGH("already post processed frame");
13474 return false;
13475 }
13476 }
13477 return needRotationReprocess();
13478}
13479
13480/*===========================================================================
13481 * FUNCTION : needJpegExifRotation
13482 *
13483 * DESCRIPTION: if rotation from jpeg is needed
13484 *
13485 * PARAMETERS : none
13486 *
13487 * RETURN : true: needed
13488 * false: no need
13489 *==========================================================================*/
13490bool QCamera3HardwareInterface::needJpegExifRotation()
13491{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013492 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013493 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13494 LOGD("Need use Jpeg EXIF Rotation");
13495 return true;
13496 }
13497 return false;
13498}
13499
13500/*===========================================================================
13501 * FUNCTION : addOfflineReprocChannel
13502 *
13503 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13504 * coming from input channel
13505 *
13506 * PARAMETERS :
13507 * @config : reprocess configuration
13508 * @inputChHandle : pointer to the input (source) channel
13509 *
13510 *
13511 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13512 *==========================================================================*/
13513QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13514 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13515{
13516 int32_t rc = NO_ERROR;
13517 QCamera3ReprocessChannel *pChannel = NULL;
13518
13519 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013520 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13521 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013522 if (NULL == pChannel) {
13523 LOGE("no mem for reprocess channel");
13524 return NULL;
13525 }
13526
13527 rc = pChannel->initialize(IS_TYPE_NONE);
13528 if (rc != NO_ERROR) {
13529 LOGE("init reprocess channel failed, ret = %d", rc);
13530 delete pChannel;
13531 return NULL;
13532 }
13533
13534 // pp feature config
13535 cam_pp_feature_config_t pp_config;
13536 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13537
13538 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13539 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13540 & CAM_QCOM_FEATURE_DSDN) {
13541         // Use CPP DSDN (in place of CDS) when the hardware supports it.
13542 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13543 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13544 }
13545 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13546 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13547 }
13548
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013549 if (config.hdr_param.hdr_enable) {
13550 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13551 pp_config.hdr_param = config.hdr_param;
13552 }
13553
13554 if (mForceHdrSnapshot) {
13555 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13556 pp_config.hdr_param.hdr_enable = 1;
13557 pp_config.hdr_param.hdr_need_1x = 0;
13558 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13559 }
13560
Thierry Strudel3d639192016-09-09 11:52:26 -070013561 rc = pChannel->addReprocStreamsFromSource(pp_config,
13562 config,
13563 IS_TYPE_NONE,
13564 mMetadataChannel);
13565
13566 if (rc != NO_ERROR) {
13567 delete pChannel;
13568 return NULL;
13569 }
13570 return pChannel;
13571}
13572
13573/*===========================================================================
13574 * FUNCTION : getMobicatMask
13575 *
13576 * DESCRIPTION: returns mobicat mask
13577 *
13578 * PARAMETERS : none
13579 *
13580 * RETURN : mobicat mask
13581 *
13582 *==========================================================================*/
13583uint8_t QCamera3HardwareInterface::getMobicatMask()
13584{
13585 return m_MobicatMask;
13586}
13587
13588/*===========================================================================
13589 * FUNCTION : setMobicat
13590 *
13591 * DESCRIPTION: set Mobicat on/off.
13592 *
13593 * PARAMETERS :
13594 * @params : none
13595 *
13596 * RETURN : int32_t type of status
13597 * NO_ERROR -- success
13598 *              non-zero failure code
13599 *==========================================================================*/
13600int32_t QCamera3HardwareInterface::setMobicat()
13601{
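    // Mobicat is enabled via the persist.camera.mobicat property; when set, instruct
    // the VFE and post-processing modules to reload chromatix tuning data.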
13602 char value [PROPERTY_VALUE_MAX];
13603 property_get("persist.camera.mobicat", value, "0");
13604 int32_t ret = NO_ERROR;
13605 uint8_t enableMobi = (uint8_t)atoi(value);
13606
13607 if (enableMobi) {
13608 tune_cmd_t tune_cmd;
13609 tune_cmd.type = SET_RELOAD_CHROMATIX;
13610 tune_cmd.module = MODULE_ALL;
13611 tune_cmd.value = TRUE;
13612 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13613 CAM_INTF_PARM_SET_VFE_COMMAND,
13614 tune_cmd);
13615
13616 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13617 CAM_INTF_PARM_SET_PP_COMMAND,
13618 tune_cmd);
13619 }
13620 m_MobicatMask = enableMobi;
13621
13622 return ret;
13623}
13624
13625/*===========================================================================
13626* FUNCTION : getLogLevel
13627*
13628* DESCRIPTION: Reads the log level property into a variable
13629*
13630* PARAMETERS :
13631* None
13632*
13633* RETURN :
13634* None
13635*==========================================================================*/
13636void QCamera3HardwareInterface::getLogLevel()
13637{
13638 char prop[PROPERTY_VALUE_MAX];
13639 uint32_t globalLogLevel = 0;
13640
13641 property_get("persist.camera.hal.debug", prop, "0");
13642 int val = atoi(prop);
13643 if (0 <= val) {
13644 gCamHal3LogLevel = (uint32_t)val;
13645 }
13646
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013647 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013648 gKpiDebugLevel = atoi(prop);
13649
13650 property_get("persist.camera.global.debug", prop, "0");
13651 val = atoi(prop);
13652 if (0 <= val) {
13653 globalLogLevel = (uint32_t)val;
13654 }
13655
13656 /* Highest log level among hal.logs and global.logs is selected */
13657 if (gCamHal3LogLevel < globalLogLevel)
13658 gCamHal3LogLevel = globalLogLevel;
13659
13660 return;
13661}
13662
13663/*===========================================================================
13664 * FUNCTION : validateStreamRotations
13665 *
13666 * DESCRIPTION: Check if the rotations requested are supported
13667 *
13668 * PARAMETERS :
13669 * @stream_list : streams to be configured
13670 *
13671 * RETURN : NO_ERROR on success
13672 * -EINVAL on failure
13673 *
13674 *==========================================================================*/
13675int QCamera3HardwareInterface::validateStreamRotations(
13676 camera3_stream_configuration_t *streamList)
13677{
13678 int rc = NO_ERROR;
13679
13680 /*
13681 * Loop through all streams requested in configuration
13682 * Check if unsupported rotations have been requested on any of them
13683 */
13684 for (size_t j = 0; j < streamList->num_streams; j++){
13685 camera3_stream_t *newStream = streamList->streams[j];
13686
13687 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13688 bool isImplDef = (newStream->format ==
13689 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13690 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13691 isImplDef);
13692
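        // Rotation is only supported on implementation-defined output streams that
        // are not ZSL (bidirectional) streams.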
13693 if (isRotated && (!isImplDef || isZsl)) {
13694 LOGE("Error: Unsupported rotation of %d requested for stream"
13695 "type:%d and stream format:%d",
13696 newStream->rotation, newStream->stream_type,
13697 newStream->format);
13698 rc = -EINVAL;
13699 break;
13700 }
13701 }
13702
13703 return rc;
13704}
13705
13706/*===========================================================================
13707* FUNCTION : getFlashInfo
13708*
13709* DESCRIPTION: Retrieve information about whether the device has a flash.
13710*
13711* PARAMETERS :
13712* @cameraId : Camera id to query
13713* @hasFlash : Boolean indicating whether there is a flash device
13714* associated with given camera
13715* @flashNode : If a flash device exists, this will be its device node.
13716*
13717* RETURN :
13718* None
13719*==========================================================================*/
13720void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13721 bool& hasFlash,
13722 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13723{
13724 cam_capability_t* camCapability = gCamCapability[cameraId];
13725 if (NULL == camCapability) {
13726 hasFlash = false;
13727 flashNode[0] = '\0';
13728 } else {
13729 hasFlash = camCapability->flash_available;
13730 strlcpy(flashNode,
13731 (char*)camCapability->flash_dev_name,
13732 QCAMERA_MAX_FILEPATH_LENGTH);
13733 }
13734}
13735
13736/*===========================================================================
13737* FUNCTION : getEepromVersionInfo
13738*
13739* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13740*
13741* PARAMETERS : None
13742*
13743* RETURN : string describing EEPROM version
13744* "\0" if no such info available
13745*==========================================================================*/
13746const char *QCamera3HardwareInterface::getEepromVersionInfo()
13747{
13748 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13749}
13750
13751/*===========================================================================
13752* FUNCTION : getLdafCalib
13753*
13754* DESCRIPTION: Retrieve Laser AF calibration data
13755*
13756* PARAMETERS : None
13757*
13758* RETURN : Two uint32_t describing laser AF calibration data
13759* NULL if none is available.
13760*==========================================================================*/
13761const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13762{
13763 if (mLdafCalibExist) {
13764 return &mLdafCalib[0];
13765 } else {
13766 return NULL;
13767 }
13768}
13769
13770/*===========================================================================
13771 * FUNCTION : dynamicUpdateMetaStreamInfo
13772 *
13773 * DESCRIPTION: This function:
13774 * (1) stops all the channels
13775 * (2) returns error on pending requests and buffers
13776 * (3) sends metastream_info in setparams
13777 * (4) starts all channels
13778 * This is useful when sensor has to be restarted to apply any
13779 * settings such as frame rate from a different sensor mode
13780 *
13781 * PARAMETERS : None
13782 *
13783 * RETURN : NO_ERROR on success
13784 * Error codes on failure
13785 *
13786 *==========================================================================*/
13787int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13788{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013789 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013790 int rc = NO_ERROR;
13791
13792 LOGD("E");
13793
13794 rc = stopAllChannels();
13795 if (rc < 0) {
13796 LOGE("stopAllChannels failed");
13797 return rc;
13798 }
13799
13800 rc = notifyErrorForPendingRequests();
13801 if (rc < 0) {
13802 LOGE("notifyErrorForPendingRequests failed");
13803 return rc;
13804 }
13805
13806 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13807 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13808 "Format:%d",
13809 mStreamConfigInfo.type[i],
13810 mStreamConfigInfo.stream_sizes[i].width,
13811 mStreamConfigInfo.stream_sizes[i].height,
13812 mStreamConfigInfo.postprocess_mask[i],
13813 mStreamConfigInfo.format[i]);
13814 }
13815
13816 /* Send meta stream info once again so that ISP can start */
13817 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13818 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13819 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13820 mParameters);
13821 if (rc < 0) {
13822 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13823 }
13824
13825 rc = startAllChannels();
13826 if (rc < 0) {
13827 LOGE("startAllChannels failed");
13828 return rc;
13829 }
13830
13831 LOGD("X");
13832 return rc;
13833}
13834
13835/*===========================================================================
13836 * FUNCTION : stopAllChannels
13837 *
13838 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13839 *
13840 * PARAMETERS : None
13841 *
13842 * RETURN : NO_ERROR on success
13843 * Error codes on failure
13844 *
13845 *==========================================================================*/
13846int32_t QCamera3HardwareInterface::stopAllChannels()
13847{
13848 int32_t rc = NO_ERROR;
13849
13850 LOGD("Stopping all channels");
13851 // Stop the Streams/Channels
13852 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13853 it != mStreamInfo.end(); it++) {
13854 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13855 if (channel) {
13856 channel->stop();
13857 }
13858 (*it)->status = INVALID;
13859 }
13860
13861 if (mSupportChannel) {
13862 mSupportChannel->stop();
13863 }
13864 if (mAnalysisChannel) {
13865 mAnalysisChannel->stop();
13866 }
13867 if (mRawDumpChannel) {
13868 mRawDumpChannel->stop();
13869 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013870 if (mHdrPlusRawSrcChannel) {
13871 mHdrPlusRawSrcChannel->stop();
13872 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013873 if (mMetadataChannel) {
13874 /* If content of mStreamInfo is not 0, there is metadata stream */
13875 mMetadataChannel->stop();
13876 }
13877
13878 LOGD("All channels stopped");
13879 return rc;
13880}
13881
13882/*===========================================================================
13883 * FUNCTION : startAllChannels
13884 *
13885 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13886 *
13887 * PARAMETERS : None
13888 *
13889 * RETURN : NO_ERROR on success
13890 * Error codes on failure
13891 *
13892 *==========================================================================*/
13893int32_t QCamera3HardwareInterface::startAllChannels()
13894{
13895 int32_t rc = NO_ERROR;
13896
13897 LOGD("Start all channels ");
13898 // Start the Streams/Channels
13899 if (mMetadataChannel) {
13900 /* If content of mStreamInfo is not 0, there is metadata stream */
13901 rc = mMetadataChannel->start();
13902 if (rc < 0) {
13903 LOGE("META channel start failed");
13904 return rc;
13905 }
13906 }
13907 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13908 it != mStreamInfo.end(); it++) {
13909 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13910 if (channel) {
13911 rc = channel->start();
13912 if (rc < 0) {
13913 LOGE("channel start failed");
13914 return rc;
13915 }
13916 }
13917 }
13918 if (mAnalysisChannel) {
13919 mAnalysisChannel->start();
13920 }
13921 if (mSupportChannel) {
13922 rc = mSupportChannel->start();
13923 if (rc < 0) {
13924 LOGE("Support channel start failed");
13925 return rc;
13926 }
13927 }
13928 if (mRawDumpChannel) {
13929 rc = mRawDumpChannel->start();
13930 if (rc < 0) {
13931 LOGE("RAW dump channel start failed");
13932 return rc;
13933 }
13934 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013935 if (mHdrPlusRawSrcChannel) {
13936 rc = mHdrPlusRawSrcChannel->start();
13937 if (rc < 0) {
13938 LOGE("HDR+ RAW channel start failed");
13939 return rc;
13940 }
13941 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013942
13943 LOGD("All channels started");
13944 return rc;
13945}
13946
13947/*===========================================================================
13948 * FUNCTION : notifyErrorForPendingRequests
13949 *
13950 * DESCRIPTION: This function sends error for all the pending requests/buffers
13951 *
13952 * PARAMETERS : None
13953 *
13954 * RETURN : Error codes
13955 * NO_ERROR on success
13956 *
13957 *==========================================================================*/
13958int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13959{
Emilian Peev7650c122017-01-19 08:24:33 -080013960 notifyErrorFoPendingDepthData(mDepthChannel);
13961
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013962 auto pendingRequest = mPendingRequestsList.begin();
13963 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070013964
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013965 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
13966 // buffers (for which buffers aren't sent yet).
13967 while (pendingRequest != mPendingRequestsList.end() ||
13968 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
13969 if (pendingRequest == mPendingRequestsList.end() ||
13970 pendingBuffer->frame_number < pendingRequest->frame_number) {
13971             // If metadata for this frame was sent, notify about a buffer error and return buffers
13972 // with error.
13973 for (auto &info : pendingBuffer->mPendingBufferList) {
13974 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070013975 camera3_notify_msg_t notify_msg;
13976 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13977 notify_msg.type = CAMERA3_MSG_ERROR;
13978 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013979 notify_msg.message.error.error_stream = info.stream;
13980 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013981 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013982
13983 camera3_stream_buffer_t buffer = {};
13984 buffer.acquire_fence = -1;
13985 buffer.release_fence = -1;
13986 buffer.buffer = info.buffer;
13987 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
13988 buffer.stream = info.stream;
13989 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070013990 }
13991
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013992 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
13993 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
13994 pendingBuffer->frame_number > pendingRequest->frame_number) {
13995 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070013996 camera3_notify_msg_t notify_msg;
13997 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13998 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013999 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14000 notify_msg.message.error.error_stream = nullptr;
14001 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014002 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014003
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014004 if (pendingRequest->input_buffer != nullptr) {
14005 camera3_capture_result result = {};
14006 result.frame_number = pendingRequest->frame_number;
14007 result.result = nullptr;
14008 result.input_buffer = pendingRequest->input_buffer;
14009 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014010 }
14011
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014012 mShutterDispatcher.clear(pendingRequest->frame_number);
14013 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14014 } else {
14015 // If both buffers and result metadata weren't sent yet, notify about a request error
14016 // and return buffers with error.
14017 for (auto &info : pendingBuffer->mPendingBufferList) {
14018 camera3_notify_msg_t notify_msg;
14019 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14020 notify_msg.type = CAMERA3_MSG_ERROR;
14021 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14022 notify_msg.message.error.error_stream = info.stream;
14023 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14024 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014025
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014026 camera3_stream_buffer_t buffer = {};
14027 buffer.acquire_fence = -1;
14028 buffer.release_fence = -1;
14029 buffer.buffer = info.buffer;
14030 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14031 buffer.stream = info.stream;
14032 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14033 }
14034
14035 if (pendingRequest->input_buffer != nullptr) {
14036 camera3_capture_result result = {};
14037 result.frame_number = pendingRequest->frame_number;
14038 result.result = nullptr;
14039 result.input_buffer = pendingRequest->input_buffer;
14040 orchestrateResult(&result);
14041 }
14042
14043 mShutterDispatcher.clear(pendingRequest->frame_number);
14044 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14045 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014046 }
14047 }
14048
14049 /* Reset pending frame Drop list and requests list */
14050 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014051 mShutterDispatcher.clear();
14052 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014053 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014054 LOGH("Cleared all the pending buffers ");
14055
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014056 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014057}
14058
14059bool QCamera3HardwareInterface::isOnEncoder(
14060 const cam_dimension_t max_viewfinder_size,
14061 uint32_t width, uint32_t height)
14062{
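    // A stream is treated as an encoder-path stream if it exceeds the maximum
    // viewfinder size or the 4K video dimensions in either direction.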
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014063 return ((width > (uint32_t)max_viewfinder_size.width) ||
14064 (height > (uint32_t)max_viewfinder_size.height) ||
14065 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14066 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014067}
14068
14069/*===========================================================================
14070 * FUNCTION : setBundleInfo
14071 *
14072 * DESCRIPTION: Set bundle info for all streams that are bundle.
14073 *
14074 * PARAMETERS : None
14075 *
14076 * RETURN : NO_ERROR on success
14077 * Error codes on failure
14078 *==========================================================================*/
14079int32_t QCamera3HardwareInterface::setBundleInfo()
14080{
14081 int32_t rc = NO_ERROR;
14082
14083 if (mChannelHandle) {
14084 cam_bundle_config_t bundleInfo;
14085 memset(&bundleInfo, 0, sizeof(bundleInfo));
14086 rc = mCameraHandle->ops->get_bundle_info(
14087 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14088 if (rc != NO_ERROR) {
14089 LOGE("get_bundle_info failed");
14090 return rc;
14091 }
14092 if (mAnalysisChannel) {
14093 mAnalysisChannel->setBundleInfo(bundleInfo);
14094 }
14095 if (mSupportChannel) {
14096 mSupportChannel->setBundleInfo(bundleInfo);
14097 }
14098 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14099 it != mStreamInfo.end(); it++) {
14100 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14101 channel->setBundleInfo(bundleInfo);
14102 }
14103 if (mRawDumpChannel) {
14104 mRawDumpChannel->setBundleInfo(bundleInfo);
14105 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014106 if (mHdrPlusRawSrcChannel) {
14107 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14108 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014109 }
14110
14111 return rc;
14112}
14113
14114/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014115 * FUNCTION : setInstantAEC
14116 *
14117 * DESCRIPTION: Set Instant AEC related params.
14118 *
14119 * PARAMETERS :
14120 * @meta: CameraMetadata reference
14121 *
14122 * RETURN : NO_ERROR on success
14123 * Error codes on failure
14124 *==========================================================================*/
14125int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14126{
14127 int32_t rc = NO_ERROR;
14128 uint8_t val = 0;
14129 char prop[PROPERTY_VALUE_MAX];
14130
14131 // First try to configure instant AEC from framework metadata
14132 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14133 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14134 }
14135
14136 // If framework did not set this value, try to read from set prop.
14137 if (val == 0) {
14138 memset(prop, 0, sizeof(prop));
14139 property_get("persist.camera.instant.aec", prop, "0");
14140 val = (uint8_t)atoi(prop);
14141 }
14142
14143 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14144 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14145 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14146 mInstantAEC = val;
14147 mInstantAECSettledFrameNumber = 0;
14148 mInstantAecFrameIdxCount = 0;
14149 LOGH("instantAEC value set %d",val);
14150 if (mInstantAEC) {
14151 memset(prop, 0, sizeof(prop));
14152 property_get("persist.camera.ae.instant.bound", prop, "10");
14153 int32_t aec_frame_skip_cnt = atoi(prop);
14154 if (aec_frame_skip_cnt >= 0) {
14155 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14156 } else {
14157 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14158 rc = BAD_VALUE;
14159 }
14160 }
14161 } else {
14162 LOGE("Bad instant aec value set %d", val);
14163 rc = BAD_VALUE;
14164 }
14165 return rc;
14166}
14167
14168/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014169 * FUNCTION : get_num_overall_buffers
14170 *
14171 * DESCRIPTION: Estimate number of pending buffers across all requests.
14172 *
14173 * PARAMETERS : None
14174 *
14175 * RETURN : Number of overall pending buffers
14176 *
14177 *==========================================================================*/
14178uint32_t PendingBuffersMap::get_num_overall_buffers()
14179{
14180 uint32_t sum_buffers = 0;
14181 for (auto &req : mPendingBuffersInRequest) {
14182 sum_buffers += req.mPendingBufferList.size();
14183 }
14184 return sum_buffers;
14185}
14186
14187/*===========================================================================
14188 * FUNCTION : removeBuf
14189 *
14190 * DESCRIPTION: Remove a matching buffer from tracker.
14191 *
14192 * PARAMETERS : @buffer: image buffer for the callback
14193 *
14194 * RETURN : None
14195 *
14196 *==========================================================================*/
14197void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14198{
14199 bool buffer_found = false;
14200 for (auto req = mPendingBuffersInRequest.begin();
14201 req != mPendingBuffersInRequest.end(); req++) {
14202 for (auto k = req->mPendingBufferList.begin();
14203 k != req->mPendingBufferList.end(); k++ ) {
14204 if (k->buffer == buffer) {
14205 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14206 req->frame_number, buffer);
14207 k = req->mPendingBufferList.erase(k);
14208 if (req->mPendingBufferList.empty()) {
14209 // Remove this request from Map
14210 req = mPendingBuffersInRequest.erase(req);
14211 }
14212 buffer_found = true;
14213 break;
14214 }
14215 }
14216 if (buffer_found) {
14217 break;
14218 }
14219 }
14220 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14221 get_num_overall_buffers());
14222}
14223
14224/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014225 * FUNCTION : getBufErrStatus
14226 *
14227 * DESCRIPTION: get buffer error status
14228 *
14229 * PARAMETERS : @buffer: buffer handle
14230 *
14231 * RETURN : Error status
14232 *
14233 *==========================================================================*/
14234int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14235{
14236 for (auto& req : mPendingBuffersInRequest) {
14237 for (auto& k : req.mPendingBufferList) {
14238 if (k.buffer == buffer)
14239 return k.bufStatus;
14240 }
14241 }
14242 return CAMERA3_BUFFER_STATUS_OK;
14243}
14244
14245/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014246 * FUNCTION : setPAAFSupport
14247 *
14248 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14249 * feature mask according to stream type and filter
14250 * arrangement
14251 *
14252 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14253 * @stream_type: stream type
14254 * @filter_arrangement: filter arrangement
14255 *
14256 * RETURN : None
14257 *==========================================================================*/
14258void QCamera3HardwareInterface::setPAAFSupport(
14259 cam_feature_mask_t& feature_mask,
14260 cam_stream_type_t stream_type,
14261 cam_color_filter_arrangement_t filter_arrangement)
14262{
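    // Enable PAAF for Bayer sensors on preview/analysis/video streams (skipped when
    // the PPEISCORE feature is set), and for mono (Y) sensors on the analysis
    // stream only.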
Thierry Strudel3d639192016-09-09 11:52:26 -070014263 switch (filter_arrangement) {
14264 case CAM_FILTER_ARRANGEMENT_RGGB:
14265 case CAM_FILTER_ARRANGEMENT_GRBG:
14266 case CAM_FILTER_ARRANGEMENT_GBRG:
14267 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014268 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14269 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014270 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014271 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14272 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014273 }
14274 break;
14275 case CAM_FILTER_ARRANGEMENT_Y:
14276 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14277 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14278 }
14279 break;
14280 default:
14281 break;
14282 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014283 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14284 feature_mask, stream_type, filter_arrangement);
14285
14286
Thierry Strudel3d639192016-09-09 11:52:26 -070014287}
14288
14289/*===========================================================================
14290* FUNCTION : getSensorMountAngle
14291*
14292* DESCRIPTION: Retrieve sensor mount angle
14293*
14294* PARAMETERS : None
14295*
14296* RETURN : sensor mount angle in uint32_t
14297*==========================================================================*/
14298uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14299{
14300 return gCamCapability[mCameraId]->sensor_mount_angle;
14301}
14302
14303/*===========================================================================
14304* FUNCTION : getRelatedCalibrationData
14305*
14306* DESCRIPTION: Retrieve related system calibration data
14307*
14308* PARAMETERS : None
14309*
14310* RETURN : Pointer of related system calibration data
14311*==========================================================================*/
14312const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14313{
14314 return (const cam_related_system_calibration_data_t *)
14315 &(gCamCapability[mCameraId]->related_cam_calibration);
14316}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014317
14318/*===========================================================================
14319 * FUNCTION : is60HzZone
14320 *
14321 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
14322 *
14323 * PARAMETERS : None
14324 *
14325 * RETURN : True if in 60Hz zone, False otherwise
14326 *==========================================================================*/
14327bool QCamera3HardwareInterface::is60HzZone()
14328{
14329 time_t t = time(NULL);
14330 struct tm lt;
14331
14332 struct tm* r = localtime_r(&t, &lt);
14333
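    // Heuristic based on the UTC offset of local time: offsets at or west of UTC-02:00
    // and at or east of UTC+08:00 are assumed to be 60Hz regions; if local time cannot
    // be determined, default to 60Hz.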
14334 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14335 return true;
14336 else
14337 return false;
14338}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014339
14340/*===========================================================================
14341 * FUNCTION : adjustBlackLevelForCFA
14342 *
14343 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14344 * of bayer CFA (Color Filter Array).
14345 *
14346 * PARAMETERS : @input: black level pattern in the order of RGGB
14347 * @output: black level pattern in the order of CFA
14348 * @color_arrangement: CFA color arrangement
14349 *
14350 * RETURN : None
14351 *==========================================================================*/
14352template<typename T>
14353void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14354 T input[BLACK_LEVEL_PATTERN_CNT],
14355 T output[BLACK_LEVEL_PATTERN_CNT],
14356 cam_color_filter_arrangement_t color_arrangement)
14357{
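    // input[] is ordered R, Gr, Gb, B (RGGB); reorder it to match the first 2x2
    // block of the sensor's color filter array.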
14358 switch (color_arrangement) {
14359 case CAM_FILTER_ARRANGEMENT_GRBG:
14360 output[0] = input[1];
14361 output[1] = input[0];
14362 output[2] = input[3];
14363 output[3] = input[2];
14364 break;
14365 case CAM_FILTER_ARRANGEMENT_GBRG:
14366 output[0] = input[2];
14367 output[1] = input[3];
14368 output[2] = input[0];
14369 output[3] = input[1];
14370 break;
14371 case CAM_FILTER_ARRANGEMENT_BGGR:
14372 output[0] = input[3];
14373 output[1] = input[2];
14374 output[2] = input[1];
14375 output[3] = input[0];
14376 break;
14377 case CAM_FILTER_ARRANGEMENT_RGGB:
14378 output[0] = input[0];
14379 output[1] = input[1];
14380 output[2] = input[2];
14381 output[3] = input[3];
14382 break;
14383 default:
14384 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14385 break;
14386 }
14387}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014388
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014389void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14390 CameraMetadata &resultMetadata,
14391 std::shared_ptr<metadata_buffer_t> settings)
14392{
14393 if (settings == nullptr) {
14394 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14395 return;
14396 }
14397
14398 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14399 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14400 }
14401
14402 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14403 String8 str((const char *)gps_methods);
14404 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14405 }
14406
14407 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14408 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14409 }
14410
14411 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14412 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14413 }
14414
14415 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14416 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14417 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14418 }
14419
14420 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14421 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14422 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14423 }
14424
14425 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14426 int32_t fwk_thumb_size[2];
14427 fwk_thumb_size[0] = thumb_size->width;
14428 fwk_thumb_size[1] = thumb_size->height;
14429 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14430 }
14431
14432 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14433 uint8_t fwk_intent = intent[0];
14434 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14435 }
14436}
14437
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014438bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14439 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14440 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014441{
14442 if (hdrPlusRequest == nullptr) return false;
14443
14444 // Check noise reduction mode is high quality.
14445 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14446 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14447 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014448 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14449 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014450 return false;
14451 }
14452
14453 // Check edge mode is high quality.
14454 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14455 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14456 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14457 return false;
14458 }
14459
14460 if (request.num_output_buffers != 1 ||
14461 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14462 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014463 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14464 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14465                    request.output_buffers[i].stream->width,
14466                    request.output_buffers[i].stream->height,
14467                    request.output_buffers[i].stream->format);
14468 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014469 return false;
14470 }
14471
14472 // Get a YUV buffer from pic channel.
14473 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14474 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14475 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14476 if (res != OK) {
14477 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14478 __FUNCTION__, strerror(-res), res);
14479 return false;
14480 }
14481
14482 pbcamera::StreamBuffer buffer;
14483 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014484 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014485 buffer.data = yuvBuffer->buffer;
14486 buffer.dataSize = yuvBuffer->frame_len;
14487
14488 pbcamera::CaptureRequest pbRequest;
14489 pbRequest.id = request.frame_number;
14490 pbRequest.outputBuffers.push_back(buffer);
14491
14492 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014493 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014494 if (res != OK) {
14495 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14496 strerror(-res), res);
14497 return false;
14498 }
14499
14500 hdrPlusRequest->yuvBuffer = yuvBuffer;
14501 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14502
14503 return true;
14504}
14505
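// Ask the Easel manager to open the HDR+ client asynchronously. The outcome is delivered through
// onOpened() or onOpenFailed(); gHdrPlusClientOpening prevents a second open while one is pending.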
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014506status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14507{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014508 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14509 return OK;
14510 }
14511
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014512 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014513 if (res != OK) {
14514 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14515 strerror(-res), res);
14516 return res;
14517 }
14518 gHdrPlusClientOpening = true;
14519
14520 return OK;
14521}
14522
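// Enable HDR+ (ZSL) mode. If the HDR+ client is not open yet, an asynchronous open is started and
// the mode is enabled later from onOpened(); otherwise the HDR+ streams are configured and Easel
// is told to start capturing ZSL raw buffers.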
Chien-Yu Chenee335912017-02-09 17:53:20 -080014523status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14524{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014525 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014526
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014527 if (mHdrPlusModeEnabled) {
14528 return OK;
14529 }
14530
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014531 // Check if gHdrPlusClient is opened or being opened.
14532 if (gHdrPlusClient == nullptr) {
14533 if (gHdrPlusClientOpening) {
14534 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14535 return OK;
14536 }
14537
14538 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014539 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014540 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14541 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014542 return res;
14543 }
14544
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014545 // When opening HDR+ client completes, HDR+ mode will be enabled.
14546 return OK;
14547
Chien-Yu Chenee335912017-02-09 17:53:20 -080014548 }
14549
14550 // Configure stream for HDR+.
14551 res = configureHdrPlusStreamsLocked();
14552 if (res != OK) {
14553 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014554 return res;
14555 }
14556
14557 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14558 res = gHdrPlusClient->setZslHdrPlusMode(true);
14559 if (res != OK) {
14560 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014561 return res;
14562 }
14563
14564 mHdrPlusModeEnabled = true;
14565 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14566
14567 return OK;
14568}
14569
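// Wait until an in-flight asynchronous HDR+ client open has finished (successfully or not).
// The caller must hold gHdrPlusClientLock through the passed-in unique_lock.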
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014570void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
14571{
14572 if (gHdrPlusClientOpening) {
14573 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
14574 }
14575}
14576
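// Disable HDR+ (ZSL) mode and close the HDR+ client so Easel can enter low power mode.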
Chien-Yu Chenee335912017-02-09 17:53:20 -080014577void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14578{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014579 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014580 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014581 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14582 if (res != OK) {
14583 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14584 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014585
14586 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014587 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014588 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014589 }
14590
14591 mHdrPlusModeEnabled = false;
14592 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14593}
14594
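// Build the HDR+ client stream configuration: the input is either RAW10 buffers provided by the
// HAL (when mHdrPlusRawSrcChannel exists) or the sensor MIPI feeding Easel directly, and the
// output is the YUV stream backing the picture channel.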
14595status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014596{
14597 pbcamera::InputConfiguration inputConfig;
14598 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14599 status_t res = OK;
14600
14601 // Configure HDR+ client streams.
14602 // Get input config.
14603 if (mHdrPlusRawSrcChannel) {
14604 // HDR+ input buffers will be provided by HAL.
14605 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14606 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14607 if (res != OK) {
14608            LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
14609 __FUNCTION__, strerror(-res), res);
14610 return res;
14611 }
14612
14613 inputConfig.isSensorInput = false;
14614 } else {
14615 // Sensor MIPI will send data to Easel.
14616 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014617 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014618 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14619 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14620 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14621 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14622 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
Yin-Chia Yeheeb10422017-05-23 11:37:46 -070014623 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014624 if (mSensorModeInfo.num_raw_bits != 10) {
14625 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14626 mSensorModeInfo.num_raw_bits);
14627 return BAD_VALUE;
14628 }
14629
14630 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014631 }
14632
14633 // Get output configurations.
14634 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014635 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014636
14637 // Easel may need to output YUV output buffers if mPictureChannel was created.
14638 pbcamera::StreamConfiguration yuvOutputConfig;
14639 if (mPictureChannel != nullptr) {
14640 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14641 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14642 if (res != OK) {
14643            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14644 __FUNCTION__, strerror(-res), res);
14645
14646 return res;
14647 }
14648
14649 outputStreamConfigs.push_back(yuvOutputConfig);
14650 }
14651
14652 // TODO: consider other channels for YUV output buffers.
14653
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014654 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014655 if (res != OK) {
14656        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14657 strerror(-res), res);
14658 return res;
14659 }
14660
14661 return OK;
14662}
14663
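// Called by the Easel manager when the asynchronous HDR+ client open completes. Takes ownership
// of the client, pushes the camera's static metadata to it, and enables HDR+ mode.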
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014664void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
14665{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014666 if (client == nullptr) {
14667 ALOGE("%s: Opened client is null.", __FUNCTION__);
14668 return;
14669 }
14670
Chien-Yu Chene96475e2017-04-11 11:53:26 -070014671 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014672 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14673
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014674 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014675 if (!gHdrPlusClientOpening) {
14676        ALOGW("%s: HDR+ was disabled while the HDR+ client was being opened.", __FUNCTION__);
14677 return;
14678 }
14679
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014680 gHdrPlusClient = std::move(client);
14681 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014682 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014683
14684 // Set static metadata.
14685 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14686 if (res != OK) {
14687 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14688 __FUNCTION__, strerror(-res), res);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014689 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014690 gHdrPlusClient = nullptr;
14691 return;
14692 }
14693
14694 // Enable HDR+ mode.
14695 res = enableHdrPlusModeLocked();
14696 if (res != OK) {
14697        LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14698 }
14699}
14700
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014701void QCamera3HardwareInterface::onOpenFailed(status_t err)
14702{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014703 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014704 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014705 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014706 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014707}
14708
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014709void QCamera3HardwareInterface::onFatalError()
14710{
14711 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
14712
14713 // Set HAL state to error.
14714 pthread_mutex_lock(&mMutex);
14715 mState = ERROR;
14716 pthread_mutex_unlock(&mMutex);
14717
14718 handleCameraDeviceError();
14719}
14720
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -070014721void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
14722{
14723 ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
14724 __LINE__, requestId, apSensorTimestampNs);
14725
14726 mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
14727}
14728
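// Called by the HDR+ client when an HDR+ capture result is ready. The YUV output buffer is handed
// back to the picture channel for JPEG encoding, the result metadata is merged with the settings
// of the original still-capture request, and the pending HDR+ request bookkeeping is removed.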
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014729void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014730 const camera_metadata_t &resultMetadata)
14731{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014732 if (result != nullptr) {
14733 if (result->outputBuffers.size() != 1) {
14734            ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
14735 result->outputBuffers.size());
14736 return;
14737 }
14738
14739 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14740 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14741 result->outputBuffers[0].streamId);
14742 return;
14743 }
14744
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014745 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014746 HdrPlusPendingRequest pendingRequest;
14747 {
14748 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14749 auto req = mHdrPlusPendingRequests.find(result->requestId);
14750 pendingRequest = req->second;
14751 }
14752
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014753 // Update the result metadata with the settings of the HDR+ still capture request because
14754 // the result metadata belongs to a ZSL buffer.
14755 CameraMetadata metadata;
14756 metadata = &resultMetadata;
14757 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14758 camera_metadata_t* updatedResultMetadata = metadata.release();
14759
14760 QCamera3PicChannel *picChannel =
14761 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14762
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014763 // Check if dumping HDR+ YUV output is enabled.
14764 char prop[PROPERTY_VALUE_MAX];
14765 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14766 bool dumpYuvOutput = atoi(prop);
14767
14768 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014769 // Dump yuv buffer to a ppm file.
14770 pbcamera::StreamConfiguration outputConfig;
14771 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14772 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14773 if (rc == OK) {
14774 char buf[FILENAME_MAX] = {};
14775 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14776 result->requestId, result->outputBuffers[0].streamId,
14777 outputConfig.image.width, outputConfig.image.height);
14778
14779 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14780 } else {
14781 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14782 __FUNCTION__, strerror(-rc), rc);
14783 }
14784 }
14785
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014786 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14787 auto halMetadata = std::make_shared<metadata_buffer_t>();
14788 clear_metadata_buffer(halMetadata.get());
14789
14790 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14791 // encoding.
14792 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14793 halStreamId, /*minFrameDuration*/0);
14794 if (res == OK) {
14795 // Return the buffer to pic channel for encoding.
14796 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14797 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14798 halMetadata);
14799 } else {
14800 // Return the buffer without encoding.
14801 // TODO: This should not happen but we may want to report an error buffer to camera
14802 // service.
14803 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14804 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14805 strerror(-res), res);
14806 }
14807
14808 // Send HDR+ metadata to framework.
14809 {
14810 pthread_mutex_lock(&mMutex);
14811
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014812 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
14813 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014814 pthread_mutex_unlock(&mMutex);
14815 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014816
14817 // Remove the HDR+ pending request.
14818 {
14819 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14820 auto req = mHdrPlusPendingRequests.find(result->requestId);
14821 mHdrPlusPendingRequests.erase(req);
14822 }
14823 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014824}
14825
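// Called by the HDR+ client when an HDR+ capture fails. Returns the YUV buffer to the picture
// channel, reports buffer errors for all pending buffers of that frame number, and drops the
// corresponding pending request.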
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014826void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
14827{
14828 if (failedResult == nullptr) {
14829 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
14830 return;
14831 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014832
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014833 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014834
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014835 // Remove the pending HDR+ request.
14836 {
14837 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14838 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14839
14840 // Return the buffer to pic channel.
14841 QCamera3PicChannel *picChannel =
14842 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14843 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14844
14845 mHdrPlusPendingRequests.erase(pendingRequest);
14846 }
14847
14848 pthread_mutex_lock(&mMutex);
14849
14850 // Find the pending buffers.
14851 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
14852 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14853 if (pendingBuffers->frame_number == failedResult->requestId) {
14854 break;
14855 }
14856 pendingBuffers++;
14857 }
14858
14859 // Send out buffer errors for the pending buffers.
14860 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14861 std::vector<camera3_stream_buffer_t> streamBuffers;
14862 for (auto &buffer : pendingBuffers->mPendingBufferList) {
14863 // Prepare a stream buffer.
14864 camera3_stream_buffer_t streamBuffer = {};
14865 streamBuffer.stream = buffer.stream;
14866 streamBuffer.buffer = buffer.buffer;
14867 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14868 streamBuffer.acquire_fence = -1;
14869 streamBuffer.release_fence = -1;
14870
14871 streamBuffers.push_back(streamBuffer);
14872
14873 // Send out error buffer event.
14874 camera3_notify_msg_t notify_msg = {};
14875 notify_msg.type = CAMERA3_MSG_ERROR;
14876 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
14877 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
14878 notify_msg.message.error.error_stream = buffer.stream;
14879
14880 orchestrateNotify(&notify_msg);
14881 }
14882
14883 camera3_capture_result_t result = {};
14884 result.frame_number = pendingBuffers->frame_number;
14885 result.num_output_buffers = streamBuffers.size();
14886 result.output_buffers = &streamBuffers[0];
14887
14888 // Send out result with buffer errors.
14889 orchestrateResult(&result);
14890
14891 // Remove pending buffers.
14892 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
14893 }
14894
14895 // Remove pending request.
14896 auto halRequest = mPendingRequestsList.begin();
14897 while (halRequest != mPendingRequestsList.end()) {
14898 if (halRequest->frame_number == failedResult->requestId) {
14899 mPendingRequestsList.erase(halRequest);
14900 break;
14901 }
14902 halRequest++;
14903 }
14904
14905 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014906}
14907
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014908
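// ShutterDispatcher delivers shutter notifications to the framework in frame-number order, even
// when timestamps become ready out of order (e.g. HDR+ versus regular captures). Regular and
// reprocess shutters are tracked in separate maps because their frame numbers are ordered
// independently.
//
// Minimal usage sketch; the call sites below are illustrative, not the exact HAL flow:
//   dispatcher.expectShutter(frameNumber, /*isReprocess*/false); // when the request is queued
//   dispatcher.markShutterReady(frameNumber, timestampNs);       // when the timestamp arrives
//   dispatcher.clear(frameNumber);                               // if the request is dropped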
14909ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
14910 mParent(parent) {}
14911
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014912void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014913{
14914 std::lock_guard<std::mutex> lock(mLock);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014915
14916 if (isReprocess) {
14917 mReprocessShutters.emplace(frameNumber, Shutter());
14918 } else {
14919 mShutters.emplace(frameNumber, Shutter());
14920 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014921}
14922
14923void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
14924{
14925 std::lock_guard<std::mutex> lock(mLock);
14926
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014927 std::map<uint32_t, Shutter> *shutters = nullptr;
14928
14929 // Find the shutter entry.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014930 auto shutter = mShutters.find(frameNumber);
14931 if (shutter == mShutters.end()) {
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014932 shutter = mReprocessShutters.find(frameNumber);
14933 if (shutter == mReprocessShutters.end()) {
14934 // Shutter was already sent.
14935 return;
14936 }
14937 shutters = &mReprocessShutters;
14938 } else {
14939 shutters = &mShutters;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014940 }
14941
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014942 // Make this frame's shutter ready.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014943 shutter->second.ready = true;
14944 shutter->second.timestamp = timestamp;
14945
14946    // Iterate through the shutters in order and send them out until the first one that is not ready.
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014947 shutter = shutters->begin();
14948 while (shutter != shutters->end()) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014949 if (!shutter->second.ready) {
14950 // If this shutter is not ready, the following shutters can't be sent.
14951 break;
14952 }
14953
14954 camera3_notify_msg_t msg = {};
14955 msg.type = CAMERA3_MSG_SHUTTER;
14956 msg.message.shutter.frame_number = shutter->first;
14957 msg.message.shutter.timestamp = shutter->second.timestamp;
14958 mParent->orchestrateNotify(&msg);
14959
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014960 shutter = shutters->erase(shutter);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014961 }
14962}
14963
14964void ShutterDispatcher::clear(uint32_t frameNumber)
14965{
14966 std::lock_guard<std::mutex> lock(mLock);
14967 mShutters.erase(frameNumber);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014968 mReprocessShutters.erase(frameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014969}
14970
14971void ShutterDispatcher::clear()
14972{
14973 std::lock_guard<std::mutex> lock(mLock);
14974
14975 // Log errors for stale shutters.
14976 for (auto &shutter : mShutters) {
14977        ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRIu64,
14978 __FUNCTION__, shutter.first, shutter.second.ready,
14979 shutter.second.timestamp);
14980 }
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014981
14982 // Log errors for stale reprocess shutters.
14983 for (auto &shutter : mReprocessShutters) {
14984        ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRIu64,
14985 __FUNCTION__, shutter.first, shutter.second.ready,
14986 shutter.second.timestamp);
14987 }
14988
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014989 mShutters.clear();
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014990 mReprocessShutters.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014991}
14992
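// OutputBufferDispatcher returns output buffers to the framework in frame-number order on a
// per-stream basis: a buffer that completes early is held back until every earlier frame number
// for the same stream has been sent out. Expected flow: configureStreams() registers the streams,
// expectBuffer() is called for each request and stream, and markBufferReady() releases completed
// buffers in order.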
14993OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
14994 mParent(parent) {}
14995
14996status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
14997{
14998 std::lock_guard<std::mutex> lock(mLock);
14999 mStreamBuffers.clear();
15000 if (!streamList) {
15001 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15002 return -EINVAL;
15003 }
15004
15005 // Create a "frame-number -> buffer" map for each stream.
15006 for (uint32_t i = 0; i < streamList->num_streams; i++) {
15007 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15008 }
15009
15010 return OK;
15011}
15012
15013status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15014{
15015 std::lock_guard<std::mutex> lock(mLock);
15016
15017 // Find the "frame-number -> buffer" map for the stream.
15018 auto buffers = mStreamBuffers.find(stream);
15019 if (buffers == mStreamBuffers.end()) {
15020 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15021 return -EINVAL;
15022 }
15023
15024 // Create an unready buffer for this frame number.
15025 buffers->second.emplace(frameNumber, Buffer());
15026 return OK;
15027}
15028
15029void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15030 const camera3_stream_buffer_t &buffer)
15031{
15032 std::lock_guard<std::mutex> lock(mLock);
15033
15034 // Find the frame number -> buffer map for the stream.
15035 auto buffers = mStreamBuffers.find(buffer.stream);
15036 if (buffers == mStreamBuffers.end()) {
15037 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15038 return;
15039 }
15040
15041    // Find the unready buffer for this frame number and mark it ready.
15042 auto pendingBuffer = buffers->second.find(frameNumber);
15043 if (pendingBuffer == buffers->second.end()) {
15044 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15045 return;
15046 }
15047
15048 pendingBuffer->second.ready = true;
15049 pendingBuffer->second.buffer = buffer;
15050
15051    // Iterate through the buffers in order and send them out until the first one that is not ready.
15052 pendingBuffer = buffers->second.begin();
15053 while (pendingBuffer != buffers->second.end()) {
15054 if (!pendingBuffer->second.ready) {
15055 // If this buffer is not ready, the following buffers can't be sent.
15056 break;
15057 }
15058
15059 camera3_capture_result_t result = {};
15060 result.frame_number = pendingBuffer->first;
15061 result.num_output_buffers = 1;
15062 result.output_buffers = &pendingBuffer->second.buffer;
15063
15064        // Send out the result with the ready buffer.
15065 mParent->orchestrateResult(&result);
15066
15067 pendingBuffer = buffers->second.erase(pendingBuffer);
15068 }
15069}
15070
15071void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15072{
15073 std::lock_guard<std::mutex> lock(mLock);
15074
15075 // Log errors for stale buffers.
15076 for (auto &buffers : mStreamBuffers) {
15077 for (auto &buffer : buffers.second) {
15078 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15079 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15080 }
15081 buffers.second.clear();
15082 }
15083
15084 if (clearConfiguredStreams) {
15085 mStreamBuffers.clear();
15086 }
15087}
15088
Thierry Strudel3d639192016-09-09 11:52:26 -070015089}; //end namespace qcamera