/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold for detection of missing request buffers, in seconds
#define MISSING_REQUEST_BUF_TIMEOUT 10
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
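
// Editorial note (not from the original source): METADATA_MAP_SIZE yields the
// number of entries in one of the static mapping tables defined below, e.g.
//   size_t n = METADATA_MAP_SIZE(EFFECT_MODES_MAP);   // 9 entries
// and is the bound the translation helpers are expected to iterate over.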

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT   0
#define FACE_TOP    1
#define FACE_RIGHT  2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X  0
#define LEFT_EYE_Y  1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X     4
#define MOUTH_Y     5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index, which means that for HAL values that map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};
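
// Editorial sketch (not part of the original file): these QCameraMap tables are
// consumed by small linear-search helpers that translate between Android metadata
// enums and HAL enums. A minimal illustration, with a hypothetical function name
// and with the QCameraMap field names (fwk_name/hal_name) assumed from the
// template declaration in QCamera3HWI.h, would be:
//
//   template <typename fwkType, typename halType>
//   static int lookupHal(const QCamera3HardwareInterface::QCameraMap<fwkType, halType> *map,
//           size_t len, fwkType fwkValue, halType *halValue) {
//       for (size_t i = 0; i < len; i++) {
//           if (map[i].fwk_name == fwkValue) {
//               *halValue = map[i].hal_name;
//               return 0;
//           }
//       }
//       return -1; // not found
//   }
//
//   // e.g. lookupHal(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
//   //                ANDROID_CONTROL_EFFECT_MODE_SEPIA, &halEffect);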

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
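
// Context note (added, not from the original source): mCameraOps is what the
// camera service calls through once openCamera() below has published
// &mCameraDevice.common as the hw_device_t. A typical framework-side sequence
// is roughly:
//
//   camera3_device_t *dev = ...;                       // obtained from open()
//   dev->ops->initialize(dev, callback_ops);           // -> QCamera3HardwareInterface::initialize
//   dev->ops->configure_streams(dev, &stream_config);  // -> configure_streams
//   dev->ops->process_capture_request(dev, &request);  // -> process_capture_request
//
// The static entry points can recover `this` from dev->priv, which the
// constructor below sets via mCameraDevice.priv = this.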

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
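
// Usage note (added for clarity, not in the original file): callers mark
// Easel milestones with this helper, and the events only reach logcat when
// gEaselProfilingEnabled is set. For example, openCamera() below does:
//
//   logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
//   ...
//   logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");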

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    // TBD: check whether this hardcoding is needed (print to verify whether mctl fills this to 3)
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
        &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
995 LOGE("Error, failed to get sessiion id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
         mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that the ZSL stream
                 * set from the framework is always the full active array size,
                 * but it is not clear from the spec if the framework will always
                 * follow that. Also, we have logic to override to full array
                 * size, so keep the logic lenient for the moment.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1323
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001324/*===========================================================================
1325 * FUNCTION : validateUsageFlags
1326 *
1327 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1328 *
1329 * PARAMETERS :
1330 * @stream_list : streams to be configured
1331 *
1332 * RETURN :
1333 * NO_ERROR if the usage flags are supported
1334 * error code if usage flags are not supported
1335 *
1336 *==========================================================================*/
1337int QCamera3HardwareInterface::validateUsageFlags(
1338 const camera3_stream_configuration_t* streamList)
1339{
1340 for (size_t j = 0; j < streamList->num_streams; j++) {
1341 const camera3_stream_t *newStream = streamList->streams[j];
1342
1343 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1344 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1345 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1346 continue;
1347 }
1348
Jason Leec4cf5032017-05-24 18:31:41 -07001349 // Here we only care whether it's EIS3 or not
1350 char is_type_value[PROPERTY_VALUE_MAX];
1351 property_get("persist.camera.is_type", is_type_value, "4");
1352 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1353 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1354 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1355 isType = IS_TYPE_NONE;
1356
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001357 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1358 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1359 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1360 bool forcePreviewUBWC = true;
1361 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1362 forcePreviewUBWC = false;
1363 }
1364 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001365 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001366 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001367 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001368 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001369 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001370
1371 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1372 // So color spaces will always match.
1373
1374 // Check whether underlying formats of shared streams match.
1375 if (isVideo && isPreview && videoFormat != previewFormat) {
1376 LOGE("Combined video and preview usage flag is not supported");
1377 return -EINVAL;
1378 }
1379 if (isPreview && isZSL && previewFormat != zslFormat) {
1380 LOGE("Combined preview and zsl usage flag is not supported");
1381 return -EINVAL;
1382 }
1383 if (isVideo && isZSL && videoFormat != zslFormat) {
1384 LOGE("Combined video and zsl usage flag is not supported");
1385 return -EINVAL;
1386 }
1387 }
1388 return NO_ERROR;
1389}
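
// Illustrative rejection case (a sketch, not an exhaustive rule): a single
// IMPLEMENTATION_DEFINED surface carrying both PREVIEW and VIDEO_ENCODER usage
// bits is rejected above whenever getStreamDefaultFormat() resolves the preview
// and video variants of that size to different internal formats.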
1390
1391/*===========================================================================
1392 * FUNCTION : validateUsageFlagsForEis
1393 *
1394 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1395 *
1396 * PARAMETERS :
1397 * @stream_list : streams to be configured
1398 *
1399 * RETURN :
1400 * NO_ERROR if the usage flags are supported
1401 * error code if usage flags are not supported
1402 *
1403 *==========================================================================*/
1404int QCamera3HardwareInterface::validateUsageFlagsForEis(
1405 const camera3_stream_configuration_t* streamList)
1406{
1407 for (size_t j = 0; j < streamList->num_streams; j++) {
1408 const camera3_stream_t *newStream = streamList->streams[j];
1409
1410 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1411 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1412
1413        // Because EIS is "hard-coded" for certain use cases, and the current
1414 // implementation doesn't support shared preview and video on the same
1415 // stream, return failure if EIS is forced on.
1416 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1417 LOGE("Combined video and preview usage flag is not supported due to EIS");
1418 return -EINVAL;
1419 }
1420 }
1421 return NO_ERROR;
1422}
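
// Example: with EIS enabled and an EIS-capable configured size, a single surface
// carrying both PREVIEW and VIDEO_ENCODER usage bits fails this check, since
// shared preview/video on one stream is not supported together with EIS.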
1423
Thierry Strudel3d639192016-09-09 11:52:26 -07001424/*==============================================================================
1425 * FUNCTION : isSupportChannelNeeded
1426 *
1427 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1428 *
1429 * PARAMETERS :
1430 * @stream_list : streams to be configured
1431 * @stream_config_info : the config info for streams to be configured
1432 *
1433 * RETURN     : Boolean true/false decision
1434 *
1435 *==========================================================================*/
1436bool QCamera3HardwareInterface::isSupportChannelNeeded(
1437 camera3_stream_configuration_t *streamList,
1438 cam_stream_size_info_t stream_config_info)
1439{
1440 uint32_t i;
1441 bool pprocRequested = false;
1442 /* Check for conditions where PProc pipeline does not have any streams*/
1443 for (i = 0; i < stream_config_info.num_streams; i++) {
1444 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1445 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1446 pprocRequested = true;
1447 break;
1448 }
1449 }
1450
1451 if (pprocRequested == false )
1452 return true;
1453
1454 /* Dummy stream needed if only raw or jpeg streams present */
1455 for (i = 0; i < streamList->num_streams; i++) {
1456 switch(streamList->streams[i]->format) {
1457 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1458 case HAL_PIXEL_FORMAT_RAW10:
1459 case HAL_PIXEL_FORMAT_RAW16:
1460 case HAL_PIXEL_FORMAT_BLOB:
1461 break;
1462 default:
1463 return false;
1464 }
1465 }
1466 return true;
1467}
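
// Illustrative behavior: with post-processing requested, a configuration that
// contains only RAW and/or BLOB streams returns true (a dummy support channel is
// needed), while any additional YUV or implementation-defined stream makes this
// return false.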
1468
1469/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001470 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001471 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001472 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001473 *
1474 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001475 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001476 *
1477 * RETURN : int32_t type of status
1478 * NO_ERROR -- success
1479 *              non-zero failure code
1480 *
1481 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001482int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001483{
1484 int32_t rc = NO_ERROR;
1485
1486 cam_dimension_t max_dim = {0, 0};
1487 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1488 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1489 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1490 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1491 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1492 }
1493
1494 clear_metadata_buffer(mParameters);
1495
1496 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1497 max_dim);
1498 if (rc != NO_ERROR) {
1499 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1500 return rc;
1501 }
1502
1503 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1504 if (rc != NO_ERROR) {
1505 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1506 return rc;
1507 }
1508
1509 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001510 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001511
1512 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1513 mParameters);
1514 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001515 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001516 return rc;
1517 }
1518
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001519 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001520 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1521 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1522 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1523 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1524 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001525
1526 return rc;
1527}
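
// Sketch of the expected internal call pattern, once mStreamConfigInfo has been
// populated during stream configuration:
//     cam_sensor_mode_info_t sensorModeInfo = {};
//     if (getSensorModeInfo(sensorModeInfo) == NO_ERROR) {
//         // sensorModeInfo now describes the mode chosen for the largest stream,
//         // e.g. active/pixel array sizes and op_pixel_clk.
//     }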
1528
1529/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001530 * FUNCTION : getCurrentSensorModeInfo
1531 *
1532 * DESCRIPTION: Get sensor mode information that is currently selected.
1533 *
1534 * PARAMETERS :
1535 * @sensorModeInfo : sensor mode information (output)
1536 *
1537 * RETURN : int32_t type of status
1538 * NO_ERROR -- success
1539 *              non-zero failure code
1540 *
1541 *==========================================================================*/
1542int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1543{
1544 int32_t rc = NO_ERROR;
1545
1546 clear_metadata_buffer(mParameters);
1547 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1548
1549 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1550 mParameters);
1551 if (rc != NO_ERROR) {
1552 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
1553 return rc;
1554 }
1555
1556 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1557 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1558 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1559 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1560 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1561 sensorModeInfo.num_raw_bits);
1562
1563 return rc;
1564}
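
// Note: unlike getSensorModeInfo() above, this variant does not program
// CAM_INTF_PARM_MAX_DIMENSION first; it only reads back the sensor mode that is
// currently active via CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO.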
1565
1566/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001567 * FUNCTION : addToPPFeatureMask
1568 *
1569 * DESCRIPTION: add additional features to pp feature mask based on
1570 * stream type and usecase
1571 *
1572 * PARAMETERS :
1573 * @stream_format : stream type for feature mask
1574 * @stream_idx : stream idx within postprocess_mask list to change
1575 *
1576 * RETURN     : None
1577 *
1578 *==========================================================================*/
1579void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1580 uint32_t stream_idx)
1581{
1582 char feature_mask_value[PROPERTY_VALUE_MAX];
1583 cam_feature_mask_t feature_mask;
1584 int args_converted;
1585 int property_len;
1586
1587 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001588#ifdef _LE_CAMERA_
1589 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1590 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1591 property_len = property_get("persist.camera.hal3.feature",
1592 feature_mask_value, swtnr_feature_mask_value);
1593#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001594 property_len = property_get("persist.camera.hal3.feature",
1595 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001596#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001597 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1598 (feature_mask_value[1] == 'x')) {
1599 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1600 } else {
1601 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1602 }
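    // Both notations parse to the same mask value, e.g. (hypothetical value)
    // "0x200000" and its decimal form "2097152".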
1603 if (1 != args_converted) {
1604 feature_mask = 0;
1605 LOGE("Wrong feature mask %s", feature_mask_value);
1606 return;
1607 }
1608
1609 switch (stream_format) {
1610 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1611 /* Add LLVD to pp feature mask only if video hint is enabled */
1612 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1613 mStreamConfigInfo.postprocess_mask[stream_idx]
1614 |= CAM_QTI_FEATURE_SW_TNR;
1615 LOGH("Added SW TNR to pp feature mask");
1616 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1617 mStreamConfigInfo.postprocess_mask[stream_idx]
1618 |= CAM_QCOM_FEATURE_LLVD;
1619 LOGH("Added LLVD SeeMore to pp feature mask");
1620 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001621 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1622 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1623 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1624 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001625 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1626 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1627 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1628 CAM_QTI_FEATURE_BINNING_CORRECTION;
1629 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001630 break;
1631 }
1632 default:
1633 break;
1634 }
1635 LOGD("PP feature mask %llx",
1636 mStreamConfigInfo.postprocess_mask[stream_idx]);
1637}
1638
1639/*==============================================================================
1640 * FUNCTION : updateFpsInPreviewBuffer
1641 *
1642 * DESCRIPTION: update FPS information in preview buffer.
1643 *
1644 * PARAMETERS :
1645 * @metadata : pointer to metadata buffer
1646 * @frame_number: frame_number to look for in pending buffer list
1647 *
1648 * RETURN : None
1649 *
1650 *==========================================================================*/
1651void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1652 uint32_t frame_number)
1653{
1654 // Mark all pending buffers for this particular request
1655 // with corresponding framerate information
1656 for (List<PendingBuffersInRequest>::iterator req =
1657 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1658 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1659 for(List<PendingBufferInfo>::iterator j =
1660 req->mPendingBufferList.begin();
1661 j != req->mPendingBufferList.end(); j++) {
1662 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1663 if ((req->frame_number == frame_number) &&
1664 (channel->getStreamTypeMask() &
1665 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1666 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1667 CAM_INTF_PARM_FPS_RANGE, metadata) {
1668 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1669 struct private_handle_t *priv_handle =
1670 (struct private_handle_t *)(*(j->buffer));
1671 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1672 }
1673 }
1674 }
1675 }
1676}
1677
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001678/*==============================================================================
1679 * FUNCTION : updateTimeStampInPendingBuffers
1680 *
1681 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1682 * of a frame number
1683 *
1684 * PARAMETERS :
1685 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1686 * @timestamp : timestamp to be set
1687 *
1688 * RETURN : None
1689 *
1690 *==========================================================================*/
1691void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1692 uint32_t frameNumber, nsecs_t timestamp)
1693{
1694 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1695 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1696 if (req->frame_number != frameNumber)
1697 continue;
1698
1699 for (auto k = req->mPendingBufferList.begin();
1700 k != req->mPendingBufferList.end(); k++ ) {
1701 struct private_handle_t *priv_handle =
1702 (struct private_handle_t *) (*(k->buffer));
1703 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1704 }
1705 }
1706 return;
1707}
1708
Thierry Strudel3d639192016-09-09 11:52:26 -07001709/*===========================================================================
1710 * FUNCTION : configureStreams
1711 *
1712 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1713 * and output streams.
1714 *
1715 * PARAMETERS :
1716 * @stream_list : streams to be configured
1717 *
1718 * RETURN :
1719 *
1720 *==========================================================================*/
1721int QCamera3HardwareInterface::configureStreams(
1722 camera3_stream_configuration_t *streamList)
1723{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001724 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001725 int rc = 0;
1726
1727 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001728 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001729 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001730 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001731
1732 return rc;
1733}
1734
1735/*===========================================================================
1736 * FUNCTION : configureStreamsPerfLocked
1737 *
1738 * DESCRIPTION: configureStreams while perfLock is held.
1739 *
1740 * PARAMETERS :
1741 * @stream_list : streams to be configured
1742 *
1743 * RETURN : int32_t type of status
1744 * NO_ERROR -- success
1745 *              non-zero failure code
1746 *==========================================================================*/
1747int QCamera3HardwareInterface::configureStreamsPerfLocked(
1748 camera3_stream_configuration_t *streamList)
1749{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001750 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001751 int rc = 0;
1752
1753 // Sanity check stream_list
1754 if (streamList == NULL) {
1755 LOGE("NULL stream configuration");
1756 return BAD_VALUE;
1757 }
1758 if (streamList->streams == NULL) {
1759 LOGE("NULL stream list");
1760 return BAD_VALUE;
1761 }
1762
1763 if (streamList->num_streams < 1) {
1764 LOGE("Bad number of streams requested: %d",
1765 streamList->num_streams);
1766 return BAD_VALUE;
1767 }
1768
1769 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1770 LOGE("Maximum number of streams %d exceeded: %d",
1771 MAX_NUM_STREAMS, streamList->num_streams);
1772 return BAD_VALUE;
1773 }
1774
Jason Leec4cf5032017-05-24 18:31:41 -07001775 mOpMode = streamList->operation_mode;
1776 LOGD("mOpMode: %d", mOpMode);
1777
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001778 rc = validateUsageFlags(streamList);
1779 if (rc != NO_ERROR) {
1780 return rc;
1781 }
1782
Thierry Strudel3d639192016-09-09 11:52:26 -07001783    /* first invalidate all the streams in mStreamInfo;
1784 * if they appear again, they will be validated */
1785 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1786 it != mStreamInfo.end(); it++) {
1787 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1788 if (channel) {
1789 channel->stop();
1790 }
1791 (*it)->status = INVALID;
1792 }
1793
1794 if (mRawDumpChannel) {
1795 mRawDumpChannel->stop();
1796 delete mRawDumpChannel;
1797 mRawDumpChannel = NULL;
1798 }
1799
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001800 if (mHdrPlusRawSrcChannel) {
1801 mHdrPlusRawSrcChannel->stop();
1802 delete mHdrPlusRawSrcChannel;
1803 mHdrPlusRawSrcChannel = NULL;
1804 }
1805
Thierry Strudel3d639192016-09-09 11:52:26 -07001806 if (mSupportChannel)
1807 mSupportChannel->stop();
1808
1809 if (mAnalysisChannel) {
1810 mAnalysisChannel->stop();
1811 }
1812 if (mMetadataChannel) {
1813        /* If mStreamInfo is not empty, there is a metadata stream */
1814 mMetadataChannel->stop();
1815 }
1816 if (mChannelHandle) {
1817 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1818 mChannelHandle);
1819 LOGD("stopping channel %d", mChannelHandle);
1820 }
1821
1822 pthread_mutex_lock(&mMutex);
1823
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07001824 mPictureChannel = NULL;
1825
Thierry Strudel3d639192016-09-09 11:52:26 -07001826 // Check state
1827 switch (mState) {
1828 case INITIALIZED:
1829 case CONFIGURED:
1830 case STARTED:
1831 /* valid state */
1832 break;
1833 default:
1834 LOGE("Invalid state %d", mState);
1835 pthread_mutex_unlock(&mMutex);
1836 return -ENODEV;
1837 }
1838
1839 /* Check whether we have video stream */
1840 m_bIs4KVideo = false;
1841 m_bIsVideo = false;
1842 m_bEisSupportedSize = false;
1843 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001844 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001845 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001846 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001847 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001848 uint32_t videoWidth = 0U;
1849 uint32_t videoHeight = 0U;
1850 size_t rawStreamCnt = 0;
1851 size_t stallStreamCnt = 0;
1852 size_t processedStreamCnt = 0;
1853 // Number of streams on ISP encoder path
1854 size_t numStreamsOnEncoder = 0;
1855 size_t numYuv888OnEncoder = 0;
1856 bool bYuv888OverrideJpeg = false;
1857 cam_dimension_t largeYuv888Size = {0, 0};
1858 cam_dimension_t maxViewfinderSize = {0, 0};
1859 bool bJpegExceeds4K = false;
1860 bool bJpegOnEncoder = false;
1861 bool bUseCommonFeatureMask = false;
1862 cam_feature_mask_t commonFeatureMask = 0;
1863 bool bSmallJpegSize = false;
1864 uint32_t width_ratio;
1865 uint32_t height_ratio;
1866 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1867 camera3_stream_t *inputStream = NULL;
1868 bool isJpeg = false;
1869 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001870 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001871 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001872
1873 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1874
1875 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001876 uint8_t eis_prop_set;
1877 uint32_t maxEisWidth = 0;
1878 uint32_t maxEisHeight = 0;
1879
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001880 // Initialize all instant AEC related variables
1881 mInstantAEC = false;
1882 mResetInstantAEC = false;
1883 mInstantAECSettledFrameNumber = 0;
1884 mAecSkipDisplayFrameBound = 0;
1885 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001886 mCurrFeatureState = 0;
1887 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001888
Thierry Strudel3d639192016-09-09 11:52:26 -07001889 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1890
1891 size_t count = IS_TYPE_MAX;
1892 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1893 for (size_t i = 0; i < count; i++) {
1894 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001895 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1896 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001897 break;
1898 }
1899 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001900
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001901 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001902 maxEisWidth = MAX_EIS_WIDTH;
1903 maxEisHeight = MAX_EIS_HEIGHT;
1904 }
1905
1906 /* EIS setprop control */
1907 char eis_prop[PROPERTY_VALUE_MAX];
1908 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001909 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001910 eis_prop_set = (uint8_t)atoi(eis_prop);
1911
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001912 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001913 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1914
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001915 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1916 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001917
Thierry Strudel3d639192016-09-09 11:52:26 -07001918 /* stream configurations */
1919 for (size_t i = 0; i < streamList->num_streams; i++) {
1920 camera3_stream_t *newStream = streamList->streams[i];
1921 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1922 "height = %d, rotation = %d, usage = 0x%x",
1923 i, newStream->stream_type, newStream->format,
1924 newStream->width, newStream->height, newStream->rotation,
1925 newStream->usage);
1926 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1927 newStream->stream_type == CAMERA3_STREAM_INPUT){
1928 isZsl = true;
1929 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001930 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1931 IS_USAGE_PREVIEW(newStream->usage)) {
1932 isPreview = true;
1933 }
1934
Thierry Strudel3d639192016-09-09 11:52:26 -07001935 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1936 inputStream = newStream;
1937 }
1938
Emilian Peev7650c122017-01-19 08:24:33 -08001939 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1940 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001941 isJpeg = true;
1942 jpegSize.width = newStream->width;
1943 jpegSize.height = newStream->height;
1944 if (newStream->width > VIDEO_4K_WIDTH ||
1945 newStream->height > VIDEO_4K_HEIGHT)
1946 bJpegExceeds4K = true;
1947 }
1948
1949 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1950 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1951 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001952 // In HAL3 we can have multiple different video streams.
1953 // The variables video width and height are used below as
1954 // dimensions of the biggest of them
1955 if (videoWidth < newStream->width ||
1956 videoHeight < newStream->height) {
1957 videoWidth = newStream->width;
1958 videoHeight = newStream->height;
1959 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001960 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1961 (VIDEO_4K_HEIGHT <= newStream->height)) {
1962 m_bIs4KVideo = true;
1963 }
1964 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1965 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001966
Thierry Strudel3d639192016-09-09 11:52:26 -07001967 }
1968 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1969 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1970 switch (newStream->format) {
1971 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001972 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1973 depthPresent = true;
1974 break;
1975 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001976 stallStreamCnt++;
1977 if (isOnEncoder(maxViewfinderSize, newStream->width,
1978 newStream->height)) {
1979 numStreamsOnEncoder++;
1980 bJpegOnEncoder = true;
1981 }
1982 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1983 newStream->width);
1984 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1985                        newStream->height);
1986 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1987 "FATAL: max_downscale_factor cannot be zero and so assert");
1988 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1989 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1990 LOGH("Setting small jpeg size flag to true");
1991 bSmallJpegSize = true;
1992 }
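                // Worked example with hypothetical numbers: a 4000x3000 active array,
                // a max_downscale_factor of 8 and a 320x240 JPEG stream give
                // CEIL_DIVISION(4000, 320) = 13 > 8, so bSmallJpegSize is set.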
1993 break;
1994 case HAL_PIXEL_FORMAT_RAW10:
1995 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1996 case HAL_PIXEL_FORMAT_RAW16:
1997 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001998 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1999 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2000 pdStatCount++;
2001 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002002 break;
2003 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2004 processedStreamCnt++;
2005 if (isOnEncoder(maxViewfinderSize, newStream->width,
2006 newStream->height)) {
2007 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2008 !IS_USAGE_ZSL(newStream->usage)) {
2009 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2010 }
2011 numStreamsOnEncoder++;
2012 }
2013 break;
2014 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2015 processedStreamCnt++;
2016 if (isOnEncoder(maxViewfinderSize, newStream->width,
2017 newStream->height)) {
2018 // If Yuv888 size is not greater than 4K, set feature mask
2019                // to SUPERSET so that it supports concurrent requests on
2020 // YUV and JPEG.
2021 if (newStream->width <= VIDEO_4K_WIDTH &&
2022 newStream->height <= VIDEO_4K_HEIGHT) {
2023 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2024 }
2025 numStreamsOnEncoder++;
2026 numYuv888OnEncoder++;
2027 largeYuv888Size.width = newStream->width;
2028 largeYuv888Size.height = newStream->height;
2029 }
2030 break;
2031 default:
2032 processedStreamCnt++;
2033 if (isOnEncoder(maxViewfinderSize, newStream->width,
2034 newStream->height)) {
2035 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2036 numStreamsOnEncoder++;
2037 }
2038 break;
2039 }
2040
2041 }
2042 }
2043
2044 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2045 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2046 !m_bIsVideo) {
2047 m_bEisEnable = false;
2048 }
2049
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002050 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2051 pthread_mutex_unlock(&mMutex);
2052 return -EINVAL;
2053 }
2054
Thierry Strudel54dc9782017-02-15 12:12:10 -08002055 uint8_t forceEnableTnr = 0;
2056 char tnr_prop[PROPERTY_VALUE_MAX];
2057 memset(tnr_prop, 0, sizeof(tnr_prop));
2058 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2059 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2060
Thierry Strudel3d639192016-09-09 11:52:26 -07002061 /* Logic to enable/disable TNR based on specific config size/etc.*/
2062 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002063 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2064 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002065 else if (forceEnableTnr)
2066 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002067
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002068 char videoHdrProp[PROPERTY_VALUE_MAX];
2069 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2070 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2071 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2072
2073 if (hdr_mode_prop == 1 && m_bIsVideo &&
2074 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2075 m_bVideoHdrEnabled = true;
2076 else
2077 m_bVideoHdrEnabled = false;
2078
2079
Thierry Strudel3d639192016-09-09 11:52:26 -07002080 /* Check if num_streams is sane */
2081 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2082 rawStreamCnt > MAX_RAW_STREAMS ||
2083 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2084 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2085 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2086 pthread_mutex_unlock(&mMutex);
2087 return -EINVAL;
2088 }
2089 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002090 if (isZsl && m_bIs4KVideo) {
2091 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002092 pthread_mutex_unlock(&mMutex);
2093 return -EINVAL;
2094 }
2095 /* Check if stream sizes are sane */
2096 if (numStreamsOnEncoder > 2) {
2097 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2098 pthread_mutex_unlock(&mMutex);
2099 return -EINVAL;
2100 } else if (1 < numStreamsOnEncoder){
2101 bUseCommonFeatureMask = true;
2102 LOGH("Multiple streams above max viewfinder size, common mask needed");
2103 }
2104
2105 /* Check if BLOB size is greater than 4k in 4k recording case */
2106 if (m_bIs4KVideo && bJpegExceeds4K) {
2107 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2108 pthread_mutex_unlock(&mMutex);
2109 return -EINVAL;
2110 }
2111
Emilian Peev7650c122017-01-19 08:24:33 -08002112 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2113 depthPresent) {
2114 LOGE("HAL doesn't support depth streams in HFR mode!");
2115 pthread_mutex_unlock(&mMutex);
2116 return -EINVAL;
2117 }
2118
Thierry Strudel3d639192016-09-09 11:52:26 -07002119 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2120 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2121 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2122 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2123 // configurations:
2124 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2125 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2126 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2127 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2128 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2129 __func__);
2130 pthread_mutex_unlock(&mMutex);
2131 return -EINVAL;
2132 }
2133
2134 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2135    // the YUV stream's size is greater than the JPEG size, set common
2136 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2137 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2138 jpegSize.width, jpegSize.height) &&
2139 largeYuv888Size.width > jpegSize.width &&
2140 largeYuv888Size.height > jpegSize.height) {
2141 bYuv888OverrideJpeg = true;
2142 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2143 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2144 }
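    // Example with hypothetical sizes: a 4608x3456 YCbCr_420_888 stream configured
    // alongside a 4000x3000 JPEG stream (both on the encoder path) sets
    // bYuv888OverrideJpeg, so the snapshot stream dimensions are overridden to the
    // larger YUV size further below.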
2145
2146 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2147 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2148 commonFeatureMask);
2149 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2150 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2151
2152 rc = validateStreamDimensions(streamList);
2153 if (rc == NO_ERROR) {
2154 rc = validateStreamRotations(streamList);
2155 }
2156 if (rc != NO_ERROR) {
2157 LOGE("Invalid stream configuration requested!");
2158 pthread_mutex_unlock(&mMutex);
2159 return rc;
2160 }
2161
Emilian Peev0f3c3162017-03-15 12:57:46 +00002162 if (1 < pdStatCount) {
2163 LOGE("HAL doesn't support multiple PD streams");
2164 pthread_mutex_unlock(&mMutex);
2165 return -EINVAL;
2166 }
2167
2168 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2169 (1 == pdStatCount)) {
2170 LOGE("HAL doesn't support PD streams in HFR mode!");
2171 pthread_mutex_unlock(&mMutex);
2172 return -EINVAL;
2173 }
2174
Thierry Strudel3d639192016-09-09 11:52:26 -07002175 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2176 for (size_t i = 0; i < streamList->num_streams; i++) {
2177 camera3_stream_t *newStream = streamList->streams[i];
2178 LOGH("newStream type = %d, stream format = %d "
2179 "stream size : %d x %d, stream rotation = %d",
2180 newStream->stream_type, newStream->format,
2181 newStream->width, newStream->height, newStream->rotation);
2182        // if the stream is already in mStreamInfo, validate it
2183 bool stream_exists = false;
2184 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2185 it != mStreamInfo.end(); it++) {
2186 if ((*it)->stream == newStream) {
2187 QCamera3ProcessingChannel *channel =
2188 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2189 stream_exists = true;
2190 if (channel)
2191 delete channel;
2192 (*it)->status = VALID;
2193 (*it)->stream->priv = NULL;
2194 (*it)->channel = NULL;
2195 }
2196 }
2197 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2198 //new stream
2199 stream_info_t* stream_info;
2200 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2201 if (!stream_info) {
2202 LOGE("Could not allocate stream info");
2203 rc = -ENOMEM;
2204 pthread_mutex_unlock(&mMutex);
2205 return rc;
2206 }
2207 stream_info->stream = newStream;
2208 stream_info->status = VALID;
2209 stream_info->channel = NULL;
2210 mStreamInfo.push_back(stream_info);
2211 }
2212 /* Covers Opaque ZSL and API1 F/W ZSL */
2213 if (IS_USAGE_ZSL(newStream->usage)
2214 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2215 if (zslStream != NULL) {
2216 LOGE("Multiple input/reprocess streams requested!");
2217 pthread_mutex_unlock(&mMutex);
2218 return BAD_VALUE;
2219 }
2220 zslStream = newStream;
2221 }
2222 /* Covers YUV reprocess */
2223 if (inputStream != NULL) {
2224 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2225 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2226 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2227 && inputStream->width == newStream->width
2228 && inputStream->height == newStream->height) {
2229 if (zslStream != NULL) {
2230                /* This scenario indicates multiple YUV streams with the same size
2231                 * as the input stream have been requested. Since the zsl stream handle
2232                 * is solely used for overriding the size of streams which share
2233                 * h/w streams, we just make a guess here as to which of the streams
2234                 * is the ZSL stream. This will be refactored once we have generic
2235                 * logic for streams sharing encoder output.
2236                 */
2237 LOGH("Warning, Multiple ip/reprocess streams requested!");
2238 }
2239 zslStream = newStream;
2240 }
2241 }
2242 }
2243
2244 /* If a zsl stream is set, we know that we have configured at least one input or
2245 bidirectional stream */
2246 if (NULL != zslStream) {
2247 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2248 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2249 mInputStreamInfo.format = zslStream->format;
2250 mInputStreamInfo.usage = zslStream->usage;
2251 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2252 mInputStreamInfo.dim.width,
2253 mInputStreamInfo.dim.height,
2254 mInputStreamInfo.format, mInputStreamInfo.usage);
2255 }
2256
2257 cleanAndSortStreamInfo();
2258 if (mMetadataChannel) {
2259 delete mMetadataChannel;
2260 mMetadataChannel = NULL;
2261 }
2262 if (mSupportChannel) {
2263 delete mSupportChannel;
2264 mSupportChannel = NULL;
2265 }
2266
2267 if (mAnalysisChannel) {
2268 delete mAnalysisChannel;
2269 mAnalysisChannel = NULL;
2270 }
2271
2272 if (mDummyBatchChannel) {
2273 delete mDummyBatchChannel;
2274 mDummyBatchChannel = NULL;
2275 }
2276
Emilian Peev7650c122017-01-19 08:24:33 -08002277 if (mDepthChannel) {
2278 mDepthChannel = NULL;
2279 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002280 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002281
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002282 mShutterDispatcher.clear();
2283 mOutputBufferDispatcher.clear();
2284
Thierry Strudel2896d122017-02-23 19:18:03 -08002285 char is_type_value[PROPERTY_VALUE_MAX];
2286 property_get("persist.camera.is_type", is_type_value, "4");
2287 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2288
Binhao Line406f062017-05-03 14:39:44 -07002289 char property_value[PROPERTY_VALUE_MAX];
2290 property_get("persist.camera.gzoom.at", property_value, "0");
2291 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002292 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2293 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2294 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2295 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002296
2297 property_get("persist.camera.gzoom.4k", property_value, "0");
2298 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2299
Thierry Strudel3d639192016-09-09 11:52:26 -07002300 //Create metadata channel and initialize it
2301 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2302 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2303 gCamCapability[mCameraId]->color_arrangement);
2304 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2305 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002306 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002307 if (mMetadataChannel == NULL) {
2308 LOGE("failed to allocate metadata channel");
2309 rc = -ENOMEM;
2310 pthread_mutex_unlock(&mMutex);
2311 return rc;
2312 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002313 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002314 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2315 if (rc < 0) {
2316 LOGE("metadata channel initialization failed");
2317 delete mMetadataChannel;
2318 mMetadataChannel = NULL;
2319 pthread_mutex_unlock(&mMutex);
2320 return rc;
2321 }
2322
Thierry Strudel2896d122017-02-23 19:18:03 -08002323 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002324 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002325 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002326 // Keep track of preview/video streams indices.
2327 // There could be more than one preview streams, but only one video stream.
2328 int32_t video_stream_idx = -1;
2329 int32_t preview_stream_idx[streamList->num_streams];
2330 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002331 bool previewTnr[streamList->num_streams];
2332 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2333 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2334 // Loop through once to determine preview TNR conditions before creating channels.
2335 for (size_t i = 0; i < streamList->num_streams; i++) {
2336 camera3_stream_t *newStream = streamList->streams[i];
2337 uint32_t stream_usage = newStream->usage;
2338 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2339 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2340 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2341 video_stream_idx = (int32_t)i;
2342 else
2343 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2344 }
2345 }
2346 // By default, preview stream TNR is disabled.
2347 // Enable TNR to the preview stream if all conditions below are satisfied:
2348 // 1. preview resolution == video resolution.
2349 // 2. video stream TNR is enabled.
2350 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2351 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2352 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2353 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2354 if (m_bTnrEnabled && m_bTnrVideo &&
2355 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2356 video_stream->width == preview_stream->width &&
2357 video_stream->height == preview_stream->height) {
2358 previewTnr[preview_stream_idx[i]] = true;
2359 }
2360 }
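    // Example with hypothetical sizes: a 1920x1080 preview paired with a 1920x1080
    // video stream, with m_bTnrEnabled && m_bTnrVideo and either EIS 2.0 selected
    // or the front camera, gets previewTnr[] set; a 1440x1080 preview next to a
    // 1920x1080 video stream does not.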
2361
Thierry Strudel3d639192016-09-09 11:52:26 -07002362 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2363 /* Allocate channel objects for the requested streams */
2364 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002365
Thierry Strudel3d639192016-09-09 11:52:26 -07002366 camera3_stream_t *newStream = streamList->streams[i];
2367 uint32_t stream_usage = newStream->usage;
2368 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2369 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2370 struct camera_info *p_info = NULL;
2371 pthread_mutex_lock(&gCamLock);
2372 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2373 pthread_mutex_unlock(&gCamLock);
2374 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2375 || IS_USAGE_ZSL(newStream->usage)) &&
2376 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002377 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002378 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002379 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2380 if (bUseCommonFeatureMask)
2381 zsl_ppmask = commonFeatureMask;
2382 else
2383 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002384 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002385 if (numStreamsOnEncoder > 0)
2386 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2387 else
2388 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002389 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002390 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002391 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002392 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002393 LOGH("Input stream configured, reprocess config");
2394 } else {
2395 //for non zsl streams find out the format
2396 switch (newStream->format) {
2397 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2398 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002399 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002400 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2401 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2402 /* add additional features to pp feature mask */
2403 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2404 mStreamConfigInfo.num_streams);
2405
2406 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2407 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2408 CAM_STREAM_TYPE_VIDEO;
2409 if (m_bTnrEnabled && m_bTnrVideo) {
2410 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2411 CAM_QCOM_FEATURE_CPP_TNR;
2412 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2413 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2414 ~CAM_QCOM_FEATURE_CDS;
2415 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002416 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2417 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2418 CAM_QTI_FEATURE_PPEISCORE;
2419 }
Binhao Line406f062017-05-03 14:39:44 -07002420 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2421 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2422 CAM_QCOM_FEATURE_GOOG_ZOOM;
2423 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002424 } else {
2425 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2426 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002427 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002428 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2429 CAM_QCOM_FEATURE_CPP_TNR;
2430 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2431 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2432 ~CAM_QCOM_FEATURE_CDS;
2433 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002434 if(!m_bSwTnrPreview) {
2435 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2436 ~CAM_QTI_FEATURE_SW_TNR;
2437 }
Binhao Line406f062017-05-03 14:39:44 -07002438 if (is_goog_zoom_preview_enabled) {
2439 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2440 CAM_QCOM_FEATURE_GOOG_ZOOM;
2441 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002442 padding_info.width_padding = mSurfaceStridePadding;
2443 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002444 previewSize.width = (int32_t)newStream->width;
2445 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002446 }
2447 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2448 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2449 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2450 newStream->height;
2451 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2452 newStream->width;
2453 }
2454 }
2455 break;
2456 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002457 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002458 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2459 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2460 if (bUseCommonFeatureMask)
2461 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2462 commonFeatureMask;
2463 else
2464 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2465 CAM_QCOM_FEATURE_NONE;
2466 } else {
2467 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2468 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2469 }
2470 break;
2471 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002472 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002473 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2474 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2475 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2476 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2477 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002478 /* Remove rotation if it is not supported
2479 for 4K LiveVideo snapshot case (online processing) */
2480 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2481 CAM_QCOM_FEATURE_ROTATION)) {
2482 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2483 &= ~CAM_QCOM_FEATURE_ROTATION;
2484 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002485 } else {
2486 if (bUseCommonFeatureMask &&
2487 isOnEncoder(maxViewfinderSize, newStream->width,
2488 newStream->height)) {
2489 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2490 } else {
2491 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2492 }
2493 }
2494 if (isZsl) {
2495 if (zslStream) {
2496 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2497 (int32_t)zslStream->width;
2498 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2499 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002500 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2501 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002502 } else {
2503 LOGE("Error, No ZSL stream identified");
2504 pthread_mutex_unlock(&mMutex);
2505 return -EINVAL;
2506 }
2507 } else if (m_bIs4KVideo) {
2508 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2509 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2510 } else if (bYuv888OverrideJpeg) {
2511 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2512 (int32_t)largeYuv888Size.width;
2513 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2514 (int32_t)largeYuv888Size.height;
2515 }
2516 break;
2517 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2518 case HAL_PIXEL_FORMAT_RAW16:
2519 case HAL_PIXEL_FORMAT_RAW10:
2520 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2521 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2522 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002523 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2524 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2525 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2526 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2527 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2528 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2529 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2530 gCamCapability[mCameraId]->dt[mPDIndex];
2531 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2532 gCamCapability[mCameraId]->vc[mPDIndex];
2533 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002534 break;
2535 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002536 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002537 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2538 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2539 break;
2540 }
2541 }
2542
2543 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2544 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2545 gCamCapability[mCameraId]->color_arrangement);
2546
2547 if (newStream->priv == NULL) {
2548 //New stream, construct channel
2549 switch (newStream->stream_type) {
2550 case CAMERA3_STREAM_INPUT:
2551 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2552 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2553 break;
2554 case CAMERA3_STREAM_BIDIRECTIONAL:
2555 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2556 GRALLOC_USAGE_HW_CAMERA_WRITE;
2557 break;
2558 case CAMERA3_STREAM_OUTPUT:
2559 /* For video encoding stream, set read/write rarely
2560 * flag so that they may be set to un-cached */
2561 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2562 newStream->usage |=
2563 (GRALLOC_USAGE_SW_READ_RARELY |
2564 GRALLOC_USAGE_SW_WRITE_RARELY |
2565 GRALLOC_USAGE_HW_CAMERA_WRITE);
2566 else if (IS_USAGE_ZSL(newStream->usage))
2567 {
2568 LOGD("ZSL usage flag skipping");
2569 }
2570 else if (newStream == zslStream
2571 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2572 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2573 } else
2574 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2575 break;
2576 default:
2577 LOGE("Invalid stream_type %d", newStream->stream_type);
2578 break;
2579 }
2580
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002581 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002582 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2583 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2584 QCamera3ProcessingChannel *channel = NULL;
2585 switch (newStream->format) {
2586 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2587 if ((newStream->usage &
2588 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2589 (streamList->operation_mode ==
2590 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2591 ) {
2592 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2593 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002594 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002595 this,
2596 newStream,
2597 (cam_stream_type_t)
2598 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2599 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2600 mMetadataChannel,
2601 0); //heap buffers are not required for HFR video channel
2602 if (channel == NULL) {
2603 LOGE("allocation of channel failed");
2604 pthread_mutex_unlock(&mMutex);
2605 return -ENOMEM;
2606 }
2607 //channel->getNumBuffers() will return 0 here so use
 2608 //MAX_INFLIGHT_HFR_REQUESTS
2609 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2610 newStream->priv = channel;
2611 LOGI("num video buffers in HFR mode: %d",
2612 MAX_INFLIGHT_HFR_REQUESTS);
2613 } else {
2614 /* Copy stream contents in HFR preview only case to create
2615 * dummy batch channel so that sensor streaming is in
2616 * HFR mode */
2617 if (!m_bIsVideo && (streamList->operation_mode ==
2618 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2619 mDummyBatchStream = *newStream;
2620 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002621 int bufferCount = MAX_INFLIGHT_REQUESTS;
2622 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2623 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002624 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2625 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2626 bufferCount = m_bIs4KVideo ?
2627 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2628 }
2629
Thierry Strudel2896d122017-02-23 19:18:03 -08002630 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002631 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2632 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002633 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002634 this,
2635 newStream,
2636 (cam_stream_type_t)
2637 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2638 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2639 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002640 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002641 if (channel == NULL) {
2642 LOGE("allocation of channel failed");
2643 pthread_mutex_unlock(&mMutex);
2644 return -ENOMEM;
2645 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002646 /* disable UBWC for preview, though supported,
2647 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002648 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002649 (previewSize.width == (int32_t)videoWidth)&&
2650 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002651 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002652 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002653 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002654 /* When goog_zoom is linked to the preview or video stream,
2655 * disable ubwc to the linked stream */
2656 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2657 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2658 channel->setUBWCEnabled(false);
2659 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002660 newStream->max_buffers = channel->getNumBuffers();
2661 newStream->priv = channel;
2662 }
2663 break;
2664 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2665 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2666 mChannelHandle,
2667 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002668 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002669 this,
2670 newStream,
2671 (cam_stream_type_t)
2672 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2673 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2674 mMetadataChannel);
2675 if (channel == NULL) {
2676 LOGE("allocation of YUV channel failed");
2677 pthread_mutex_unlock(&mMutex);
2678 return -ENOMEM;
2679 }
2680 newStream->max_buffers = channel->getNumBuffers();
2681 newStream->priv = channel;
2682 break;
2683 }
2684 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2685 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002686 case HAL_PIXEL_FORMAT_RAW10: {
2687 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2688 (HAL_DATASPACE_DEPTH != newStream->data_space))
2689 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002690 mRawChannel = new QCamera3RawChannel(
2691 mCameraHandle->camera_handle, mChannelHandle,
2692 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002693 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002694 this, newStream,
2695 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002696 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002697 if (mRawChannel == NULL) {
2698 LOGE("allocation of raw channel failed");
2699 pthread_mutex_unlock(&mMutex);
2700 return -ENOMEM;
2701 }
2702 newStream->max_buffers = mRawChannel->getNumBuffers();
2703 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2704 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002705 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002707 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2708 mDepthChannel = new QCamera3DepthChannel(
2709 mCameraHandle->camera_handle, mChannelHandle,
2710 mCameraHandle->ops, NULL, NULL, &padding_info,
2711 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2712 mMetadataChannel);
2713 if (NULL == mDepthChannel) {
2714 LOGE("Allocation of depth channel failed");
2715 pthread_mutex_unlock(&mMutex);
2716 return NO_MEMORY;
2717 }
2718 newStream->priv = mDepthChannel;
2719 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2720 } else {
2721 // Max live snapshot inflight buffer is 1. This is to mitigate
2722 // frame drop issues for video snapshot. The more buffers being
2723 // allocated, the more frame drops there are.
2724 mPictureChannel = new QCamera3PicChannel(
2725 mCameraHandle->camera_handle, mChannelHandle,
2726 mCameraHandle->ops, captureResultCb,
2727 setBufferErrorStatus, &padding_info, this, newStream,
2728 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2729 m_bIs4KVideo, isZsl, mMetadataChannel,
2730 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2731 if (mPictureChannel == NULL) {
2732 LOGE("allocation of channel failed");
2733 pthread_mutex_unlock(&mMutex);
2734 return -ENOMEM;
2735 }
2736 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2737 newStream->max_buffers = mPictureChannel->getNumBuffers();
2738 mPictureChannel->overrideYuvSize(
2739 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2740 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002741 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002742 break;
2743
2744 default:
2745 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002746 pthread_mutex_unlock(&mMutex);
2747 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002748 }
2749 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2750 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2751 } else {
2752 LOGE("Error, Unknown stream type");
2753 pthread_mutex_unlock(&mMutex);
2754 return -EINVAL;
2755 }
2756
2757 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002758 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002759 // Here we only care whether it's EIS3 or not
2760 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2761 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2762 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2763 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002764 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002765 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002766 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002767 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2768 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2769 }
2770 }
2771
2772 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2773 it != mStreamInfo.end(); it++) {
2774 if ((*it)->stream == newStream) {
2775 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2776 break;
2777 }
2778 }
2779 } else {
2780 // Channel already exists for this stream
2781 // Do nothing for now
2782 }
2783 padding_info = gCamCapability[mCameraId]->padding_info;
2784
Emilian Peev7650c122017-01-19 08:24:33 -08002785 /* Do not add entries for the input and depth streams in meta stream info,
 2786 * since there is no real stream associated with them
 2787 */
Emilian Peev7650c122017-01-19 08:24:33 -08002788 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002789 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2790 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002791 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002792 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002793 }
2794
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002795 // Let buffer dispatcher know the configured streams.
2796 mOutputBufferDispatcher.configureStreams(streamList);
2797
Thierry Strudel2896d122017-02-23 19:18:03 -08002798 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2799 onlyRaw = false;
2800 }
2801
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002802 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002803 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002804 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002805 cam_analysis_info_t analysisInfo;
2806 int32_t ret = NO_ERROR;
2807 ret = mCommon.getAnalysisInfo(
2808 FALSE,
2809 analysisFeatureMask,
2810 &analysisInfo);
2811 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002812 cam_color_filter_arrangement_t analysis_color_arrangement =
2813 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2814 CAM_FILTER_ARRANGEMENT_Y :
2815 gCamCapability[mCameraId]->color_arrangement);
2816 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2817 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002818 cam_dimension_t analysisDim;
2819 analysisDim = mCommon.getMatchingDimension(previewSize,
2820 analysisInfo.analysis_recommended_res);
2821
2822 mAnalysisChannel = new QCamera3SupportChannel(
2823 mCameraHandle->camera_handle,
2824 mChannelHandle,
2825 mCameraHandle->ops,
2826 &analysisInfo.analysis_padding_info,
2827 analysisFeatureMask,
2828 CAM_STREAM_TYPE_ANALYSIS,
2829 &analysisDim,
2830 (analysisInfo.analysis_format
2831 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2832 : CAM_FORMAT_YUV_420_NV21),
2833 analysisInfo.hw_analysis_supported,
2834 gCamCapability[mCameraId]->color_arrangement,
2835 this,
2836 0); // force buffer count to 0
2837 } else {
2838 LOGW("getAnalysisInfo failed, ret = %d", ret);
2839 }
2840 if (!mAnalysisChannel) {
2841 LOGW("Analysis channel cannot be created");
2842 }
2843 }
2844
Thierry Strudel3d639192016-09-09 11:52:26 -07002845 //RAW DUMP channel
2846 if (mEnableRawDump && isRawStreamRequested == false){
2847 cam_dimension_t rawDumpSize;
2848 rawDumpSize = getMaxRawSize(mCameraId);
2849 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2850 setPAAFSupport(rawDumpFeatureMask,
2851 CAM_STREAM_TYPE_RAW,
2852 gCamCapability[mCameraId]->color_arrangement);
2853 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2854 mChannelHandle,
2855 mCameraHandle->ops,
2856 rawDumpSize,
2857 &padding_info,
2858 this, rawDumpFeatureMask);
2859 if (!mRawDumpChannel) {
2860 LOGE("Raw Dump channel cannot be created");
2861 pthread_mutex_unlock(&mMutex);
2862 return -ENOMEM;
2863 }
2864 }
2865
Thierry Strudel3d639192016-09-09 11:52:26 -07002866 if (mAnalysisChannel) {
2867 cam_analysis_info_t analysisInfo;
2868 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2869 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2870 CAM_STREAM_TYPE_ANALYSIS;
2871 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2872 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002873 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002874 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2875 &analysisInfo);
2876 if (rc != NO_ERROR) {
2877 LOGE("getAnalysisInfo failed, ret = %d", rc);
2878 pthread_mutex_unlock(&mMutex);
2879 return rc;
2880 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002881 cam_color_filter_arrangement_t analysis_color_arrangement =
2882 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2883 CAM_FILTER_ARRANGEMENT_Y :
2884 gCamCapability[mCameraId]->color_arrangement);
2885 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2886 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2887 analysis_color_arrangement);
2888
Thierry Strudel3d639192016-09-09 11:52:26 -07002889 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002890 mCommon.getMatchingDimension(previewSize,
2891 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002892 mStreamConfigInfo.num_streams++;
2893 }
2894
Thierry Strudel2896d122017-02-23 19:18:03 -08002895 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002896 cam_analysis_info_t supportInfo;
2897 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2898 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2899 setPAAFSupport(callbackFeatureMask,
2900 CAM_STREAM_TYPE_CALLBACK,
2901 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002902 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002903 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002904 if (ret != NO_ERROR) {
2905 /* Ignore the error for Mono camera
2906 * because the PAAF bit mask is only set
2907 * for CAM_STREAM_TYPE_ANALYSIS stream type
2908 */
2909 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2910 LOGW("getAnalysisInfo failed, ret = %d", ret);
2911 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002912 }
2913 mSupportChannel = new QCamera3SupportChannel(
2914 mCameraHandle->camera_handle,
2915 mChannelHandle,
2916 mCameraHandle->ops,
2917 &gCamCapability[mCameraId]->padding_info,
2918 callbackFeatureMask,
2919 CAM_STREAM_TYPE_CALLBACK,
2920 &QCamera3SupportChannel::kDim,
2921 CAM_FORMAT_YUV_420_NV21,
2922 supportInfo.hw_analysis_supported,
2923 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002924 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002925 if (!mSupportChannel) {
2926 LOGE("dummy channel cannot be created");
2927 pthread_mutex_unlock(&mMutex);
2928 return -ENOMEM;
2929 }
2930 }
2931
2932 if (mSupportChannel) {
2933 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2934 QCamera3SupportChannel::kDim;
2935 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2936 CAM_STREAM_TYPE_CALLBACK;
2937 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2938 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2939 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2940 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2941 gCamCapability[mCameraId]->color_arrangement);
2942 mStreamConfigInfo.num_streams++;
2943 }
2944
2945 if (mRawDumpChannel) {
2946 cam_dimension_t rawSize;
2947 rawSize = getMaxRawSize(mCameraId);
2948 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2949 rawSize;
2950 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2951 CAM_STREAM_TYPE_RAW;
2952 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2953 CAM_QCOM_FEATURE_NONE;
2954 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2955 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2956 gCamCapability[mCameraId]->color_arrangement);
2957 mStreamConfigInfo.num_streams++;
2958 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002959
2960 if (mHdrPlusRawSrcChannel) {
2961 cam_dimension_t rawSize;
2962 rawSize = getMaxRawSize(mCameraId);
2963 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2964 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2965 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2966 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2967 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2968 gCamCapability[mCameraId]->color_arrangement);
2969 mStreamConfigInfo.num_streams++;
2970 }
2971
Thierry Strudel3d639192016-09-09 11:52:26 -07002972 /* In HFR mode, if video stream is not added, create a dummy channel so that
 2973 * the ISP can run in batch mode even for the preview-only case. This channel is
2974 * never 'start'ed (no stream-on), it is only 'initialized' */
2975 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2976 !m_bIsVideo) {
2977 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2978 setPAAFSupport(dummyFeatureMask,
2979 CAM_STREAM_TYPE_VIDEO,
2980 gCamCapability[mCameraId]->color_arrangement);
2981 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2982 mChannelHandle,
2983 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002984 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002985 this,
2986 &mDummyBatchStream,
2987 CAM_STREAM_TYPE_VIDEO,
2988 dummyFeatureMask,
2989 mMetadataChannel);
2990 if (NULL == mDummyBatchChannel) {
2991 LOGE("creation of mDummyBatchChannel failed."
2992 "Preview will use non-hfr sensor mode ");
2993 }
2994 }
2995 if (mDummyBatchChannel) {
2996 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2997 mDummyBatchStream.width;
2998 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2999 mDummyBatchStream.height;
3000 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3001 CAM_STREAM_TYPE_VIDEO;
3002 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3003 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3004 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3005 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3006 gCamCapability[mCameraId]->color_arrangement);
3007 mStreamConfigInfo.num_streams++;
3008 }
3009
3010 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3011 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003012 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003013 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003014
3015 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3016 for (pendingRequestIterator i = mPendingRequestsList.begin();
3017 i != mPendingRequestsList.end();) {
3018 i = erasePendingRequest(i);
3019 }
3020 mPendingFrameDropList.clear();
3021 // Initialize/Reset the pending buffers list
3022 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3023 req.mPendingBufferList.clear();
3024 }
3025 mPendingBuffersMap.mPendingBuffersInRequest.clear();
3026
Thierry Strudel3d639192016-09-09 11:52:26 -07003027 mCurJpegMeta.clear();
3028 //Get min frame duration for this streams configuration
3029 deriveMinFrameDuration();
3030
Chien-Yu Chenee335912017-02-09 17:53:20 -08003031 mFirstPreviewIntentSeen = false;
3032
 3033 // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003034 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003035 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
3036 finishHdrPlusClientOpeningLocked(l);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003037 disableHdrPlusModeLocked();
3038 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003039
Thierry Strudel3d639192016-09-09 11:52:26 -07003040 // Update state
3041 mState = CONFIGURED;
3042
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003043 mFirstMetadataCallback = true;
3044
Thierry Strudel3d639192016-09-09 11:52:26 -07003045 pthread_mutex_unlock(&mMutex);
3046
3047 return rc;
3048}
3049
3050/*===========================================================================
3051 * FUNCTION : validateCaptureRequest
3052 *
3053 * DESCRIPTION: validate a capture request from camera service
3054 *
3055 * PARAMETERS :
3056 * @request : request from framework to process
3057 *
3058 * RETURN :
3059 *
3060 *==========================================================================*/
3061int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003062 camera3_capture_request_t *request,
3063 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003064{
3065 ssize_t idx = 0;
3066 const camera3_stream_buffer_t *b;
3067 CameraMetadata meta;
3068
3069 /* Sanity check the request */
3070 if (request == NULL) {
3071 LOGE("NULL capture request");
3072 return BAD_VALUE;
3073 }
3074
3075 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3076 /*settings cannot be null for the first request*/
3077 return BAD_VALUE;
3078 }
3079
3080 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003081 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3082 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003083 LOGE("Request %d: No output buffers provided!",
 3084 frameNumber);
3085 return BAD_VALUE;
3086 }
3087 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3088 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3089 request->num_output_buffers, MAX_NUM_STREAMS);
3090 return BAD_VALUE;
3091 }
3092 if (request->input_buffer != NULL) {
3093 b = request->input_buffer;
3094 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3095 LOGE("Request %d: Buffer %ld: Status not OK!",
3096 frameNumber, (long)idx);
3097 return BAD_VALUE;
3098 }
3099 if (b->release_fence != -1) {
3100 LOGE("Request %d: Buffer %ld: Has a release fence!",
3101 frameNumber, (long)idx);
3102 return BAD_VALUE;
3103 }
3104 if (b->buffer == NULL) {
3105 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3106 frameNumber, (long)idx);
3107 return BAD_VALUE;
3108 }
3109 }
3110
3111 // Validate all buffers
3112 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003113 if (b == NULL) {
3114 return BAD_VALUE;
3115 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003116 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003117 QCamera3ProcessingChannel *channel =
3118 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3119 if (channel == NULL) {
3120 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3121 frameNumber, (long)idx);
3122 return BAD_VALUE;
3123 }
3124 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3125 LOGE("Request %d: Buffer %ld: Status not OK!",
3126 frameNumber, (long)idx);
3127 return BAD_VALUE;
3128 }
3129 if (b->release_fence != -1) {
3130 LOGE("Request %d: Buffer %ld: Has a release fence!",
3131 frameNumber, (long)idx);
3132 return BAD_VALUE;
3133 }
3134 if (b->buffer == NULL) {
3135 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3136 frameNumber, (long)idx);
3137 return BAD_VALUE;
3138 }
3139 if (*(b->buffer) == NULL) {
3140 LOGE("Request %d: Buffer %ld: NULL private handle!",
3141 frameNumber, (long)idx);
3142 return BAD_VALUE;
3143 }
3144 idx++;
3145 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003146 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003147 return NO_ERROR;
3148}
3149
3150/*===========================================================================
3151 * FUNCTION : deriveMinFrameDuration
3152 *
 3153 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3154 * on currently configured streams.
3155 *
3156 * PARAMETERS : NONE
3157 *
3158 * RETURN : NONE
3159 *
3160 *==========================================================================*/
3161void QCamera3HardwareInterface::deriveMinFrameDuration()
3162{
3163 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003164 bool hasRaw = false;
3165
3166 mMinRawFrameDuration = 0;
3167 mMinJpegFrameDuration = 0;
3168 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003169
3170 maxJpegDim = 0;
3171 maxProcessedDim = 0;
3172 maxRawDim = 0;
3173
3174 // Figure out maximum jpeg, processed, and raw dimensions
3175 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3176 it != mStreamInfo.end(); it++) {
3177
3178 // Input stream doesn't have valid stream_type
3179 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3180 continue;
3181
3182 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3183 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3184 if (dimension > maxJpegDim)
3185 maxJpegDim = dimension;
3186 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3187 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3188 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003189 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003190 if (dimension > maxRawDim)
3191 maxRawDim = dimension;
3192 } else {
3193 if (dimension > maxProcessedDim)
3194 maxProcessedDim = dimension;
3195 }
3196 }
3197
3198 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3199 MAX_SIZES_CNT);
3200
3201 //Assume all jpeg dimensions are in processed dimensions.
3202 if (maxJpegDim > maxProcessedDim)
3203 maxProcessedDim = maxJpegDim;
 3204 //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003205 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003206 maxRawDim = INT32_MAX;
3207
3208 for (size_t i = 0; i < count; i++) {
3209 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3210 gCamCapability[mCameraId]->raw_dim[i].height;
3211 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3212 maxRawDim = dimension;
3213 }
3214 }
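    // Illustration with assumed sizes (not taken from any particular sensor): with a
    // configured 3264x2448 RAW stream and a 4032x3024 processed/JPEG stream, maxProcessedDim
    // exceeds maxRawDim, so the block above bumps maxRawDim to the smallest entry in
    // raw_dim[] whose pixel count is >= 4032x3024 (e.g. 4208x3120 rather than a larger
    // 5344x4016 mode).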
3215
3216 //Find minimum durations for processed, jpeg, and raw
3217 for (size_t i = 0; i < count; i++) {
3218 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3219 gCamCapability[mCameraId]->raw_dim[i].height) {
3220 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3221 break;
3222 }
3223 }
3224 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3225 for (size_t i = 0; i < count; i++) {
3226 if (maxProcessedDim ==
3227 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3228 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3229 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3230 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3231 break;
3232 }
3233 }
3234}
3235
3236/*===========================================================================
3237 * FUNCTION : getMinFrameDuration
3238 *
 3239 * DESCRIPTION: get the minimum frame duration based on the derived minimum frame
 3240 * durations and the current request configuration.
 3241 *
 3242 * PARAMETERS : @request: request sent by the framework
 3243 *
 3244 * RETURN : minimum frame duration for a particular request
3245 *
3246 *==========================================================================*/
3247int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3248{
3249 bool hasJpegStream = false;
3250 bool hasRawStream = false;
3251 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3252 const camera3_stream_t *stream = request->output_buffers[i].stream;
3253 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3254 hasJpegStream = true;
3255 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3256 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3257 stream->format == HAL_PIXEL_FORMAT_RAW16)
3258 hasRawStream = true;
3259 }
3260
3261 if (!hasJpegStream)
3262 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3263 else
3264 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3265}
3266
3267/*===========================================================================
3268 * FUNCTION : handleBuffersDuringFlushLock
3269 *
3270 * DESCRIPTION: Account for buffers returned from back-end during flush
3271 * This function is executed while mMutex is held by the caller.
3272 *
3273 * PARAMETERS :
3274 * @buffer: image buffer for the callback
3275 *
3276 * RETURN :
3277 *==========================================================================*/
3278void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3279{
3280 bool buffer_found = false;
3281 for (List<PendingBuffersInRequest>::iterator req =
3282 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3283 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3284 for (List<PendingBufferInfo>::iterator i =
3285 req->mPendingBufferList.begin();
3286 i != req->mPendingBufferList.end(); i++) {
3287 if (i->buffer == buffer->buffer) {
3288 mPendingBuffersMap.numPendingBufsAtFlush--;
3289 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3290 buffer->buffer, req->frame_number,
3291 mPendingBuffersMap.numPendingBufsAtFlush);
3292 buffer_found = true;
3293 break;
3294 }
3295 }
3296 if (buffer_found) {
3297 break;
3298 }
3299 }
3300 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3301 //signal the flush()
3302 LOGD("All buffers returned to HAL. Continue flush");
3303 pthread_cond_signal(&mBuffersCond);
3304 }
3305}
3306
Thierry Strudel3d639192016-09-09 11:52:26 -07003307/*===========================================================================
3308 * FUNCTION : handleBatchMetadata
3309 *
3310 * DESCRIPTION: Handles metadata buffer callback in batch mode
3311 *
3312 * PARAMETERS : @metadata_buf: metadata buffer
3313 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3314 * the meta buf in this method
3315 *
3316 * RETURN :
3317 *
3318 *==========================================================================*/
3319void QCamera3HardwareInterface::handleBatchMetadata(
3320 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3321{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003322 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003323
3324 if (NULL == metadata_buf) {
3325 LOGE("metadata_buf is NULL");
3326 return;
3327 }
 3328 /* In batch mode, the metadata will contain the frame number and timestamp of
3329 * the last frame in the batch. Eg: a batch containing buffers from request
3330 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
 3331 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3332 * multiple process_capture_results */
3333 metadata_buffer_t *metadata =
3334 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3335 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3336 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3337 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3338 uint32_t frame_number = 0, urgent_frame_number = 0;
3339 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3340 bool invalid_metadata = false;
3341 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3342 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003343 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003344
3345 int32_t *p_frame_number_valid =
3346 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3347 uint32_t *p_frame_number =
3348 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3349 int64_t *p_capture_time =
3350 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3351 int32_t *p_urgent_frame_number_valid =
3352 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3353 uint32_t *p_urgent_frame_number =
3354 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3355
3356 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3357 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3358 (NULL == p_urgent_frame_number)) {
3359 LOGE("Invalid metadata");
3360 invalid_metadata = true;
3361 } else {
3362 frame_number_valid = *p_frame_number_valid;
3363 last_frame_number = *p_frame_number;
3364 last_frame_capture_time = *p_capture_time;
3365 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3366 last_urgent_frame_number = *p_urgent_frame_number;
3367 }
3368
3369 /* In batchmode, when no video buffers are requested, set_parms are sent
3370 * for every capture_request. The difference between consecutive urgent
3371 * frame numbers and frame numbers should be used to interpolate the
3372 * corresponding frame numbers and time stamps */
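    /* A small inferred-numbers example (values assumed for illustration only): if the
     * batch metadata reports last_frame_number = 8 and mPendingBatchMap maps it to
     * first_frame_number = 5, then frameNumDiff = 4 and the loop below replays the
     * metadata for frames 5, 6, 7 and 8, spacing their timestamps backwards from
     * last_frame_capture_time by NSEC_PER_SEC / mHFRVideoFps per frame. */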
3373 pthread_mutex_lock(&mMutex);
3374 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003375 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3376 if(idx < 0) {
3377 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3378 last_urgent_frame_number);
3379 mState = ERROR;
3380 pthread_mutex_unlock(&mMutex);
3381 return;
3382 }
3383 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003384 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3385 first_urgent_frame_number;
3386
3387 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3388 urgent_frame_number_valid,
3389 first_urgent_frame_number, last_urgent_frame_number);
3390 }
3391
3392 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003393 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3394 if(idx < 0) {
3395 LOGE("Invalid frame number received: %d. Irrecoverable error",
3396 last_frame_number);
3397 mState = ERROR;
3398 pthread_mutex_unlock(&mMutex);
3399 return;
3400 }
3401 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003402 frameNumDiff = last_frame_number + 1 -
3403 first_frame_number;
3404 mPendingBatchMap.removeItem(last_frame_number);
3405
3406 LOGD("frm: valid: %d frm_num: %d - %d",
3407 frame_number_valid,
3408 first_frame_number, last_frame_number);
3409
3410 }
3411 pthread_mutex_unlock(&mMutex);
3412
3413 if (urgent_frame_number_valid || frame_number_valid) {
3414 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3415 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3416 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3417 urgentFrameNumDiff, last_urgent_frame_number);
3418 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3419 LOGE("frameNumDiff: %d frameNum: %d",
3420 frameNumDiff, last_frame_number);
3421 }
3422
3423 for (size_t i = 0; i < loopCount; i++) {
3424 /* handleMetadataWithLock is called even for invalid_metadata for
3425 * pipeline depth calculation */
3426 if (!invalid_metadata) {
3427 /* Infer frame number. Batch metadata contains frame number of the
3428 * last frame */
3429 if (urgent_frame_number_valid) {
3430 if (i < urgentFrameNumDiff) {
3431 urgent_frame_number =
3432 first_urgent_frame_number + i;
3433 LOGD("inferred urgent frame_number: %d",
3434 urgent_frame_number);
3435 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3436 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3437 } else {
3438 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3439 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3440 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3441 }
3442 }
3443
3444 /* Infer frame number. Batch metadata contains frame number of the
3445 * last frame */
3446 if (frame_number_valid) {
3447 if (i < frameNumDiff) {
3448 frame_number = first_frame_number + i;
3449 LOGD("inferred frame_number: %d", frame_number);
3450 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3451 CAM_INTF_META_FRAME_NUMBER, frame_number);
3452 } else {
3453 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3454 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3455 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3456 }
3457 }
3458
3459 if (last_frame_capture_time) {
3460 //Infer timestamp
3461 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003462 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003463 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003464 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003465 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3466 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3467 LOGD("batch capture_time: %lld, capture_time: %lld",
3468 last_frame_capture_time, capture_time);
3469 }
3470 }
3471 pthread_mutex_lock(&mMutex);
3472 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003473 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003474 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3475 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003476 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003477 pthread_mutex_unlock(&mMutex);
3478 }
3479
3480 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003481 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003482 mMetadataChannel->bufDone(metadata_buf);
3483 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003484 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003485 }
3486}
3487
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003488void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3489 camera3_error_msg_code_t errorCode)
3490{
3491 camera3_notify_msg_t notify_msg;
3492 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3493 notify_msg.type = CAMERA3_MSG_ERROR;
3494 notify_msg.message.error.error_code = errorCode;
3495 notify_msg.message.error.error_stream = NULL;
3496 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003497 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003498
3499 return;
3500}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003501
3502/*===========================================================================
3503 * FUNCTION : sendPartialMetadataWithLock
3504 *
3505 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3506 *
3507 * PARAMETERS : @metadata: metadata buffer
3508 * @requestIter: The iterator for the pending capture request for
 3509 * which the partial result is being sent
3510 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3511 * last urgent metadata in a batch. Always true for non-batch mode
3512 *
3513 * RETURN :
3514 *
3515 *==========================================================================*/
3516
3517void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3518 metadata_buffer_t *metadata,
3519 const pendingRequestIterator requestIter,
3520 bool lastUrgentMetadataInBatch)
3521{
3522 camera3_capture_result_t result;
3523 memset(&result, 0, sizeof(camera3_capture_result_t));
3524
3525 requestIter->partial_result_cnt++;
3526
3527 // Extract 3A metadata
3528 result.result = translateCbUrgentMetadataToResultMetadata(
3529 metadata, lastUrgentMetadataInBatch);
3530 // Populate metadata result
3531 result.frame_number = requestIter->frame_number;
3532 result.num_output_buffers = 0;
3533 result.output_buffers = NULL;
3534 result.partial_result = requestIter->partial_result_cnt;
3535
3536 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003537 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003538 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3539 // Notify HDR+ client about the partial metadata.
3540 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3541 result.partial_result == PARTIAL_RESULT_COUNT);
3542 }
3543 }
3544
3545 orchestrateResult(&result);
3546 LOGD("urgent frame_number = %u", result.frame_number);
3547 free_camera_metadata((camera_metadata_t *)result.result);
3548}
3549
Thierry Strudel3d639192016-09-09 11:52:26 -07003550/*===========================================================================
3551 * FUNCTION : handleMetadataWithLock
3552 *
3553 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3554 *
3555 * PARAMETERS : @metadata_buf: metadata buffer
3556 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3557 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003558 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3559 * last urgent metadata in a batch. Always true for non-batch mode
3560 * @lastMetadataInBatch: Boolean to indicate whether this is the
3561 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003562 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3563 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003564 *
3565 * RETURN :
3566 *
3567 *==========================================================================*/
3568void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003569 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003570 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3571 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003572{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003573 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003574 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3575 //during flush do not send metadata from this thread
3576 LOGD("not sending metadata during flush or when mState is error");
3577 if (free_and_bufdone_meta_buf) {
3578 mMetadataChannel->bufDone(metadata_buf);
3579 free(metadata_buf);
3580 }
3581 return;
3582 }
3583
3584 //not in flush
3585 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3586 int32_t frame_number_valid, urgent_frame_number_valid;
3587 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003588 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003589 nsecs_t currentSysTime;
3590
3591 int32_t *p_frame_number_valid =
3592 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3593 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3594 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003595 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003596 int32_t *p_urgent_frame_number_valid =
3597 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3598 uint32_t *p_urgent_frame_number =
3599 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3600 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3601 metadata) {
3602 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3603 *p_frame_number_valid, *p_frame_number);
3604 }
3605
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003606 camera_metadata_t *resultMetadata = nullptr;
3607
Thierry Strudel3d639192016-09-09 11:52:26 -07003608 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3609 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3610 LOGE("Invalid metadata");
3611 if (free_and_bufdone_meta_buf) {
3612 mMetadataChannel->bufDone(metadata_buf);
3613 free(metadata_buf);
3614 }
3615 goto done_metadata;
3616 }
3617 frame_number_valid = *p_frame_number_valid;
3618 frame_number = *p_frame_number;
3619 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003620 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003621 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3622 urgent_frame_number = *p_urgent_frame_number;
3623 currentSysTime = systemTime(CLOCK_MONOTONIC);
3624
Jason Lee603176d2017-05-31 11:43:27 -07003625 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
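        // The sensor timestamp is assumed to be on the BOOTTIME base when the capability
        // reports it as uncalibrated. The loop below brackets one BOOTTIME read between
        // two MONOTONIC reads and keeps the measurement from the tightest bracket
        // (smallest gap) as the BOOTTIME-MONOTONIC offset; subtracting it moves
        // capture_time onto the MONOTONIC base used elsewhere in this function
        // (e.g. currentSysTime above).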
3626 const int tries = 3;
3627 nsecs_t bestGap, measured;
3628 for (int i = 0; i < tries; ++i) {
3629 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3630 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3631 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3632 const nsecs_t gap = tmono2 - tmono;
3633 if (i == 0 || gap < bestGap) {
3634 bestGap = gap;
3635 measured = tbase - ((tmono + tmono2) >> 1);
3636 }
3637 }
3638 capture_time -= measured;
3639 }
3640
Thierry Strudel3d639192016-09-09 11:52:26 -07003641 // Detect if buffers from any requests are overdue
3642 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003643 int64_t timeout;
3644 {
3645 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3646 // If there is a pending HDR+ request, the following requests may be blocked until the
3647 // HDR+ request is done. So allow a longer timeout.
3648 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3649 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3650 }
3651
3652 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003653 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003654 assert(missed.stream->priv);
3655 if (missed.stream->priv) {
3656 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3657 assert(ch->mStreams[0]);
3658 if (ch->mStreams[0]) {
3659 LOGE("Cancel missing frame = %d, buffer = %p,"
3660 "stream type = %d, stream format = %d",
3661 req.frame_number, missed.buffer,
3662 ch->mStreams[0]->getMyType(), missed.stream->format);
3663 ch->timeoutFrame(req.frame_number);
3664 }
3665 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003666 }
3667 }
3668 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003669 //For the very first metadata callback, regardless of whether it contains a valid
3670 //frame number, send the partial metadata for the jumpstarting requests.
3671 //Note that this has to be done even if the metadata doesn't contain valid
3672 //urgent frame number, because in the case only 1 request is ever submitted
3673 //to HAL, there won't be subsequent valid urgent frame number.
3674 if (mFirstMetadataCallback) {
3675 for (pendingRequestIterator i =
3676 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3677 if (i->bUseFirstPartial) {
3678 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3679 }
3680 }
3681 mFirstMetadataCallback = false;
3682 }
3683
Thierry Strudel3d639192016-09-09 11:52:26 -07003684 //Partial result on process_capture_result for timestamp
3685 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003686 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003687
 3688 //Received an urgent frame number, handle it
3689 //using partial results
3690 for (pendingRequestIterator i =
3691 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3692 LOGD("Iterator Frame = %d urgent frame = %d",
3693 i->frame_number, urgent_frame_number);
3694
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003695 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003696 (i->partial_result_cnt == 0)) {
3697 LOGE("Error: HAL missed urgent metadata for frame number %d",
3698 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003699 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003700 }
3701
3702 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003703 i->partial_result_cnt == 0) {
3704 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003705 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3706 // Instant AEC settled for this frame.
3707 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3708 mInstantAECSettledFrameNumber = urgent_frame_number;
3709 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003710 break;
3711 }
3712 }
3713 }
3714
3715 if (!frame_number_valid) {
3716 LOGD("Not a valid normal frame number, used as SOF only");
3717 if (free_and_bufdone_meta_buf) {
3718 mMetadataChannel->bufDone(metadata_buf);
3719 free(metadata_buf);
3720 }
3721 goto done_metadata;
3722 }
3723 LOGH("valid frame_number = %u, capture_time = %lld",
3724 frame_number, capture_time);
3725
Emilian Peev4e0fe952017-06-30 12:40:09 -07003726 handleDepthDataLocked(metadata->depth_data, frame_number,
3727 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003728
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003729 // Check whether any stream buffer corresponding to this frame is dropped or not
3730 // If dropped, then send the ERROR_BUFFER for the corresponding stream
 3731 // OR, if instant AEC is enabled, drop frames until AEC has settled.
3732 for (auto & pendingRequest : mPendingRequestsList) {
3733 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3734 mInstantAECSettledFrameNumber)) {
3735 camera3_notify_msg_t notify_msg = {};
3736 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003737 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003738 QCamera3ProcessingChannel *channel =
3739 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003740 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003741 if (p_cam_frame_drop) {
3742 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003743 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003744 // Got the stream ID for drop frame.
3745 dropFrame = true;
3746 break;
3747 }
3748 }
3749 } else {
3750 // This is instant AEC case.
 3751 // For instant AEC, drop the stream until AEC has settled.
3752 dropFrame = true;
3753 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003754
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003755 if (dropFrame) {
3756 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3757 if (p_cam_frame_drop) {
3758 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003759 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003760 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003761 } else {
3762 // For instant AEC, inform frame drop and frame number
3763 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3764 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003765 pendingRequest.frame_number, streamID,
3766 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003767 }
3768 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003769 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003770 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003771 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003772 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003773 if (p_cam_frame_drop) {
3774 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003775 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003776 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003777 } else {
3778 // For instant AEC, inform frame drop and frame number
3779 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3780 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003781 pendingRequest.frame_number, streamID,
3782 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003783 }
3784 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003785 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003786 PendingFrameDrop.stream_ID = streamID;
3787 // Add the Frame drop info to mPendingFrameDropList
3788 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003789 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003790 }
3791 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003792 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003793
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003794 for (auto & pendingRequest : mPendingRequestsList) {
3795 // Find the pending request with the frame number.
3796 if (pendingRequest.frame_number == frame_number) {
3797 // Update the sensor timestamp.
3798 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003799
Thierry Strudel3d639192016-09-09 11:52:26 -07003800
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003801 /* Set the timestamp in display metadata so that clients aware of
 3802 private_handle, such as VT, can use these unmodified timestamps.
3803 Camera framework is unaware of this timestamp and cannot change this */
Jason Lee603176d2017-05-31 11:43:27 -07003804 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003805
Thierry Strudel3d639192016-09-09 11:52:26 -07003806 // Find channel requiring metadata, meaning internal offline postprocess
3807 // is needed.
3808 //TODO: for now, we don't support two streams requiring metadata at the same time.
 3809 // (because we are not making copies, and the metadata buffer is not reference counted).
3810 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003811 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3812 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003813 if (iter->need_metadata) {
3814 internalPproc = true;
3815 QCamera3ProcessingChannel *channel =
3816 (QCamera3ProcessingChannel *)iter->stream->priv;
3817 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003818 if(p_is_metabuf_queued != NULL) {
3819 *p_is_metabuf_queued = true;
3820 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003821 break;
3822 }
3823 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003824 for (auto itr = pendingRequest.internalRequestList.begin();
3825 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003826 if (itr->need_metadata) {
3827 internalPproc = true;
3828 QCamera3ProcessingChannel *channel =
3829 (QCamera3ProcessingChannel *)itr->stream->priv;
3830 channel->queueReprocMetadata(metadata_buf);
3831 break;
3832 }
3833 }
3834
Thierry Strudel54dc9782017-02-15 12:12:10 -08003835 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003836
3837 bool *enableZsl = nullptr;
3838 if (gExposeEnableZslKey) {
3839 enableZsl = &pendingRequest.enableZsl;
3840 }
3841
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003842 resultMetadata = translateFromHalMetadata(metadata,
3843 pendingRequest.timestamp, pendingRequest.request_id,
3844 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3845 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003846 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003847 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003848 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003849 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003850 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003851 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003852
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003853 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003854
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003855 if (pendingRequest.blob_request) {
3856 //Dump tuning metadata if enabled and available
3857 char prop[PROPERTY_VALUE_MAX];
3858 memset(prop, 0, sizeof(prop));
3859 property_get("persist.camera.dumpmetadata", prop, "0");
3860 int32_t enabled = atoi(prop);
3861 if (enabled && metadata->is_tuning_params_valid) {
3862 dumpMetadataToFile(metadata->tuning_params,
3863 mMetaFrameCount,
3864 enabled,
3865 "Snapshot",
3866 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003867 }
3868 }
3869
3870 if (!internalPproc) {
3871 LOGD("couldn't find need_metadata for this metadata");
3872 // Return metadata buffer
3873 if (free_and_bufdone_meta_buf) {
3874 mMetadataChannel->bufDone(metadata_buf);
3875 free(metadata_buf);
3876 }
3877 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003878
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003879 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003880 }
3881 }
3882
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003883 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3884
3885 // Try to send out capture result metadata.
3886 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003887 return;
3888
Thierry Strudel3d639192016-09-09 11:52:26 -07003889done_metadata:
3890 for (pendingRequestIterator i = mPendingRequestsList.begin();
3891 i != mPendingRequestsList.end() ;i++) {
3892 i->pipeline_depth++;
3893 }
3894 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3895 unblockRequestIfNecessary();
3896}
3897
3898/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003899 * FUNCTION : handleDepthDataLocked
3900 *
3901 * DESCRIPTION: Handles incoming depth data
3902 *
3903 * PARAMETERS : @depthData : Depth data
3904 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003905 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003906 *
3907 * RETURN :
3908 *
3909 *==========================================================================*/
3910void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003911 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003912 uint32_t currentFrameNumber;
3913 buffer_handle_t *depthBuffer;
3914
3915 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003916 return;
3917 }
3918
3919 camera3_stream_buffer_t resultBuffer =
3920 {.acquire_fence = -1,
3921 .release_fence = -1,
3922 .status = CAMERA3_BUFFER_STATUS_OK,
3923 .buffer = nullptr,
3924 .stream = mDepthChannel->getStream()};
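    // Drain the depth channel in frame-number order: buffers older than the incoming
    // frame are returned with a buffer error notification, the matching buffer is
    // populated with the depth data (or flagged as an error if the data is invalid),
    // and newer buffers stay queued for a later callback.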
Emilian Peev7650c122017-01-19 08:24:33 -08003925 do {
3926 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3927 if (nullptr == depthBuffer) {
3928 break;
3929 }
3930
Emilian Peev7650c122017-01-19 08:24:33 -08003931 resultBuffer.buffer = depthBuffer;
3932 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003933 if (valid) {
3934 int32_t rc = mDepthChannel->populateDepthData(depthData,
3935 frameNumber);
3936 if (NO_ERROR != rc) {
3937 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3938 } else {
3939 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3940 }
Emilian Peev7650c122017-01-19 08:24:33 -08003941 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003942 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003943 }
3944 } else if (currentFrameNumber > frameNumber) {
3945 break;
3946 } else {
3947 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3948 {{currentFrameNumber, mDepthChannel->getStream(),
3949 CAMERA3_MSG_ERROR_BUFFER}}};
3950 orchestrateNotify(&notify_msg);
3951
3952 LOGE("Depth buffer for frame number: %d is missing "
3953 "returning back!", currentFrameNumber);
3954 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3955 }
3956 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003957 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003958 } while (currentFrameNumber < frameNumber);
3959}
3960
3961/*===========================================================================
3962 * FUNCTION : notifyErrorFoPendingDepthData
3963 *
3964 * DESCRIPTION: Returns error for any pending depth buffers
3965 *
3966 * PARAMETERS : depthCh - depth channel that needs to get flushed
3967 *
3968 * RETURN :
3969 *
3970 *==========================================================================*/
3971void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3972 QCamera3DepthChannel *depthCh) {
3973 uint32_t currentFrameNumber;
3974 buffer_handle_t *depthBuffer;
3975
3976 if (nullptr == depthCh) {
3977 return;
3978 }
3979
3980 camera3_notify_msg_t notify_msg =
3981 {.type = CAMERA3_MSG_ERROR,
3982 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3983 camera3_stream_buffer_t resultBuffer =
3984 {.acquire_fence = -1,
3985 .release_fence = -1,
3986 .buffer = nullptr,
3987 .stream = depthCh->getStream(),
3988 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08003989
3990 while (nullptr !=
3991 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3992 depthCh->unmapBuffer(currentFrameNumber);
3993
3994 notify_msg.message.error.frame_number = currentFrameNumber;
3995 orchestrateNotify(&notify_msg);
3996
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003997 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003998 };
3999}
4000
4001/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004002 * FUNCTION : hdrPlusPerfLock
4003 *
4004 * DESCRIPTION: perf lock for HDR+ using custom intent
4005 *
4006 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4007 *
4008 * RETURN : None
4009 *
4010 *==========================================================================*/
4011void QCamera3HardwareInterface::hdrPlusPerfLock(
4012 mm_camera_super_buf_t *metadata_buf)
4013{
4014 if (NULL == metadata_buf) {
4015 LOGE("metadata_buf is NULL");
4016 return;
4017 }
4018 metadata_buffer_t *metadata =
4019 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4020 int32_t *p_frame_number_valid =
4021 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4022 uint32_t *p_frame_number =
4023 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4024
4025 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4026 LOGE("%s: Invalid metadata", __func__);
4027 return;
4028 }
4029
4030 //acquire perf lock for 5 sec after the last HDR frame is captured
4031 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4032 if ((p_frame_number != NULL) &&
4033 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004034 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004035 }
4036 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004037}
4038
4039/*===========================================================================
4040 * FUNCTION : handleInputBufferWithLock
4041 *
4042 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4043 *
4044 * PARAMETERS : @frame_number: frame number of the input buffer
4045 *
4046 * RETURN :
4047 *
4048 *==========================================================================*/
4049void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4050{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004051 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004052 pendingRequestIterator i = mPendingRequestsList.begin();
4053 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4054 i++;
4055 }
4056 if (i != mPendingRequestsList.end() && i->input_buffer) {
4057 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004058 CameraMetadata settings;
4059 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4060 if(i->settings) {
4061 settings = i->settings;
4062 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4063 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004064 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004065 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004066 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004067 } else {
4068 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004069 }
4070
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004071 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4072 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4073 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004074
4075 camera3_capture_result result;
4076 memset(&result, 0, sizeof(camera3_capture_result));
4077 result.frame_number = frame_number;
4078 result.result = i->settings;
4079 result.input_buffer = i->input_buffer;
4080 result.partial_result = PARTIAL_RESULT_COUNT;
4081
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004082 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004083 LOGD("Input request metadata and input buffer frame_number = %u",
4084 i->frame_number);
4085 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004086
4087 // Dispatch result metadata that may be just unblocked by this reprocess result.
4088 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004089 } else {
4090 LOGE("Could not find input request for frame number %d", frame_number);
4091 }
4092}
4093
4094/*===========================================================================
4095 * FUNCTION : handleBufferWithLock
4096 *
4097 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4098 *
4099 * PARAMETERS : @buffer: image buffer for the callback
4100 * @frame_number: frame number of the image buffer
4101 *
4102 * RETURN :
4103 *
4104 *==========================================================================*/
4105void QCamera3HardwareInterface::handleBufferWithLock(
4106 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4107{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004108 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004109
4110 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4111 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4112 }
4113
Thierry Strudel3d639192016-09-09 11:52:26 -07004114 /* Nothing to be done during error state */
4115 if ((ERROR == mState) || (DEINIT == mState)) {
4116 return;
4117 }
4118 if (mFlushPerf) {
4119 handleBuffersDuringFlushLock(buffer);
4120 return;
4121 }
4122 //not in flush
4123 // If the frame number doesn't exist in the pending request list,
4124 // directly send the buffer to the frameworks, and update pending buffers map
4125 // Otherwise, book-keep the buffer.
4126 pendingRequestIterator i = mPendingRequestsList.begin();
4127 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4128 i++;
4129 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004130
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004131 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004132 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004133 // For a reprocessing request, try to send out result metadata.
4134 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004135 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004136 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004137
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004138 // Check if this frame was dropped.
4139 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4140 m != mPendingFrameDropList.end(); m++) {
4141 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4142 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4143 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4144 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4145 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4146 frame_number, streamID);
4147 m = mPendingFrameDropList.erase(m);
4148 break;
4149 }
4150 }
4151
4152 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4153 LOGH("result frame_number = %d, buffer = %p",
4154 frame_number, buffer->buffer);
4155
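    // Drop the buffer from the pending-buffer bookkeeping and hand it to the output
    // buffer dispatcher, which returns it to the framework.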
4156 mPendingBuffersMap.removeBuf(buffer->buffer);
4157 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4158
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004159 if (mPreviewStarted == false) {
4160 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4161 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004162 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4163
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004164 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4165 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4166 mPreviewStarted = true;
4167
4168 // Set power hint for preview
4169 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4170 }
4171 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004172}
4173
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004174void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004175 camera_metadata_t *resultMetadata)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004176{
4177 // Find the pending request for this result metadata.
4178 auto requestIter = mPendingRequestsList.begin();
4179 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4180 requestIter++;
4181 }
4182
4183 if (requestIter == mPendingRequestsList.end()) {
4184 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4185 return;
4186 }
4187
4188 // Update the result metadata
4189 requestIter->resultMetadata = resultMetadata;
4190
4191 // Check what type of request this is.
4192 bool liveRequest = false;
4193 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004194 // HDR+ request doesn't have partial results.
4195 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004196 } else if (requestIter->input_buffer != nullptr) {
4197 // Reprocessing request result is the same as settings.
4198 requestIter->resultMetadata = requestIter->settings;
4199 // Reprocessing request doesn't have partial results.
4200 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4201 } else {
4202 liveRequest = true;
4203 requestIter->partial_result_cnt++;
4204 mPendingLiveRequest--;
4205
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004206 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004207 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004208 // For a live request, send the metadata to HDR+ client.
4209 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4210 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4211 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4212 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004213 }
4214 }
4215
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004216 // Remove the lens shading map if it was not requested.
4217 if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4218 CameraMetadata metadata;
4219 metadata.acquire(resultMetadata);
4220 metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4221 metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4222 &requestIter->requestedLensShadingMapMode, 1);
4223
4224 requestIter->resultMetadata = metadata.release();
4225 }
4226
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004227 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4228}
4229
4230void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4231 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004232 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4233 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004234 bool readyToSend = true;
4235
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004236 // Iterate through the pending requests to send out result metadata that are ready. Also if
4237 // this result metadata belongs to a live request, notify errors for previous live requests
4238 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004239 auto iter = mPendingRequestsList.begin();
4240 while (iter != mPendingRequestsList.end()) {
4241 // Check if current pending request is ready. If it's not ready, the following pending
4242 // requests are also not ready.
4243 if (readyToSend && iter->resultMetadata == nullptr) {
4244 readyToSend = false;
4245 }
4246
4247 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4248
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004249 camera3_capture_result_t result = {};
4250 result.frame_number = iter->frame_number;
4251 result.result = iter->resultMetadata;
4252 result.partial_result = iter->partial_result_cnt;
4253
4254 // If this pending buffer has result metadata, we may be able to send out shutter callback
4255 // and result metadata.
4256 if (iter->resultMetadata != nullptr) {
4257 if (!readyToSend) {
4258 // If any of the previous pending request is not ready, this pending request is
4259 // also not ready to send in order to keep shutter callbacks and result metadata
4260 // in order.
4261 iter++;
4262 continue;
4263 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004264 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004265 // If the result metadata belongs to a live request, notify errors for previous pending
4266 // live requests.
4267 mPendingLiveRequest--;
4268
4269 CameraMetadata dummyMetadata;
4270 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4271 result.result = dummyMetadata.release();
4272
4273 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004274
4275 // partial_result should be PARTIAL_RESULT_CNT in case of
4276 // ERROR_RESULT.
4277 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4278 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004279 } else {
4280 iter++;
4281 continue;
4282 }
4283
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004284 result.output_buffers = nullptr;
4285 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004286 orchestrateResult(&result);
4287
4288 // For reprocessing, result metadata is the same as settings so do not free it here to
4289 // avoid double free.
4290 if (result.result != iter->settings) {
4291 free_camera_metadata((camera_metadata_t *)result.result);
4292 }
4293 iter->resultMetadata = nullptr;
4294 iter = erasePendingRequest(iter);
4295 }
4296
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004297 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004298 for (auto &iter : mPendingRequestsList) {
4299 // Increment pipeline depth for the following pending requests.
4300 if (iter.frame_number > frameNumber) {
4301 iter.pipeline_depth++;
4302 }
4303 }
4304 }
4305
4306 unblockRequestIfNecessary();
4307}
4308
Thierry Strudel3d639192016-09-09 11:52:26 -07004309/*===========================================================================
4310 * FUNCTION : unblockRequestIfNecessary
4311 *
4312 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4313 * that mMutex is held when this function is called.
4314 *
4315 * PARAMETERS :
4316 *
4317 * RETURN :
4318 *
4319 *==========================================================================*/
4320void QCamera3HardwareInterface::unblockRequestIfNecessary()
4321{
4322 // Unblock process_capture_request
4323 pthread_cond_signal(&mRequestCond);
4324}
4325
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004326/*===========================================================================
4327 * FUNCTION : isHdrSnapshotRequest
4328 *
4329 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4330 *
4331 * PARAMETERS : camera3 request structure
4332 *
4333 * RETURN : boolean decision variable
4334 *
4335 *==========================================================================*/
4336bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4337{
4338 if (request == NULL) {
4339 LOGE("Invalid request handle");
4340 assert(0);
4341 return false;
4342 }
4343
4344 if (!mForceHdrSnapshot) {
4345 CameraMetadata frame_settings;
4346 frame_settings = request->settings;
4347
4348 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4349 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4350 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4351 return false;
4352 }
4353 } else {
4354 return false;
4355 }
4356
4357 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4358 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4359 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4360 return false;
4361 }
4362 } else {
4363 return false;
4364 }
4365 }
4366
4367 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4368 if (request->output_buffers[i].stream->format
4369 == HAL_PIXEL_FORMAT_BLOB) {
4370 return true;
4371 }
4372 }
4373
4374 return false;
4375}
4376/*===========================================================================
4377 * FUNCTION : orchestrateRequest
4378 *
4379 * DESCRIPTION: Orchestrates a capture request from camera service
4380 *
4381 * PARAMETERS :
4382 * @request : request from framework to process
4383 *
4384 * RETURN : Error status codes
4385 *
4386 *==========================================================================*/
4387int32_t QCamera3HardwareInterface::orchestrateRequest(
4388 camera3_capture_request_t *request)
4389{
4390
4391 uint32_t originalFrameNumber = request->frame_number;
4392 uint32_t originalOutputCount = request->num_output_buffers;
4393 const camera_metadata_t *original_settings = request->settings;
4394 List<InternalRequest> internallyRequestedStreams;
4395 List<InternalRequest> emptyInternalList;
4396
4397 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4398 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
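        // The HDR snapshot is orchestrated below as a bracketed sequence with AE locked:
        // a metering-only settling request and the framework-visible capture at
        // GB_HDR_HALF_STEP_EV, then internal metering-only and blob captures at 0 EV and
        // at GB_HDR_2X_STEP_EV, after which the original settings pointer is restored.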
4399 uint32_t internalFrameNumber;
4400 CameraMetadata modified_meta;
4401
4402
4403 /* Add Blob channel to list of internally requested streams */
4404 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4405 if (request->output_buffers[i].stream->format
4406 == HAL_PIXEL_FORMAT_BLOB) {
4407 InternalRequest streamRequested;
4408 streamRequested.meteringOnly = 1;
4409 streamRequested.need_metadata = 0;
4410 streamRequested.stream = request->output_buffers[i].stream;
4411 internallyRequestedStreams.push_back(streamRequested);
4412 }
4413 }
4414 request->num_output_buffers = 0;
4415 auto itr = internallyRequestedStreams.begin();
4416
4417 /* Modify setting to set compensation */
4418 modified_meta = request->settings;
4419 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4420 uint8_t aeLock = 1;
4421 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4422 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4423 camera_metadata_t *modified_settings = modified_meta.release();
4424 request->settings = modified_settings;
4425
4426 /* Capture Settling & -2x frame */
4427 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4428 request->frame_number = internalFrameNumber;
4429 processCaptureRequest(request, internallyRequestedStreams);
4430
4431 request->num_output_buffers = originalOutputCount;
4432 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4433 request->frame_number = internalFrameNumber;
4434 processCaptureRequest(request, emptyInternalList);
4435 request->num_output_buffers = 0;
4436
4437 modified_meta = modified_settings;
4438 expCompensation = 0;
4439 aeLock = 1;
4440 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4441 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4442 modified_settings = modified_meta.release();
4443 request->settings = modified_settings;
4444
4445 /* Capture Settling & 0X frame */
4446
4447 itr = internallyRequestedStreams.begin();
4448 if (itr == internallyRequestedStreams.end()) {
4449 LOGE("Error Internally Requested Stream list is empty");
4450 assert(0);
4451 } else {
4452 itr->need_metadata = 0;
4453 itr->meteringOnly = 1;
4454 }
4455
4456 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4457 request->frame_number = internalFrameNumber;
4458 processCaptureRequest(request, internallyRequestedStreams);
4459
4460 itr = internallyRequestedStreams.begin();
4461 if (itr == internallyRequestedStreams.end()) {
4462 ALOGE("Error Internally Requested Stream list is empty");
4463 assert(0);
4464 } else {
4465 itr->need_metadata = 1;
4466 itr->meteringOnly = 0;
4467 }
4468
4469 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4470 request->frame_number = internalFrameNumber;
4471 processCaptureRequest(request, internallyRequestedStreams);
4472
4473 /* Capture 2X frame*/
4474 modified_meta = modified_settings;
4475 expCompensation = GB_HDR_2X_STEP_EV;
4476 aeLock = 1;
4477 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4478 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4479 modified_settings = modified_meta.release();
4480 request->settings = modified_settings;
4481
4482 itr = internallyRequestedStreams.begin();
4483 if (itr == internallyRequestedStreams.end()) {
4484 ALOGE("Error Internally Requested Stream list is empty");
4485 assert(0);
4486 } else {
4487 itr->need_metadata = 0;
4488 itr->meteringOnly = 1;
4489 }
4490 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4491 request->frame_number = internalFrameNumber;
4492 processCaptureRequest(request, internallyRequestedStreams);
4493
4494 itr = internallyRequestedStreams.begin();
4495 if (itr == internallyRequestedStreams.end()) {
4496 ALOGE("Error Internally Requested Stream list is empty");
4497 assert(0);
4498 } else {
4499 itr->need_metadata = 1;
4500 itr->meteringOnly = 0;
4501 }
4502
4503 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4504 request->frame_number = internalFrameNumber;
4505 processCaptureRequest(request, internallyRequestedStreams);
4506
4507
4508 /* Capture 2X on original streaming config*/
4509 internallyRequestedStreams.clear();
4510
4511 /* Restore original settings pointer */
4512 request->settings = original_settings;
4513 } else {
4514 uint32_t internalFrameNumber;
4515 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4516 request->frame_number = internalFrameNumber;
4517 return processCaptureRequest(request, internallyRequestedStreams);
4518 }
4519
4520 return NO_ERROR;
4521}
4522
4523/*===========================================================================
4524 * FUNCTION : orchestrateResult
4525 *
4526 * DESCRIPTION: Orchestrates a capture result to camera service
4527 *
4528 * PARAMETERS :
4529 * @request : request from framework to process
4530 *
4531 * RETURN :
4532 *
4533 *==========================================================================*/
4534void QCamera3HardwareInterface::orchestrateResult(
4535 camera3_capture_result_t *result)
4536{
4537 uint32_t frameworkFrameNumber;
4538 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4539 frameworkFrameNumber);
4540 if (rc != NO_ERROR) {
4541 LOGE("Cannot find translated frameworkFrameNumber");
4542 assert(0);
4543 } else {
4544 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004545 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004546 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004547 if (result->result != NULL) {
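            // If the result carries ANDROID_SYNC_FRAME_NUMBER, rewrite it to the
            // framework-visible frame number so it matches the translated numbering.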
Binhao Lin299ffc92017-04-27 11:22:47 -07004548 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4549 camera_metadata_entry_t entry;
4550 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4551 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004552 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004553 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4554 if (ret != OK)
4555 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004556 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004557 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004558 result->frame_number = frameworkFrameNumber;
4559 mCallbackOps->process_capture_result(mCallbackOps, result);
4560 }
4561 }
4562}
4563
4564/*===========================================================================
4565 * FUNCTION : orchestrateNotify
4566 *
4567 * DESCRIPTION: Orchestrates a notify to camera service
4568 *
4569 * PARAMETERS :
4570 * @request : request from framework to process
4571 *
4572 * RETURN :
4573 *
4574 *==========================================================================*/
4575void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4576{
4577 uint32_t frameworkFrameNumber;
4578 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004579 int32_t rc = NO_ERROR;
4580
4581 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004582 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004583
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004584 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004585 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4586 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4587 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004588 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004589 LOGE("Cannot find translated frameworkFrameNumber");
4590 assert(0);
4591 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004592 }
4593 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004594
4595 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4596 LOGD("Internal Request drop the notifyCb");
4597 } else {
4598 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4599 mCallbackOps->notify(mCallbackOps, notify_msg);
4600 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004601}
4602
4603/*===========================================================================
4604 * FUNCTION : FrameNumberRegistry
4605 *
4606 * DESCRIPTION: Constructor
4607 *
4608 * PARAMETERS :
4609 *
4610 * RETURN :
4611 *
4612 *==========================================================================*/
4613FrameNumberRegistry::FrameNumberRegistry()
4614{
4615 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4616}
4617
4618/*===========================================================================
4619 * FUNCTION : ~FrameNumberRegistry
4620 *
4621 * DESCRIPTION: Destructor
4622 *
4623 * PARAMETERS :
4624 *
4625 * RETURN :
4626 *
4627 *==========================================================================*/
4628FrameNumberRegistry::~FrameNumberRegistry()
4629{
4630}
4631
4632/*===========================================================================
4633 * FUNCTION : PurgeOldEntriesLocked
4634 *
4635 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4636 *
4637 * PARAMETERS :
4638 *
4639 * RETURN : NONE
4640 *
4641 *==========================================================================*/
4642void FrameNumberRegistry::purgeOldEntriesLocked()
4643{
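    // Walk the registry from the beginning and erase entries that are older than the
    // most recent FRAME_REGISTER_LRU_SIZE internal frame numbers, stopping at the first
    // entry that is still within range.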
4644 while (_register.begin() != _register.end()) {
4645 auto itr = _register.begin();
4646 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4647 _register.erase(itr);
4648 } else {
4649 return;
4650 }
4651 }
4652}
4653
4654/*===========================================================================
4655 * FUNCTION : allocStoreInternalFrameNumber
4656 *
4657 * DESCRIPTION: Method to note down a framework request and associate a new
4658 * internal request number against it
4659 *
4660 * PARAMETERS :
4661 * @frameworkFrameNumber: Identifier given by the framework
4662 * @internalFrameNumber : Output parameter which will hold the newly generated
4663 * internal frame number
4664 *
4665 * RETURN : Error code
4666 *
4667 *==========================================================================*/
4668int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4669 uint32_t &internalFrameNumber)
4670{
4671 Mutex::Autolock lock(mRegistryLock);
4672 internalFrameNumber = _nextFreeInternalNumber++;
4673 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4674 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4675 purgeOldEntriesLocked();
4676 return NO_ERROR;
4677}
4678
4679/*===========================================================================
4680 * FUNCTION : generateStoreInternalFrameNumber
4681 *
4682 * DESCRIPTION: Method to associate a new internal request number independent
4683 * of any association with framework requests
4684 *
4685 * PARAMETERS :
4686 * @internalFrame#: Output parameter which will hold the newly generated internal frame number
4687 *
4688 *
4689 * RETURN : Error code
4690 *
4691 *==========================================================================*/
4692int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4693{
4694 Mutex::Autolock lock(mRegistryLock);
4695 internalFrameNumber = _nextFreeInternalNumber++;
4696 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4697 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4698 purgeOldEntriesLocked();
4699 return NO_ERROR;
4700}
4701
4702/*===========================================================================
4703 * FUNCTION : getFrameworkFrameNumber
4704 *
4705 * DESCRIPTION: Method to query the framework framenumber given an internal #
4706 *
4707 * PARAMETERS :
4708 * @internalFrame#: Internal reference
4709 * @frameworkframenumber: Output parameter holding framework frame entry
4710 *
4711 * RETURN : Error code
4712 *
4713 *==========================================================================*/
4714int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4715 uint32_t &frameworkFrameNumber)
4716{
4717 Mutex::Autolock lock(mRegistryLock);
4718 auto itr = _register.find(internalFrameNumber);
4719 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004720 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004721 return -ENOENT;
4722 }
4723
4724 frameworkFrameNumber = itr->second;
4725 purgeOldEntriesLocked();
4726 return NO_ERROR;
4727}
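// Illustrative usage of FrameNumberRegistry (a sketch, not called from here): the
// orchestration path allocates an internal frame number for every request it sends
// downstream and later translates results back to the framework's numbering.
//
//   FrameNumberRegistry registry;
//   uint32_t internalFn = 0;
//   registry.allocStoreInternalFrameNumber(42, internalFn);   // framework frame 42
//
//   uint32_t frameworkFn = 0;
//   if (registry.getFrameworkFrameNumber(internalFn, frameworkFn) == NO_ERROR) {
//       // frameworkFn == 42; purely internal requests created with
//       // generateStoreInternalFrameNumber() map to EMPTY_FRAMEWORK_FRAME_NUMBER.
//   }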
Thierry Strudel3d639192016-09-09 11:52:26 -07004728
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004729status_t QCamera3HardwareInterface::fillPbStreamConfig(
4730 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4731 QCamera3Channel *channel, uint32_t streamIndex) {
4732 if (config == nullptr) {
4733 LOGE("%s: config is null", __FUNCTION__);
4734 return BAD_VALUE;
4735 }
4736
4737 if (channel == nullptr) {
4738 LOGE("%s: channel is null", __FUNCTION__);
4739 return BAD_VALUE;
4740 }
4741
4742 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4743 if (stream == nullptr) {
4744 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4745 return NAME_NOT_FOUND;
4746 }
4747
4748 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4749 if (streamInfo == nullptr) {
4750 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4751 return NAME_NOT_FOUND;
4752 }
4753
4754 config->id = pbStreamId;
4755 config->image.width = streamInfo->dim.width;
4756 config->image.height = streamInfo->dim.height;
4757 config->image.padding = 0;
4758 config->image.format = pbStreamFormat;
4759
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004760 uint32_t totalPlaneSize = 0;
4761
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004762 // Fill plane information.
4763 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4764 pbcamera::PlaneConfiguration plane;
4765 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4766 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4767 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004768
4769 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004770 }
4771
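    // Whatever remains of the frame length after summing the plane sizes (for example,
    // alignment bytes in the buffer layout) is reported as padding in the stream config.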
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004772 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004773 return OK;
4774}
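// Hypothetical call site for fillPbStreamConfig (the stream id, pixel format, and channel
// used here are illustrative assumptions, not fixed by this file):
//
//   pbcamera::StreamConfiguration rawConfig;
//   status_t res = fillPbStreamConfig(&rawConfig, /*pbStreamId*/ 0,
//           HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*streamIndex*/ 0);
//   if (res != OK) {
//       LOGE("Failed to fill HDR+ RAW stream configuration");
//   }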
4775
Thierry Strudel3d639192016-09-09 11:52:26 -07004776/*===========================================================================
4777 * FUNCTION : processCaptureRequest
4778 *
4779 * DESCRIPTION: process a capture request from camera service
4780 *
4781 * PARAMETERS :
4782 * @request : request from framework to process
4783 *
4784 * RETURN :
4785 *
4786 *==========================================================================*/
4787int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004788 camera3_capture_request_t *request,
4789 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004790{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004791 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004792 int rc = NO_ERROR;
4793 int32_t request_id;
4794 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004795 bool isVidBufRequested = false;
4796 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004797 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004798
4799 pthread_mutex_lock(&mMutex);
4800
4801 // Validate current state
4802 switch (mState) {
4803 case CONFIGURED:
4804 case STARTED:
4805 /* valid state */
4806 break;
4807
4808 case ERROR:
4809 pthread_mutex_unlock(&mMutex);
4810 handleCameraDeviceError();
4811 return -ENODEV;
4812
4813 default:
4814 LOGE("Invalid state %d", mState);
4815 pthread_mutex_unlock(&mMutex);
4816 return -ENODEV;
4817 }
4818
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004819 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004820 if (rc != NO_ERROR) {
4821 LOGE("incoming request is not valid");
4822 pthread_mutex_unlock(&mMutex);
4823 return rc;
4824 }
4825
4826 meta = request->settings;
4827
4828 // For first capture request, send capture intent, and
4829 // stream on all streams
4830 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004831 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004832 // send an unconfigure to the backend so that the isp
4833 // resources are deallocated
4834 if (!mFirstConfiguration) {
4835 cam_stream_size_info_t stream_config_info;
4836 int32_t hal_version = CAM_HAL_V3;
4837 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4838 stream_config_info.buffer_info.min_buffers =
4839 MIN_INFLIGHT_REQUESTS;
4840 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004841 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004842 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004843 clear_metadata_buffer(mParameters);
4844 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4845 CAM_INTF_PARM_HAL_VERSION, hal_version);
4846 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4847 CAM_INTF_META_STREAM_INFO, stream_config_info);
4848 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4849 mParameters);
4850 if (rc < 0) {
4851 LOGE("set_parms for unconfigure failed");
4852 pthread_mutex_unlock(&mMutex);
4853 return rc;
4854 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004855
Thierry Strudel3d639192016-09-09 11:52:26 -07004856 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004857 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004858 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004859 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004860 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004861 property_get("persist.camera.is_type", is_type_value, "4");
4862 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4863 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4864 property_get("persist.camera.is_type_preview", is_type_value, "4");
4865 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4866 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004867
4868 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4869 int32_t hal_version = CAM_HAL_V3;
4870 uint8_t captureIntent =
4871 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4872 mCaptureIntent = captureIntent;
4873 clear_metadata_buffer(mParameters);
4874 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4875 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4876 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004877 if (mFirstConfiguration) {
4878 // configure instant AEC
4879 // Instant AEC is a session based parameter and it is needed only
4880 // once per complete session after open camera.
4881 // i.e. This is set only once for the first capture request, after open camera.
4882 setInstantAEC(meta);
4883 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004884 uint8_t fwkVideoStabMode=0;
4885 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4886 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4887 }
4888
Xue Tuecac74e2017-04-17 13:58:15 -07004889 // If the EIS setprop is enabled, turn it on only for video/preview
4890 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004891 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004892 int32_t vsMode;
4893 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4894 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4895 rc = BAD_VALUE;
4896 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004897 LOGD("setEis %d", setEis);
4898 bool eis3Supported = false;
4899 size_t count = IS_TYPE_MAX;
4900 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4901 for (size_t i = 0; i < count; i++) {
4902 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4903 eis3Supported = true;
4904 break;
4905 }
4906 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004907
4908 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004909 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004910 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4911 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004912 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4913 is_type = isTypePreview;
4914 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4915 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4916 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004917 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004918 } else {
4919 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004920 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004921 } else {
4922 is_type = IS_TYPE_NONE;
4923 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004924 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004925 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004926 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4927 }
4928 }
4929
4930 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4931 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4932
Thierry Strudel54dc9782017-02-15 12:12:10 -08004933 //Disable tintless only if the property is set to 0
4934 memset(prop, 0, sizeof(prop));
4935 property_get("persist.camera.tintless.enable", prop, "1");
4936 int32_t tintless_value = atoi(prop);
4937
Thierry Strudel3d639192016-09-09 11:52:26 -07004938 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4939 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004940
Thierry Strudel3d639192016-09-09 11:52:26 -07004941 //Disable CDS for HFR mode or if DIS/EIS is on.
4942 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4943 //after every configure_stream
4944 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4945 (m_bIsVideo)) {
4946 int32_t cds = CAM_CDS_MODE_OFF;
4947 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4948 CAM_INTF_PARM_CDS_MODE, cds))
4949 LOGE("Failed to disable CDS for HFR mode");
4950
4951 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004952
4953 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4954 uint8_t* use_av_timer = NULL;
4955
4956 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004957 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004958 use_av_timer = &m_debug_avtimer;
4959 }
4960 else{
4961 use_av_timer =
4962 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004963 if (use_av_timer) {
4964 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4965 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004966 }
4967
4968 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4969 rc = BAD_VALUE;
4970 }
4971 }
4972
Thierry Strudel3d639192016-09-09 11:52:26 -07004973 setMobicat();
4974
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004975 uint8_t nrMode = 0;
4976 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4977 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4978 }
4979
Thierry Strudel3d639192016-09-09 11:52:26 -07004980 /* Set fps and hfr mode while sending meta stream info so that sensor
4981 * can configure appropriate streaming mode */
4982 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004983 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4984 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004985 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4986 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004987 if (rc == NO_ERROR) {
4988 int32_t max_fps =
4989 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004990 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004991 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4992 }
4993 /* For HFR, more buffers are dequeued upfront to improve the performance */
4994 if (mBatchSize) {
4995 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4996 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4997 }
4998 }
4999 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005000 LOGE("setHalFpsRange failed");
5001 }
5002 }
5003 if (meta.exists(ANDROID_CONTROL_MODE)) {
5004 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5005 rc = extractSceneMode(meta, metaMode, mParameters);
5006 if (rc != NO_ERROR) {
5007 LOGE("extractSceneMode failed");
5008 }
5009 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005010 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005011
Thierry Strudel04e026f2016-10-10 11:27:36 -07005012 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5013 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5014 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5015 rc = setVideoHdrMode(mParameters, vhdr);
5016 if (rc != NO_ERROR) {
5017 LOGE("setVideoHDR is failed");
5018 }
5019 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005020
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005021 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005022 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005023 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005024 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5025 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5026 sensorModeFullFov)) {
5027 rc = BAD_VALUE;
5028 }
5029 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005030 //TODO: validate the arguments, HSV scenemode should have only the
5031 //advertised fps ranges
5032
5033 /*set the capture intent, hal version, tintless, stream info,
5034 *and DIS enable parameters to the backend*/
5035 LOGD("set_parms META_STREAM_INFO " );
5036 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005037 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5038 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005039 mStreamConfigInfo.type[i],
5040 mStreamConfigInfo.stream_sizes[i].width,
5041 mStreamConfigInfo.stream_sizes[i].height,
5042 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005043 mStreamConfigInfo.format[i],
5044 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005045 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005046
Thierry Strudel3d639192016-09-09 11:52:26 -07005047 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5048 mParameters);
5049 if (rc < 0) {
5050 LOGE("set_parms failed for hal version, stream info");
5051 }
5052
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005053 cam_sensor_mode_info_t sensorModeInfo = {};
5054 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005055 if (rc != NO_ERROR) {
5056 LOGE("Failed to get sensor output size");
5057 pthread_mutex_unlock(&mMutex);
5058 goto error_exit;
5059 }
5060
5061 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5062 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005063 sensorModeInfo.active_array_size.width,
5064 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005065
5066 /* Set batchmode before initializing channel. Since registerBuffer
5067 * internally initializes some of the channels, better set batchmode
5068 * even before first register buffer */
5069 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5070 it != mStreamInfo.end(); it++) {
5071 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5072 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5073 && mBatchSize) {
5074 rc = channel->setBatchSize(mBatchSize);
5075 //Disable per frame map unmap for HFR/batchmode case
5076 rc |= channel->setPerFrameMapUnmap(false);
5077 if (NO_ERROR != rc) {
5078 LOGE("Channel init failed %d", rc);
5079 pthread_mutex_unlock(&mMutex);
5080 goto error_exit;
5081 }
5082 }
5083 }
5084
5085 //First initialize all streams
5086 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5087 it != mStreamInfo.end(); it++) {
5088 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005089
5090 /* Initial value of NR mode is needed before stream on */
5091 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005092 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5093 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005094 setEis) {
5095 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5096 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5097 is_type = mStreamConfigInfo.is_type[i];
5098 break;
5099 }
5100 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005101 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005102 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005103 rc = channel->initialize(IS_TYPE_NONE);
5104 }
5105 if (NO_ERROR != rc) {
5106 LOGE("Channel initialization failed %d", rc);
5107 pthread_mutex_unlock(&mMutex);
5108 goto error_exit;
5109 }
5110 }
5111
5112 if (mRawDumpChannel) {
5113 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5114 if (rc != NO_ERROR) {
5115 LOGE("Error: Raw Dump Channel init failed");
5116 pthread_mutex_unlock(&mMutex);
5117 goto error_exit;
5118 }
5119 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005120 if (mHdrPlusRawSrcChannel) {
5121 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5122 if (rc != NO_ERROR) {
5123 LOGE("Error: HDR+ RAW Source Channel init failed");
5124 pthread_mutex_unlock(&mMutex);
5125 goto error_exit;
5126 }
5127 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005128 if (mSupportChannel) {
5129 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5130 if (rc < 0) {
5131 LOGE("Support channel initialization failed");
5132 pthread_mutex_unlock(&mMutex);
5133 goto error_exit;
5134 }
5135 }
5136 if (mAnalysisChannel) {
5137 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5138 if (rc < 0) {
5139 LOGE("Analysis channel initialization failed");
5140 pthread_mutex_unlock(&mMutex);
5141 goto error_exit;
5142 }
5143 }
5144 if (mDummyBatchChannel) {
5145 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5146 if (rc < 0) {
5147 LOGE("mDummyBatchChannel setBatchSize failed");
5148 pthread_mutex_unlock(&mMutex);
5149 goto error_exit;
5150 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005151 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005152 if (rc < 0) {
5153 LOGE("mDummyBatchChannel initialization failed");
5154 pthread_mutex_unlock(&mMutex);
5155 goto error_exit;
5156 }
5157 }
5158
5159 // Set bundle info
5160 rc = setBundleInfo();
5161 if (rc < 0) {
5162 LOGE("setBundleInfo failed %d", rc);
5163 pthread_mutex_unlock(&mMutex);
5164 goto error_exit;
5165 }
5166
5167 //update settings from app here
5168 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5169 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5170 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5171 }
5172 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5173 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5174 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5175 }
5176 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5177 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5178 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5179
5180 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5181 (mLinkedCameraId != mCameraId) ) {
5182 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5183 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005184 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005185 goto error_exit;
5186 }
5187 }
5188
5189 // add bundle related cameras
5190 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5191 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005192 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5193 &m_pDualCamCmdPtr->bundle_info;
5194 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005195 if (mIsDeviceLinked)
5196 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5197 else
5198 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5199
5200 pthread_mutex_lock(&gCamLock);
5201
5202 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5203 LOGE("Dualcam: Invalid Session Id ");
5204 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005205 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005206 goto error_exit;
5207 }
5208
5209 if (mIsMainCamera == 1) {
5210 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5211 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005212 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005213 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005214 // related session id should be session id of linked session
5215 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5216 } else {
5217 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5218 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005219 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005220 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005221 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5222 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005223 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005224 pthread_mutex_unlock(&gCamLock);
5225
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005226 rc = mCameraHandle->ops->set_dual_cam_cmd(
5227 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005228 if (rc < 0) {
5229 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005230 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005231 goto error_exit;
5232 }
5233 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005234 goto no_error;
5235error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005236 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005237 return rc;
5238no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005239 mWokenUpByDaemon = false;
5240 mPendingLiveRequest = 0;
5241 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005242 }
5243
5244 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005245 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005246
5247 if (mFlushPerf) {
5248 //we cannot accept any requests during flush
5249 LOGE("process_capture_request cannot proceed during flush");
5250 pthread_mutex_unlock(&mMutex);
5251 return NO_ERROR; //should return an error
5252 }
5253
5254 if (meta.exists(ANDROID_REQUEST_ID)) {
5255 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5256 mCurrentRequestId = request_id;
5257 LOGD("Received request with id: %d", request_id);
5258 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5259 LOGE("Unable to find request id field, \
5260 & no previous id available");
5261 pthread_mutex_unlock(&mMutex);
5262 return NAME_NOT_FOUND;
5263 } else {
5264 LOGD("Re-using old request id");
5265 request_id = mCurrentRequestId;
5266 }
5267
5268 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5269 request->num_output_buffers,
5270 request->input_buffer,
5271 frameNumber);
5272 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005273 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005274 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005275 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005276 uint32_t snapshotStreamId = 0;
5277 for (size_t i = 0; i < request->num_output_buffers; i++) {
5278 const camera3_stream_buffer_t& output = request->output_buffers[i];
5279 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5280
Emilian Peev7650c122017-01-19 08:24:33 -08005281 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5282 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005283 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005284 blob_request = 1;
5285 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5286 }
5287
5288 if (output.acquire_fence != -1) {
5289 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5290 close(output.acquire_fence);
5291 if (rc != OK) {
5292 LOGE("sync wait failed %d", rc);
5293 pthread_mutex_unlock(&mMutex);
5294 return rc;
5295 }
5296 }
5297
Emilian Peev0f3c3162017-03-15 12:57:46 +00005298 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5299 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005300 depthRequestPresent = true;
5301 continue;
5302 }
5303
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005304 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005305 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005306
5307 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5308 isVidBufRequested = true;
5309 }
5310 }
5311
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005312 //FIXME: Add checks to ensure no dups in validateCaptureRequest
5313 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5314 itr++) {
5315 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5316 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5317 channel->getStreamID(channel->getStreamTypeMask());
5318
5319 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5320 isVidBufRequested = true;
5321 }
5322 }
5323
Thierry Strudel3d639192016-09-09 11:52:26 -07005324 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005325 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005326 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005327 }
5328 if (blob_request && mRawDumpChannel) {
5329 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005330 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005331 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005332 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005333 }
5334
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005335 {
5336 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5337 // Request a RAW buffer if
5338 // 1. mHdrPlusRawSrcChannel is valid.
5339 // 2. frameNumber is a multiple of kHdrPlusRawPeriod (in order to limit the RAW capture rate).
5340 // 3. There is no pending HDR+ request.
5341 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5342 mHdrPlusPendingRequests.size() == 0) {
5343 streamsArray.stream_request[streamsArray.num_streams].streamID =
5344 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5345 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5346 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005347 }
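    // Illustration (added comment, not in the original source): assuming
    // kHdrPlusRawPeriod == 4 purely for the sake of example, a RAW buffer would be
    // requested on frame numbers 0, 4, 8, ... as long as no HDR+ request is
    // pending; intermediate frames skip the RAW stream so the RAW capture rate
    // stays bounded.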
5348
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005349 //extract capture intent
5350 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5351 mCaptureIntent =
5352 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5353 }
5354
5355 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5356 mCacMode =
5357 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5358 }
5359
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005360 uint8_t requestedLensShadingMapMode;
5361 // Get the shading map mode.
5362 if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5363 mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5364 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5365 } else {
5366 requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5367 }
5368
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005369 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005370 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005371
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005372 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005373 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005374 // If this request has a still capture intent, try to submit an HDR+ request.
5375 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5376 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5377 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5378 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005379 }
5380
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005381 if (hdrPlusRequest) {
5382 // For a HDR+ request, just set the frame parameters.
5383 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5384 if (rc < 0) {
5385 LOGE("fail to set frame parameters");
5386 pthread_mutex_unlock(&mMutex);
5387 return rc;
5388 }
5389 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005390 /* Parse the settings:
5391 * - For every request in NORMAL MODE
5392 * - For every request in HFR mode during preview only case
5393 * - For first request of every batch in HFR mode during video
5394 * recording. In batch mode the same settings, except the frame number,
5395 * are repeated in each request of the batch.
5396 */
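    // Illustration (added comment, not in the original source): assuming
    // mBatchSize == 8 purely for example, setFrameParameters() below runs for
    // every request in normal and preview-only modes, but during batched HFR
    // video recording it runs only for the first request of each batch
    // (mToBeQueuedVidBufs == 0), i.e. roughly every 8th video request.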
5397 if (!mBatchSize ||
5398 (mBatchSize && !isVidBufRequested) ||
5399 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005400 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005401 if (rc < 0) {
5402 LOGE("fail to set frame parameters");
5403 pthread_mutex_unlock(&mMutex);
5404 return rc;
5405 }
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005406
5407 {
5408 // If HDR+ mode is enabled, override lens shading mode to ON so lens shading map
5409 // will be reported in result metadata.
5410 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5411 if (mHdrPlusModeEnabled) {
5412 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5413 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5414 }
5415 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005416 }
5417 /* For batch mode HFR, setFrameParameters is not called for every
5418 * request; only the frame number of the latest request is parsed.
5419 * Keep track of the first and last frame numbers in a batch so that
5420 * metadata for all the frame numbers of the batch can be duplicated in
5421 * handleBatchMetadata */
5422 if (mBatchSize) {
5423 if (!mToBeQueuedVidBufs) {
5424 //start of the batch
5425 mFirstFrameNumberInBatch = request->frame_number;
5426 }
5427 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5428 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5429 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005430 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005431 return BAD_VALUE;
5432 }
5433 }
5434 if (mNeedSensorRestart) {
5435 /* Unlock the mutex as restartSensor waits on the channels to be
5436 * stopped, which in turn calls stream callback functions -
5437 * handleBufferWithLock and handleMetadataWithLock */
5438 pthread_mutex_unlock(&mMutex);
5439 rc = dynamicUpdateMetaStreamInfo();
5440 if (rc != NO_ERROR) {
5441 LOGE("Restarting the sensor failed");
5442 return BAD_VALUE;
5443 }
5444 mNeedSensorRestart = false;
5445 pthread_mutex_lock(&mMutex);
5446 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005447 if(mResetInstantAEC) {
5448 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5449 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5450 mResetInstantAEC = false;
5451 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005452 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005453 if (request->input_buffer->acquire_fence != -1) {
5454 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5455 close(request->input_buffer->acquire_fence);
5456 if (rc != OK) {
5457 LOGE("input buffer sync wait failed %d", rc);
5458 pthread_mutex_unlock(&mMutex);
5459 return rc;
5460 }
5461 }
5462 }
5463
5464 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5465 mLastCustIntentFrmNum = frameNumber;
5466 }
5467 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005468 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005469 pendingRequestIterator latestRequest;
5470 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005471 pendingRequest.num_buffers = depthRequestPresent ?
5472 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005473 pendingRequest.request_id = request_id;
5474 pendingRequest.blob_request = blob_request;
5475 pendingRequest.timestamp = 0;
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005476 pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07005477 if (request->input_buffer) {
5478 pendingRequest.input_buffer =
5479 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5480 *(pendingRequest.input_buffer) = *(request->input_buffer);
5481 pInputBuffer = pendingRequest.input_buffer;
5482 } else {
5483 pendingRequest.input_buffer = NULL;
5484 pInputBuffer = NULL;
5485 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005486 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005487
5488 pendingRequest.pipeline_depth = 0;
5489 pendingRequest.partial_result_cnt = 0;
5490 extractJpegMetadata(mCurJpegMeta, request);
5491 pendingRequest.jpegMetadata = mCurJpegMeta;
5492 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005493 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005494 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5495 mHybridAeEnable =
5496 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5497 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005498
5499 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5500 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005501 /* DevCamDebug metadata processCaptureRequest */
5502 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5503 mDevCamDebugMetaEnable =
5504 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5505 }
5506 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5507 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005508
5509 //extract CAC info
5510 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5511 mCacMode =
5512 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5513 }
5514 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005515 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005516
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005517 // extract enableZsl info
5518 if (gExposeEnableZslKey) {
5519 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5520 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5521 mZslEnabled = pendingRequest.enableZsl;
5522 } else {
5523 pendingRequest.enableZsl = mZslEnabled;
5524 }
5525 }
5526
Thierry Strudel3d639192016-09-09 11:52:26 -07005527 PendingBuffersInRequest bufsForCurRequest;
5528 bufsForCurRequest.frame_number = frameNumber;
5529 // Mark current timestamp for the new request
5530 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005531 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005532
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005533 if (hdrPlusRequest) {
5534 // Save settings for this request.
5535 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5536 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5537
5538 // Add to pending HDR+ request queue.
5539 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5540 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5541
5542 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5543 }
5544
Thierry Strudel3d639192016-09-09 11:52:26 -07005545 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005546 if ((request->output_buffers[i].stream->data_space ==
5547 HAL_DATASPACE_DEPTH) &&
5548 (HAL_PIXEL_FORMAT_BLOB ==
5549 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005550 continue;
5551 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005552 RequestedBufferInfo requestedBuf;
5553 memset(&requestedBuf, 0, sizeof(requestedBuf));
5554 requestedBuf.stream = request->output_buffers[i].stream;
5555 requestedBuf.buffer = NULL;
5556 pendingRequest.buffers.push_back(requestedBuf);
5557
5558 // Add to buffer handle the pending buffers list
5559 PendingBufferInfo bufferInfo;
5560 bufferInfo.buffer = request->output_buffers[i].buffer;
5561 bufferInfo.stream = request->output_buffers[i].stream;
5562 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5563 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5564 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5565 frameNumber, bufferInfo.buffer,
5566 channel->getStreamTypeMask(), bufferInfo.stream->format);
5567 }
5568 // Add this request packet into mPendingBuffersMap
5569 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5570 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5571 mPendingBuffersMap.get_num_overall_buffers());
5572
5573 latestRequest = mPendingRequestsList.insert(
5574 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005575
5576 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5577 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005578 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005579 for (size_t i = 0; i < request->num_output_buffers; i++) {
5580 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5581 }
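    // Note (added comment, not in the original source): registering these
    // expectations up front lets the dispatchers hold shutter and buffer
    // callbacks until earlier frame numbers have been delivered, so results
    // reach the framework in frame-number order even when channels complete
    // out of order.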
5582
Thierry Strudel3d639192016-09-09 11:52:26 -07005583 if(mFlush) {
5584 LOGI("mFlush is true");
5585 pthread_mutex_unlock(&mMutex);
5586 return NO_ERROR;
5587 }
5588
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005589 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5590 // channel.
5591 if (!hdrPlusRequest) {
5592 int indexUsed;
5593 // Notify metadata channel we receive a request
5594 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005595
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005596 if(request->input_buffer != NULL){
5597 LOGD("Input request, frame_number %d", frameNumber);
5598 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5599 if (NO_ERROR != rc) {
5600 LOGE("fail to set reproc parameters");
5601 pthread_mutex_unlock(&mMutex);
5602 return rc;
5603 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005604 }
5605
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005606 // Call request on other streams
5607 uint32_t streams_need_metadata = 0;
5608 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5609 for (size_t i = 0; i < request->num_output_buffers; i++) {
5610 const camera3_stream_buffer_t& output = request->output_buffers[i];
5611 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5612
5613 if (channel == NULL) {
5614 LOGW("invalid channel pointer for stream");
5615 continue;
5616 }
5617
5618 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5619 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5620 output.buffer, request->input_buffer, frameNumber);
5621 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005622 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005623 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5624 if (rc < 0) {
5625 LOGE("Fail to request on picture channel");
5626 pthread_mutex_unlock(&mMutex);
5627 return rc;
5628 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005629 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005630 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5631 assert(NULL != mDepthChannel);
5632 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005633
Emilian Peev7650c122017-01-19 08:24:33 -08005634 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5635 if (rc < 0) {
5636 LOGE("Fail to map on depth buffer");
5637 pthread_mutex_unlock(&mMutex);
5638 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005639 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005640 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005641 } else {
5642 LOGD("snapshot request with buffer %p, frame_number %d",
5643 output.buffer, frameNumber);
5644 if (!request->settings) {
5645 rc = channel->request(output.buffer, frameNumber,
5646 NULL, mPrevParameters, indexUsed);
5647 } else {
5648 rc = channel->request(output.buffer, frameNumber,
5649 NULL, mParameters, indexUsed);
5650 }
5651 if (rc < 0) {
5652 LOGE("Fail to request on picture channel");
5653 pthread_mutex_unlock(&mMutex);
5654 return rc;
5655 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005656
Emilian Peev7650c122017-01-19 08:24:33 -08005657 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5658 uint32_t j = 0;
5659 for (j = 0; j < streamsArray.num_streams; j++) {
5660 if (streamsArray.stream_request[j].streamID == streamId) {
5661 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5662 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5663 else
5664 streamsArray.stream_request[j].buf_index = indexUsed;
5665 break;
5666 }
5667 }
5668 if (j == streamsArray.num_streams) {
5669 LOGE("Did not find matching stream to update index");
5670 assert(0);
5671 }
5672
5673 pendingBufferIter->need_metadata = true;
5674 streams_need_metadata++;
5675 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005676 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005677 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5678 bool needMetadata = false;
5679 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5680 rc = yuvChannel->request(output.buffer, frameNumber,
5681 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5682 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005683 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005684 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005685 pthread_mutex_unlock(&mMutex);
5686 return rc;
5687 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005688
5689 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5690 uint32_t j = 0;
5691 for (j = 0; j < streamsArray.num_streams; j++) {
5692 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005693 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5694 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5695 else
5696 streamsArray.stream_request[j].buf_index = indexUsed;
5697 break;
5698 }
5699 }
5700 if (j == streamsArray.num_streams) {
5701 LOGE("Did not find matching stream to update index");
5702 assert(0);
5703 }
5704
5705 pendingBufferIter->need_metadata = needMetadata;
5706 if (needMetadata)
5707 streams_need_metadata += 1;
5708 LOGD("calling YUV channel request, need_metadata is %d",
5709 needMetadata);
5710 } else {
5711 LOGD("request with buffer %p, frame_number %d",
5712 output.buffer, frameNumber);
5713
5714 rc = channel->request(output.buffer, frameNumber, indexUsed);
5715
5716 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5717 uint32_t j = 0;
5718 for (j = 0; j < streamsArray.num_streams; j++) {
5719 if (streamsArray.stream_request[j].streamID == streamId) {
5720 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5721 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5722 else
5723 streamsArray.stream_request[j].buf_index = indexUsed;
5724 break;
5725 }
5726 }
5727 if (j == streamsArray.num_streams) {
5728 LOGE("Did not find matching stream to update index");
5729 assert(0);
5730 }
5731
5732 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5733 && mBatchSize) {
5734 mToBeQueuedVidBufs++;
5735 if (mToBeQueuedVidBufs == mBatchSize) {
5736 channel->queueBatchBuf();
5737 }
5738 }
5739 if (rc < 0) {
5740 LOGE("request failed");
5741 pthread_mutex_unlock(&mMutex);
5742 return rc;
5743 }
5744 }
5745 pendingBufferIter++;
5746 }
5747
5748 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5749 itr++) {
5750 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5751
5752 if (channel == NULL) {
5753 LOGE("invalid channel pointer for stream");
5754 assert(0);
5755 return BAD_VALUE;
5756 }
5757
5758 InternalRequest requestedStream;
5759 requestedStream = (*itr);
5760
5761
5762 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5763 LOGD("snapshot request internally input buffer %p, frame_number %d",
5764 request->input_buffer, frameNumber);
5765 if(request->input_buffer != NULL){
5766 rc = channel->request(NULL, frameNumber,
5767 pInputBuffer, &mReprocMeta, indexUsed, true,
5768 requestedStream.meteringOnly);
5769 if (rc < 0) {
5770 LOGE("Fail to request on picture channel");
5771 pthread_mutex_unlock(&mMutex);
5772 return rc;
5773 }
5774 } else {
5775 LOGD("snapshot request with frame_number %d", frameNumber);
5776 if (!request->settings) {
5777 rc = channel->request(NULL, frameNumber,
5778 NULL, mPrevParameters, indexUsed, true,
5779 requestedStream.meteringOnly);
5780 } else {
5781 rc = channel->request(NULL, frameNumber,
5782 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5783 }
5784 if (rc < 0) {
5785 LOGE("Fail to request on picture channel");
5786 pthread_mutex_unlock(&mMutex);
5787 return rc;
5788 }
5789
5790 if ((*itr).meteringOnly != 1) {
5791 requestedStream.need_metadata = 1;
5792 streams_need_metadata++;
5793 }
5794 }
5795
5796 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5797 uint32_t j = 0;
5798 for (j = 0; j < streamsArray.num_streams; j++) {
5799 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005800 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5801 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5802 else
5803 streamsArray.stream_request[j].buf_index = indexUsed;
5804 break;
5805 }
5806 }
5807 if (j == streamsArray.num_streams) {
5808 LOGE("Did not find matching stream to update index");
5809 assert(0);
5810 }
5811
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005812 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005813 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005814 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005815 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005816 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005817 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005818 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005819
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005820 //If 2 streams have need_metadata set to true, fail the request, unless
5821 //we copy/reference count the metadata buffer
5822 if (streams_need_metadata > 1) {
5823 LOGE("not supporting request in which two streams requires"
5824 " 2 HAL metadata for reprocessing");
5825 pthread_mutex_unlock(&mMutex);
5826 return -EINVAL;
5827 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005828
Emilian Peev656e4fa2017-06-02 16:47:04 +01005829 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5830 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5831 if (depthRequestPresent && mDepthChannel) {
5832 if (request->settings) {
5833 camera_metadata_ro_entry entry;
5834 if (find_camera_metadata_ro_entry(request->settings,
5835 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5836 if (entry.data.u8[0]) {
5837 pdafEnable = CAM_PD_DATA_ENABLED;
5838 } else {
5839 pdafEnable = CAM_PD_DATA_SKIP;
5840 }
5841 mDepthCloudMode = pdafEnable;
5842 } else {
5843 pdafEnable = mDepthCloudMode;
5844 }
5845 } else {
5846 pdafEnable = mDepthCloudMode;
5847 }
5848 }
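    // Note (added comment, not in the original source): PDAF data defaults to
    // CAM_PD_DATA_SKIP whenever a depth channel exists (CAM_PD_DATA_DISABLED when
    // it does not) and is switched to CAM_PD_DATA_ENABLED only when the current
    // depth request explicitly asks for it via
    // NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE; the choice is cached in
    // mDepthCloudMode for requests that carry no settings.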
5849
Emilian Peev7650c122017-01-19 08:24:33 -08005850 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5851 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5852 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5853 pthread_mutex_unlock(&mMutex);
5854 return BAD_VALUE;
5855 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005856
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005857 if (request->input_buffer == NULL) {
5858 /* Set the parameters to backend:
5859 * - For every request in NORMAL MODE
5860 * - For every request in HFR mode during preview only case
5861 * - Once every batch in HFR mode during video recording
5862 */
5863 if (!mBatchSize ||
5864 (mBatchSize && !isVidBufRequested) ||
5865 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5866 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5867 mBatchSize, isVidBufRequested,
5868 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005869
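    // Descriptive note (added, not in the original source): for the last request
    // of a batch, merge the stream IDs collected across the whole batch into
    // mBatchedStreamsArray (a de-duplicated union keyed by streamID), so the
    // single set_parms call below covers every stream touched by the batch.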
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005870 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5871 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5872 uint32_t m = 0;
5873 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5874 if (streamsArray.stream_request[k].streamID ==
5875 mBatchedStreamsArray.stream_request[m].streamID)
5876 break;
5877 }
5878 if (m == mBatchedStreamsArray.num_streams) {
5879 mBatchedStreamsArray.stream_request\
5880 [mBatchedStreamsArray.num_streams].streamID =
5881 streamsArray.stream_request[k].streamID;
5882 mBatchedStreamsArray.stream_request\
5883 [mBatchedStreamsArray.num_streams].buf_index =
5884 streamsArray.stream_request[k].buf_index;
5885 mBatchedStreamsArray.num_streams =
5886 mBatchedStreamsArray.num_streams + 1;
5887 }
5888 }
5889 streamsArray = mBatchedStreamsArray;
5890 }
5891 /* Update stream id of all the requested buffers */
5892 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5893 streamsArray)) {
5894 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005895 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005896 return BAD_VALUE;
5897 }
5898
5899 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5900 mParameters);
5901 if (rc < 0) {
5902 LOGE("set_parms failed");
5903 }
5904 /* reset to zero because the batch is queued */
5905 mToBeQueuedVidBufs = 0;
5906 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5907 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5908 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005909 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5910 uint32_t m = 0;
5911 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5912 if (streamsArray.stream_request[k].streamID ==
5913 mBatchedStreamsArray.stream_request[m].streamID)
5914 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005915 }
5916 if (m == mBatchedStreamsArray.num_streams) {
5917 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5918 streamID = streamsArray.stream_request[k].streamID;
5919 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5920 buf_index = streamsArray.stream_request[k].buf_index;
5921 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5922 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005923 }
5924 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005925 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005926
5927 // Start all streams after the first setting is sent, so that the
5928 // setting can be applied sooner: (0 + apply_delay)th frame.
5929 if (mState == CONFIGURED && mChannelHandle) {
5930 //Then start them.
5931 LOGH("Start META Channel");
5932 rc = mMetadataChannel->start();
5933 if (rc < 0) {
5934 LOGE("META channel start failed");
5935 pthread_mutex_unlock(&mMutex);
5936 return rc;
5937 }
5938
5939 if (mAnalysisChannel) {
5940 rc = mAnalysisChannel->start();
5941 if (rc < 0) {
5942 LOGE("Analysis channel start failed");
5943 mMetadataChannel->stop();
5944 pthread_mutex_unlock(&mMutex);
5945 return rc;
5946 }
5947 }
5948
5949 if (mSupportChannel) {
5950 rc = mSupportChannel->start();
5951 if (rc < 0) {
5952 LOGE("Support channel start failed");
5953 mMetadataChannel->stop();
5954 /* Although support and analysis are mutually exclusive today,
5955 adding this in any case for future proofing */
5956 if (mAnalysisChannel) {
5957 mAnalysisChannel->stop();
5958 }
5959 pthread_mutex_unlock(&mMutex);
5960 return rc;
5961 }
5962 }
5963 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5964 it != mStreamInfo.end(); it++) {
5965 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5966 LOGH("Start Processing Channel mask=%d",
5967 channel->getStreamTypeMask());
5968 rc = channel->start();
5969 if (rc < 0) {
5970 LOGE("channel start failed");
5971 pthread_mutex_unlock(&mMutex);
5972 return rc;
5973 }
5974 }
5975
5976 if (mRawDumpChannel) {
5977 LOGD("Starting raw dump stream");
5978 rc = mRawDumpChannel->start();
5979 if (rc != NO_ERROR) {
5980 LOGE("Error Starting Raw Dump Channel");
5981 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5982 it != mStreamInfo.end(); it++) {
5983 QCamera3Channel *channel =
5984 (QCamera3Channel *)(*it)->stream->priv;
5985 LOGH("Stopping Processing Channel mask=%d",
5986 channel->getStreamTypeMask());
5987 channel->stop();
5988 }
5989 if (mSupportChannel)
5990 mSupportChannel->stop();
5991 if (mAnalysisChannel) {
5992 mAnalysisChannel->stop();
5993 }
5994 mMetadataChannel->stop();
5995 pthread_mutex_unlock(&mMutex);
5996 return rc;
5997 }
5998 }
5999
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006000 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006001 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006002 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006003 if (rc != NO_ERROR) {
6004 LOGE("start_channel failed %d", rc);
6005 pthread_mutex_unlock(&mMutex);
6006 return rc;
6007 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006008
6009 {
6010 // Configure Easel for stream on.
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006011 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07006012
6013 // Now that sensor mode should have been selected, get the selected sensor mode
6014 // info.
6015 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6016 getCurrentSensorModeInfo(mSensorModeInfo);
6017
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006018 if (EaselManagerClientOpened) {
6019 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006020 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6021 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006022 if (rc != OK) {
6023 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6024 mCameraId, mSensorModeInfo.op_pixel_clk);
6025 pthread_mutex_unlock(&mMutex);
6026 return rc;
6027 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07006028 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006029 }
6030 }
6031
6032 // Start sensor streaming.
6033 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6034 mChannelHandle);
6035 if (rc != NO_ERROR) {
6036 LOGE("start_sensor_stream_on failed %d", rc);
6037 pthread_mutex_unlock(&mMutex);
6038 return rc;
6039 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006040 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006041 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006042 }
6043
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006044 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chenjie Luo4a761802017-06-13 17:35:54 +00006045 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006046 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006047 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006048 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6049 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6050 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6051 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07006052
6053 if (isSessionHdrPlusModeCompatible()) {
6054 rc = enableHdrPlusModeLocked();
6055 if (rc != OK) {
6056 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6057 pthread_mutex_unlock(&mMutex);
6058 return rc;
6059 }
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006060 }
6061
6062 mFirstPreviewIntentSeen = true;
6063 }
6064 }
6065
Thierry Strudel3d639192016-09-09 11:52:26 -07006066 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6067
6068 mState = STARTED;
6069 // Added a timed condition wait
6070 struct timespec ts;
6071 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006072 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006073 if (rc < 0) {
6074 isValidTimeout = 0;
6075 LOGE("Error reading the real time clock!!");
6076 }
6077 else {
6078 // Set a 5 sec timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006079 int64_t timeout = 5;
6080 {
6081 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6082 // If there is a pending HDR+ request, the following requests may be blocked until the
6083 // HDR+ request is done. So allow a longer timeout.
6084 if (mHdrPlusPendingRequests.size() > 0) {
6085 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6086 }
6087 }
6088 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006089 }
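    // Note (added comment, not in the original source): with the absolute
    // deadline computed above, the loop below throttles process_capture_request
    // until the number of in-flight requests drops below mMinInFlightRequests,
    // returning -ENODEV if the backend does not wake us within the timeout.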
6090 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006091 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006092 (mState != ERROR) && (mState != DEINIT)) {
6093 if (!isValidTimeout) {
6094 LOGD("Blocking on conditional wait");
6095 pthread_cond_wait(&mRequestCond, &mMutex);
6096 }
6097 else {
6098 LOGD("Blocking on timed conditional wait");
6099 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6100 if (rc == ETIMEDOUT) {
6101 rc = -ENODEV;
6102 LOGE("Unblocked on timeout!!!!");
6103 break;
6104 }
6105 }
6106 LOGD("Unblocked");
6107 if (mWokenUpByDaemon) {
6108 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006109 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006110 break;
6111 }
6112 }
6113 pthread_mutex_unlock(&mMutex);
6114
6115 return rc;
6116}
6117
6118/*===========================================================================
6119 * FUNCTION : dump
6120 *
6121 * DESCRIPTION: Dumps the HAL's pending requests, buffers and frame drops
6122 *
6123 * PARAMETERS :
6124 * @fd : file descriptor to write the dump output to
6125 *
6126 * RETURN : None
6127 *==========================================================================*/
6128void QCamera3HardwareInterface::dump(int fd)
6129{
6130 pthread_mutex_lock(&mMutex);
6131 dprintf(fd, "\n Camera HAL3 information Begin \n");
6132
6133 dprintf(fd, "\nNumber of pending requests: %zu \n",
6134 mPendingRequestsList.size());
6135 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6136 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6137 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6138 for(pendingRequestIterator i = mPendingRequestsList.begin();
6139 i != mPendingRequestsList.end(); i++) {
6140 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6141 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6142 i->input_buffer);
6143 }
6144 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6145 mPendingBuffersMap.get_num_overall_buffers());
6146 dprintf(fd, "-------+------------------\n");
6147 dprintf(fd, " Frame | Stream type mask \n");
6148 dprintf(fd, "-------+------------------\n");
6149 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6150 for(auto &j : req.mPendingBufferList) {
6151 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6152 dprintf(fd, " %5d | %11d \n",
6153 req.frame_number, channel->getStreamTypeMask());
6154 }
6155 }
6156 dprintf(fd, "-------+------------------\n");
6157
6158 dprintf(fd, "\nPending frame drop list: %zu\n",
6159 mPendingFrameDropList.size());
6160 dprintf(fd, "-------+-----------\n");
6161 dprintf(fd, " Frame | Stream ID \n");
6162 dprintf(fd, "-------+-----------\n");
6163 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6164 i != mPendingFrameDropList.end(); i++) {
6165 dprintf(fd, " %5d | %9d \n",
6166 i->frame_number, i->stream_ID);
6167 }
6168 dprintf(fd, "-------+-----------\n");
6169
6170 dprintf(fd, "\n Camera HAL3 information End \n");
6171
6172 /* use dumpsys media.camera as trigger to send update debug level event */
6173 mUpdateDebugLevel = true;
6174 pthread_mutex_unlock(&mMutex);
6175 return;
6176}
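/* Note (added, not part of the original source): this dump path is typically
 * triggered via "adb shell dumpsys media.camera", which is also the trigger the
 * mUpdateDebugLevel flag above relies on to refresh debug log levels. */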
6177
6178/*===========================================================================
6179 * FUNCTION : flush
6180 *
6181 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6182 * conditionally restarts channels
6183 *
6184 * PARAMETERS :
6185 * @restartChannels : re-start all channels
6186 *
6187 *
6188 * RETURN :
6189 * 0 on success
6190 * Error code on failure
6191 *==========================================================================*/
6192int QCamera3HardwareInterface::flush(bool restartChannels)
6193{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006194 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006195 int32_t rc = NO_ERROR;
6196
6197 LOGD("Unblocking Process Capture Request");
6198 pthread_mutex_lock(&mMutex);
6199 mFlush = true;
6200 pthread_mutex_unlock(&mMutex);
6201
6202 rc = stopAllChannels();
6203 // unlink of dualcam
6204 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006205 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6206 &m_pDualCamCmdPtr->bundle_info;
6207 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006208 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6209 pthread_mutex_lock(&gCamLock);
6210
6211 if (mIsMainCamera == 1) {
6212 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6213 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006214 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006215 // related session id should be session id of linked session
6216 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6217 } else {
6218 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6219 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006220 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006221 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6222 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006223 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006224 pthread_mutex_unlock(&gCamLock);
6225
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006226 rc = mCameraHandle->ops->set_dual_cam_cmd(
6227 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006228 if (rc < 0) {
6229 LOGE("Dualcam: Unlink failed, but still proceed to close");
6230 }
6231 }
6232
6233 if (rc < 0) {
6234 LOGE("stopAllChannels failed");
6235 return rc;
6236 }
6237 if (mChannelHandle) {
6238 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6239 mChannelHandle);
6240 }
6241
6242 // Reset bundle info
6243 rc = setBundleInfo();
6244 if (rc < 0) {
6245 LOGE("setBundleInfo failed %d", rc);
6246 return rc;
6247 }
6248
6249 // Mutex Lock
6250 pthread_mutex_lock(&mMutex);
6251
6252 // Unblock process_capture_request
6253 mPendingLiveRequest = 0;
6254 pthread_cond_signal(&mRequestCond);
6255
6256 rc = notifyErrorForPendingRequests();
6257 if (rc < 0) {
6258 LOGE("notifyErrorForPendingRequests failed");
6259 pthread_mutex_unlock(&mMutex);
6260 return rc;
6261 }
6262
6263 mFlush = false;
6264
6265 // Start the Streams/Channels
6266 if (restartChannels) {
6267 rc = startAllChannels();
6268 if (rc < 0) {
6269 LOGE("startAllChannels failed");
6270 pthread_mutex_unlock(&mMutex);
6271 return rc;
6272 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006273 if (mChannelHandle) {
6274 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006275 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006276 if (rc < 0) {
6277 LOGE("start_channel failed");
6278 pthread_mutex_unlock(&mMutex);
6279 return rc;
6280 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006281 }
6282 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006283 pthread_mutex_unlock(&mMutex);
6284
6285 return 0;
6286}
6287
6288/*===========================================================================
6289 * FUNCTION : flushPerf
6290 *
6291 * DESCRIPTION: This is the performance-optimized version of flush that does
6292 * not use stream off; instead it flushes the backend pipeline
6293 *
6294 * PARAMETERS :
6295 *
6296 *
6297 * RETURN : 0 : success
6298 * -EINVAL: input is malformed (device is not valid)
6299 * -ENODEV: if the device has encountered a serious error
6300 *==========================================================================*/
6301int QCamera3HardwareInterface::flushPerf()
6302{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006303 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006304 int32_t rc = 0;
6305 struct timespec timeout;
6306 bool timed_wait = false;
6307
6308 pthread_mutex_lock(&mMutex);
6309 mFlushPerf = true;
6310 mPendingBuffersMap.numPendingBufsAtFlush =
6311 mPendingBuffersMap.get_num_overall_buffers();
6312 LOGD("Calling flush. Wait for %d buffers to return",
6313 mPendingBuffersMap.numPendingBufsAtFlush);
6314
6315 /* send the flush event to the backend */
6316 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6317 if (rc < 0) {
6318 LOGE("Error in flush: IOCTL failure");
6319 mFlushPerf = false;
6320 pthread_mutex_unlock(&mMutex);
6321 return -ENODEV;
6322 }
6323
6324 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6325 LOGD("No pending buffers in HAL, return flush");
6326 mFlushPerf = false;
6327 pthread_mutex_unlock(&mMutex);
6328 return rc;
6329 }
6330
6331 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006332 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006333 if (rc < 0) {
6334 LOGE("Error reading the real time clock, cannot use timed wait");
6335 } else {
6336 timeout.tv_sec += FLUSH_TIMEOUT;
6337 timed_wait = true;
6338 }
6339
6340 //Block on conditional variable
6341 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6342 LOGD("Waiting on mBuffersCond");
6343 if (!timed_wait) {
6344 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6345 if (rc != 0) {
6346 LOGE("pthread_cond_wait failed due to rc = %s",
6347 strerror(rc));
6348 break;
6349 }
6350 } else {
6351 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6352 if (rc != 0) {
6353 LOGE("pthread_cond_timedwait failed due to rc = %s",
6354 strerror(rc));
6355 break;
6356 }
6357 }
6358 }
6359 if (rc != 0) {
6360 mFlushPerf = false;
6361 pthread_mutex_unlock(&mMutex);
6362 return -ENODEV;
6363 }
6364
6365 LOGD("Received buffers, now safe to return them");
6366
6367 //make sure the channels handle flush
6368 //currently only required for the picture channel to release snapshot resources
6369 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6370 it != mStreamInfo.end(); it++) {
6371 QCamera3Channel *channel = (*it)->channel;
6372 if (channel) {
6373 rc = channel->flush();
6374 if (rc) {
6375 LOGE("Flushing the channels failed with error %d", rc);
6376 // even though the channel flush failed we need to continue and
6377 // return the buffers we have to the framework, however the return
6378 // value will be an error
6379 rc = -ENODEV;
6380 }
6381 }
6382 }
6383
6384 /* notify the frameworks and send errored results */
6385 rc = notifyErrorForPendingRequests();
6386 if (rc < 0) {
6387 LOGE("notifyErrorForPendingRequests failed");
6388 pthread_mutex_unlock(&mMutex);
6389 return rc;
6390 }
6391
6392 //unblock process_capture_request
6393 mPendingLiveRequest = 0;
6394 unblockRequestIfNecessary();
6395
6396 mFlushPerf = false;
6397 pthread_mutex_unlock(&mMutex);
6398 LOGD ("Flush Operation complete. rc = %d", rc);
6399 return rc;
6400}
6401
6402/*===========================================================================
6403 * FUNCTION : handleCameraDeviceError
6404 *
6405 * DESCRIPTION: This function calls internal flush and notifies the error to
6406 * framework and updates the state variable.
6407 *
6408 * PARAMETERS : None
6409 *
6410 * RETURN : NO_ERROR on Success
6411 * Error code on failure
6412 *==========================================================================*/
6413int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6414{
6415 int32_t rc = NO_ERROR;
6416
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006417 {
6418 Mutex::Autolock lock(mFlushLock);
6419 pthread_mutex_lock(&mMutex);
6420 if (mState != ERROR) {
6421 //if mState != ERROR, nothing to be done
6422 pthread_mutex_unlock(&mMutex);
6423 return NO_ERROR;
6424 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006425 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006426
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006427 rc = flush(false /* restart channels */);
6428 if (NO_ERROR != rc) {
6429 LOGE("internal flush to handle mState = ERROR failed");
6430 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006431
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006432 pthread_mutex_lock(&mMutex);
6433 mState = DEINIT;
6434 pthread_mutex_unlock(&mMutex);
6435 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006436
6437 camera3_notify_msg_t notify_msg;
6438 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6439 notify_msg.type = CAMERA3_MSG_ERROR;
6440 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6441 notify_msg.message.error.error_stream = NULL;
6442 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006443 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006444
6445 return rc;
6446}
6447
6448/*===========================================================================
6449 * FUNCTION : captureResultCb
6450 *
6451 * DESCRIPTION: Callback handler for all capture result
6452 * (streams, as well as metadata)
6453 *
6454 * PARAMETERS :
6455 * @metadata_buf : metadata buffer from the backend; NULL for buffer callbacks
6456 * @buffer : actual gralloc buffer to be returned to the framework;
6457 * NULL if this is a metadata callback
 * @frame_number : frame number of the corresponding request
 * @isInputBuffer : true if this callback is for an input (reprocess) buffer
6458 *
6459 * RETURN : NONE
6460 *==========================================================================*/
6461void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6462 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6463{
6464 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006465 pthread_mutex_lock(&mMutex);
6466 uint8_t batchSize = mBatchSize;
6467 pthread_mutex_unlock(&mMutex);
6468 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006469 handleBatchMetadata(metadata_buf,
6470 true /* free_and_bufdone_meta_buf */);
6471 } else { /* mBatchSize = 0 */
6472 hdrPlusPerfLock(metadata_buf);
6473 pthread_mutex_lock(&mMutex);
6474 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006475 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006476 true /* last urgent frame of batch metadata */,
6477 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006478 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006479 pthread_mutex_unlock(&mMutex);
6480 }
6481 } else if (isInputBuffer) {
6482 pthread_mutex_lock(&mMutex);
6483 handleInputBufferWithLock(frame_number);
6484 pthread_mutex_unlock(&mMutex);
6485 } else {
6486 pthread_mutex_lock(&mMutex);
6487 handleBufferWithLock(buffer, frame_number);
6488 pthread_mutex_unlock(&mMutex);
6489 }
6490 return;
6491}
6492
6493/*===========================================================================
6494 * FUNCTION : getReprocessibleOutputStreamId
6495 *
6496 * DESCRIPTION: Get source output stream id for the input reprocess stream
6497 * based on size and format, which would be the largest
6498 * output stream if an input stream exists.
6499 *
6500 * PARAMETERS :
6501 * @id : return the stream id if found
6502 *
6503 * RETURN : int32_t type of status
6504 * NO_ERROR -- success
6505 * none-zero failure code
6506 *==========================================================================*/
6507int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6508{
6509 /* check if there is any output or bidirectional stream with the same size
6510 and format, and return that stream */
6511 if ((mInputStreamInfo.dim.width > 0) &&
6512 (mInputStreamInfo.dim.height > 0)) {
6513 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6514 it != mStreamInfo.end(); it++) {
6515
6516 camera3_stream_t *stream = (*it)->stream;
6517 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6518 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6519 (stream->format == mInputStreamInfo.format)) {
6520 // Usage flag for an input stream and the source output stream
6521 // may be different.
6522 LOGD("Found reprocessible output stream! %p", *it);
6523 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6524 stream->usage, mInputStreamInfo.usage);
6525
6526 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6527 if (channel != NULL && channel->mStreams[0]) {
6528 id = channel->mStreams[0]->getMyServerID();
6529 return NO_ERROR;
6530 }
6531 }
6532 }
6533 } else {
6534 LOGD("No input stream, so no reprocessible output stream");
6535 }
6536 return NAME_NOT_FOUND;
}
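
// Illustrative sketch (not part of this HAL, not built): the matching rule used by
// getReprocessibleOutputStreamId() reduces to "return the first output stream whose
// width, height and format equal the input stream's". A minimal standalone version,
// with a hypothetical StreamDesc type standing in for camera3_stream_t:
#if 0
struct StreamDesc {
    uint32_t width;
    uint32_t height;
    int      format;
    uint32_t serverId;
};

static int32_t findReprocessSource(const StreamDesc &input,
        const std::vector<StreamDesc> &outputs, uint32_t &id)
{
    for (const StreamDesc &out : outputs) {
        if ((out.width == input.width) && (out.height == input.height) &&
                (out.format == input.format)) {
            id = out.serverId; // same rule as getReprocessibleOutputStreamId()
            return NO_ERROR;
        }
    }
    return NAME_NOT_FOUND; // no matching output stream
}
#endif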
6538
6539/*===========================================================================
6540 * FUNCTION : lookupFwkName
6541 *
 * DESCRIPTION: In case the enum is not the same in the framework and the backend,
 *              make sure the parameter is correctly propagated
6544 *
6545 * PARAMETERS :
6546 * @arr : map between the two enums
6547 * @len : len of the map
 *   @hal_name : HAL enum value to map
 *
 * RETURN     : int type of status
 *              fwk_name -- success
 *              NAME_NOT_FOUND -- no matching framework enum found
6553 *==========================================================================*/
6554template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6555 size_t len, halType hal_name)
6556{
6557
6558 for (size_t i = 0; i < len; i++) {
6559 if (arr[i].hal_name == hal_name) {
6560 return arr[i].fwk_name;
6561 }
6562 }
6563
    /* Not being able to find a matching framework type is not necessarily
     * an error case. This happens when mm-camera supports more attributes
     * than the framework does */
6567 LOGH("Cannot find matching framework type");
6568 return NAME_NOT_FOUND;
6569}
6570
6571/*===========================================================================
6572 * FUNCTION : lookupHalName
6573 *
 * DESCRIPTION: In case the enum is not the same in the framework and the backend,
 *              make sure the parameter is correctly propagated
6576 *
6577 * PARAMETERS :
6578 * @arr : map between the two enums
6579 * @len : len of the map
 *   @fwk_name : framework enum value to map
 *
 * RETURN     : int32_t type of status
 *              hal_name -- success
 *              NAME_NOT_FOUND -- no matching HAL enum found
6585 *==========================================================================*/
6586template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6587 size_t len, fwkType fwk_name)
6588{
6589 for (size_t i = 0; i < len; i++) {
6590 if (arr[i].fwk_name == fwk_name) {
6591 return arr[i].hal_name;
6592 }
6593 }
6594
6595 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6596 return NAME_NOT_FOUND;
6597}
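
// Illustrative sketch (not built): intended usage of the two lookup templates above
// with one of the static fwk<->HAL maps defined in this file. EFFECT_MODES_MAP and
// the enum values shown are assumed to be the ones used elsewhere in this HAL.
#if 0
static void lookupUsageExample()
{
    // HAL -> framework direction
    int fwkEffect = lookupFwkName(EFFECT_MODES_MAP,
            METADATA_MAP_SIZE(EFFECT_MODES_MAP), (uint32_t)CAM_EFFECT_MODE_SEPIA);
    if (NAME_NOT_FOUND != fwkEffect) {
        // fwkEffect now holds ANDROID_CONTROL_EFFECT_MODE_SEPIA
    }

    // framework -> HAL direction
    int halEffect = lookupHalName(EFFECT_MODES_MAP,
            METADATA_MAP_SIZE(EFFECT_MODES_MAP),
            (uint8_t)ANDROID_CONTROL_EFFECT_MODE_SEPIA);
    if (NAME_NOT_FOUND != halEffect) {
        // halEffect now holds CAM_EFFECT_MODE_SEPIA
    }
}
#endif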
6598
6599/*===========================================================================
6600 * FUNCTION : lookupProp
6601 *
6602 * DESCRIPTION: lookup a value by its name
6603 *
6604 * PARAMETERS :
6605 * @arr : map between the two enums
6606 * @len : size of the map
6607 * @name : name to be looked up
6608 *
6609 * RETURN : Value if found
6610 * CAM_CDS_MODE_MAX if not found
6611 *==========================================================================*/
6612template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6613 size_t len, const char *name)
6614{
6615 if (name) {
6616 for (size_t i = 0; i < len; i++) {
6617 if (!strcmp(arr[i].desc, name)) {
6618 return arr[i].val;
6619 }
6620 }
6621 }
6622 return CAM_CDS_MODE_MAX;
6623}
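
// Illustrative sketch (not built): lookupProp() above is meant to resolve the string
// value of an Android system property to a CDS enum, falling back to CAM_CDS_MODE_MAX
// when the string is unknown or NULL. CDS_MAP (the prop-to-enum map defined in this
// HAL) and the property name shown are assumptions for illustration.
#if 0
static cam_cds_mode_type_t cdsModeFromProperty()
{
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.CDS", prop, "Auto");
    return lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
}
#endif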
6624
/*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translate the metadata obtained from the HAL/backend into a
 *              camera_metadata_t blob in the format expected by the framework
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *   @timestamp: metadata buffer timestamp
 *   @request_id: request id
 *   @jpegMetadata: additional jpeg metadata
 *   @pipeline_depth: pipeline depth to report for this result
 *   @capture_intent: capture intent of the corresponding request
 *   @hybrid_ae_enable: whether hybrid ae is enabled
 *   @DevCamDebug_meta_enable: whether DevCamDebug metadata is enabled
 *   @pprocDone: whether internal offline postprocessing is done
 *   @fwk_cacMode: chromatic aberration correction mode to report to the framework
 *   @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
 *                         in a batch. Always true for non-batch mode.
 *   @enableZsl: pointer to the ZSL setting for this result, or NULL if not reported
 *
 * RETURN : camera_metadata_t*
 *          metadata in a format specified by fwk
 *==========================================================================*/
6644camera_metadata_t*
6645QCamera3HardwareInterface::translateFromHalMetadata(
6646 metadata_buffer_t *metadata,
6647 nsecs_t timestamp,
6648 int32_t request_id,
6649 const CameraMetadata& jpegMetadata,
6650 uint8_t pipeline_depth,
6651 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006652 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006653 /* DevCamDebug metadata translateFromHalMetadata argument */
6654 uint8_t DevCamDebug_meta_enable,
6655 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006656 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006657 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006658 bool lastMetadataInBatch,
6659 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006660{
6661 CameraMetadata camMetadata;
6662 camera_metadata_t *resultMetadata;
6663
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006664 if (!lastMetadataInBatch) {
        /* In batch mode, for frames that are not the last in the batch, only
         * SENSOR_TIMESTAMP is populated, because the timestamp is needed for the
         * shutter notification calculation. */
6668 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6669 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006670 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006671 }
6672
Thierry Strudel3d639192016-09-09 11:52:26 -07006673 if (jpegMetadata.entryCount())
6674 camMetadata.append(jpegMetadata);
6675
6676 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6677 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6678 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6679 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006680 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006681 if (mBatchSize == 0) {
6682 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6683 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6684 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006685
Samuel Ha68ba5172016-12-15 18:41:12 -08006686 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
    // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6688 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6689 // DevCamDebug metadata translateFromHalMetadata AF
6690 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6691 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6692 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6693 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6694 }
6695 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6696 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6697 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6698 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6699 }
6700 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6701 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6702 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6703 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6704 }
6705 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6706 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6707 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6708 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6709 }
6710 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6711 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6712 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6713 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6714 }
6715 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6716 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6717 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6718 *DevCamDebug_af_monitor_pdaf_target_pos;
6719 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6720 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6721 }
6722 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6723 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6724 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6725 *DevCamDebug_af_monitor_pdaf_confidence;
6726 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6727 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6728 }
6729 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6730 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6731 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6732 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6733 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6734 }
6735 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6736 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6737 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6738 *DevCamDebug_af_monitor_tof_target_pos;
6739 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6740 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6741 }
6742 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6743 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6744 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6745 *DevCamDebug_af_monitor_tof_confidence;
6746 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6747 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6748 }
6749 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6750 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6751 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6752 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6753 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6754 }
6755 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6756 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6757 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6758 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6759 &fwk_DevCamDebug_af_monitor_type_select, 1);
6760 }
6761 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6762 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6763 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6764 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6765 &fwk_DevCamDebug_af_monitor_refocus, 1);
6766 }
6767 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6768 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6769 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6770 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6771 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6772 }
6773 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6774 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6775 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6776 *DevCamDebug_af_search_pdaf_target_pos;
6777 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6778 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6779 }
6780 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6781 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6782 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6783 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6784 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6785 }
6786 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6787 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6788 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6789 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6790 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6791 }
6792 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6793 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6794 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6795 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6796 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6797 }
6798 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6799 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6800 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6801 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6802 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6803 }
6804 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6805 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6806 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6807 *DevCamDebug_af_search_tof_target_pos;
6808 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6809 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6810 }
6811 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6812 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6813 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6814 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6815 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6816 }
6817 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6818 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6819 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6820 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6821 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6822 }
6823 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6824 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6825 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6826 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6827 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6828 }
6829 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6830 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6831 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6832 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6833 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6834 }
6835 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6836 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6837 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6838 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6839 &fwk_DevCamDebug_af_search_type_select, 1);
6840 }
6841 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6842 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6843 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6844 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6845 &fwk_DevCamDebug_af_search_next_pos, 1);
6846 }
6847 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6848 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6849 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6850 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6851 &fwk_DevCamDebug_af_search_target_pos, 1);
6852 }
6853 // DevCamDebug metadata translateFromHalMetadata AEC
6854 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6855 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6856 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6857 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6858 }
6859 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6860 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6861 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6862 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6863 }
6864 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6865 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6866 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6867 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6868 }
6869 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6870 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6871 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6872 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6873 }
6874 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6875 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6876 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6877 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6878 }
6879 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6880 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6881 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6882 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6883 }
6884 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6885 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6886 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6887 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6888 }
6889 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6890 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6891 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6892 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6893 }
Samuel Ha34229982017-02-17 13:51:11 -08006894 // DevCamDebug metadata translateFromHalMetadata zzHDR
6895 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6896 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6897 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6898 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6899 }
6900 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6901 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006902 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006903 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6904 }
6905 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6906 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6907 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6908 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6909 }
6910 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6911 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006912 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006913 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6914 }
6915 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6916 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6917 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6918 *DevCamDebug_aec_hdr_sensitivity_ratio;
6919 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6920 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6921 }
6922 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6923 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6924 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6925 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6926 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6927 }
6928 // DevCamDebug metadata translateFromHalMetadata ADRC
6929 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6930 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6931 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6932 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6933 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6934 }
6935 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6936 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6937 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6938 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6939 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6940 }
6941 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6942 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6943 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6944 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6945 }
6946 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6947 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6948 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6949 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6950 }
6951 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6952 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6953 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6954 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6955 }
6956 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6957 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6958 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6959 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6960 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006961 // DevCamDebug metadata translateFromHalMetadata AWB
6962 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6963 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6964 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6965 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6966 }
6967 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6968 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6969 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6970 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6971 }
6972 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6973 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6974 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6975 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6976 }
6977 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6978 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6979 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6980 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6981 }
6982 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6983 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6984 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6985 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6986 }
6987 }
6988 // atrace_end(ATRACE_TAG_ALWAYS);
6989
Thierry Strudel3d639192016-09-09 11:52:26 -07006990 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6991 int64_t fwk_frame_number = *frame_number;
6992 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6993 }
6994
6995 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6996 int32_t fps_range[2];
6997 fps_range[0] = (int32_t)float_range->min_fps;
6998 fps_range[1] = (int32_t)float_range->max_fps;
6999 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7000 fps_range, 2);
7001 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7002 fps_range[0], fps_range[1]);
7003 }
7004
7005 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7006 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7007 }
7008
7009 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7010 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7011 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7012 *sceneMode);
7013 if (NAME_NOT_FOUND != val) {
7014 uint8_t fwkSceneMode = (uint8_t)val;
7015 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7016 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7017 fwkSceneMode);
7018 }
7019 }
7020
7021 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7022 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7023 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7024 }
7025
7026 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7027 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7028 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7029 }
7030
7031 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7032 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7033 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7034 }
7035
7036 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7037 CAM_INTF_META_EDGE_MODE, metadata) {
7038 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7039 }
7040
7041 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7042 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7043 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7044 }
7045
7046 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7047 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7048 }
7049
7050 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7051 if (0 <= *flashState) {
7052 uint8_t fwk_flashState = (uint8_t) *flashState;
7053 if (!gCamCapability[mCameraId]->flash_available) {
7054 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7055 }
7056 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7057 }
7058 }
7059
7060 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7061 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7062 if (NAME_NOT_FOUND != val) {
7063 uint8_t fwk_flashMode = (uint8_t)val;
7064 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7065 }
7066 }
7067
7068 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7069 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7070 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7071 }
7072
7073 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7074 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7075 }
7076
7077 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7078 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7079 }
7080
7081 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7082 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7083 }
7084
7085 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7086 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7087 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7088 }
7089
7090 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7091 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7092 LOGD("fwk_videoStab = %d", fwk_videoStab);
7093 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7094 } else {
        // Regardless of whether video stabilization is supported or not, CTS expects
        // the EIS result to be non-NULL, so hardcode the video stabilization result
        // to OFF mode.
7097 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7098 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007099 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007100 }
7101
7102 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7103 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7104 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7105 }
7106
7107 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7108 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7109 }
7110
Thierry Strudel3d639192016-09-09 11:52:26 -07007111 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7112 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007113 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007114
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007115 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7116 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007117
        LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007119 blackLevelAppliedPattern->cam_black_level[0],
7120 blackLevelAppliedPattern->cam_black_level[1],
7121 blackLevelAppliedPattern->cam_black_level[2],
7122 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007123 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7124 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007125
7126#ifndef USE_HAL_3_3
7127 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
        // Need to convert the internal 14-bit black level to the sensor's 10-bit
        // raw depth space (divide by 2^4 = 16).
Jason Lee4f3d96e2017-02-28 19:24:14 +05307130 fwk_blackLevelInd[0] /= 16.0;
7131 fwk_blackLevelInd[1] /= 16.0;
7132 fwk_blackLevelInd[2] /= 16.0;
7133 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007134 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7135 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007136#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007137 }
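    // Worked example (illustrative): a per-channel applied black level of 1024 in the
    // HAL's internal 14-bit range is published unscaled in
    // QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN (after adjustBlackLevelForCFA()
    // reorders the channels into RGGB), and, where ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
    // is reported, as 1024 / 16 = 64.0 after the 14-bit -> 10-bit scaling above.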
7138
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007139#ifndef USE_HAL_3_3
7140 // Fixed whitelevel is used by ISP/Sensor
7141 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7142 &gCamCapability[mCameraId]->white_level, 1);
7143#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007144
7145 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7146 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7147 int32_t scalerCropRegion[4];
7148 scalerCropRegion[0] = hScalerCropRegion->left;
7149 scalerCropRegion[1] = hScalerCropRegion->top;
7150 scalerCropRegion[2] = hScalerCropRegion->width;
7151 scalerCropRegion[3] = hScalerCropRegion->height;
7152
7153 // Adjust crop region from sensor output coordinate system to active
7154 // array coordinate system.
7155 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7156 scalerCropRegion[2], scalerCropRegion[3]);
7157
7158 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7159 }
7160
7161 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7162 LOGD("sensorExpTime = %lld", *sensorExpTime);
7163 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7164 }
7165
7166 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7167 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7168 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7169 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7170 }
7171
7172 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7173 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7174 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7175 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7176 sensorRollingShutterSkew, 1);
7177 }
7178
7179 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7180 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7181 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7182
7183 //calculate the noise profile based on sensitivity
7184 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7185 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7186 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7187 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7188 noise_profile[i] = noise_profile_S;
7189 noise_profile[i+1] = noise_profile_O;
7190 }
7191 LOGD("noise model entry (S, O) is (%f, %f)",
7192 noise_profile_S, noise_profile_O);
7193 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7194 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7195 }
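    // Illustrative note: ANDROID_SENSOR_NOISE_PROFILE is an interleaved array of
    // (S, O) pairs, one pair per color channel, so for a 4-channel Bayer sensor the
    // array built above is [S, O, S, O, S, O, S, O], with the same sensitivity-derived
    // S and O (computeNoiseModelEntryS/O) reused for every channel.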
7196
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007197#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007198 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007199 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007200 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007201 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007202 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7203 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7204 }
7205 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007206#endif
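    // Illustrative arithmetic: with an ISP sensitivity of 100 and a post-stats
    // sensitivity factor of 1.5, the value reported above for
    // ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST is 100 * 1.5 = 150, i.e. a 1.5x
    // digital gain applied after the RAW stage (100 meaning no boost).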
7207
Thierry Strudel3d639192016-09-09 11:52:26 -07007208 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7209 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7210 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7211 }
7212
7213 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7214 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7215 *faceDetectMode);
7216 if (NAME_NOT_FOUND != val) {
7217 uint8_t fwk_faceDetectMode = (uint8_t)val;
7218 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7219
7220 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7221 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7222 CAM_INTF_META_FACE_DETECTION, metadata) {
7223 uint8_t numFaces = MIN(
7224 faceDetectionInfo->num_faces_detected, MAX_ROI);
7225 int32_t faceIds[MAX_ROI];
7226 uint8_t faceScores[MAX_ROI];
7227 int32_t faceRectangles[MAX_ROI * 4];
7228 int32_t faceLandmarks[MAX_ROI * 6];
7229 size_t j = 0, k = 0;
7230
7231 for (size_t i = 0; i < numFaces; i++) {
7232 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7233 // Adjust crop region from sensor output coordinate system to active
7234 // array coordinate system.
7235 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7236 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7237 rect.width, rect.height);
7238
7239 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7240 faceRectangles+j, -1);
7241
Jason Lee8ce36fa2017-04-19 19:40:37 -07007242 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7243 "bottom-right (%d, %d)",
7244 faceDetectionInfo->frame_id, i,
7245 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7246 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7247
Thierry Strudel3d639192016-09-09 11:52:26 -07007248 j+= 4;
7249 }
7250 if (numFaces <= 0) {
7251 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7252 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7253 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7254 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7255 }
7256
7257 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7258 numFaces);
7259 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7260 faceRectangles, numFaces * 4U);
7261 if (fwk_faceDetectMode ==
7262 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7263 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7264 CAM_INTF_META_FACE_LANDMARK, metadata) {
7265
7266 for (size_t i = 0; i < numFaces; i++) {
7267 // Map the co-ordinate sensor output coordinate system to active
7268 // array coordinate system.
7269 mCropRegionMapper.toActiveArray(
7270 landmarks->face_landmarks[i].left_eye_center.x,
7271 landmarks->face_landmarks[i].left_eye_center.y);
7272 mCropRegionMapper.toActiveArray(
7273 landmarks->face_landmarks[i].right_eye_center.x,
7274 landmarks->face_landmarks[i].right_eye_center.y);
7275 mCropRegionMapper.toActiveArray(
7276 landmarks->face_landmarks[i].mouth_center.x,
7277 landmarks->face_landmarks[i].mouth_center.y);
7278
7279 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007280
7281 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7282 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7283 faceDetectionInfo->frame_id, i,
7284 faceLandmarks[k + LEFT_EYE_X],
7285 faceLandmarks[k + LEFT_EYE_Y],
7286 faceLandmarks[k + RIGHT_EYE_X],
7287 faceLandmarks[k + RIGHT_EYE_Y],
7288 faceLandmarks[k + MOUTH_X],
7289 faceLandmarks[k + MOUTH_Y]);
7290
Thierry Strudel04e026f2016-10-10 11:27:36 -07007291 k+= TOTAL_LANDMARK_INDICES;
7292 }
7293 } else {
7294 for (size_t i = 0; i < numFaces; i++) {
7295 setInvalidLandmarks(faceLandmarks+k);
7296 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007297 }
7298 }
7299
Jason Lee49619db2017-04-13 12:07:22 -07007300 for (size_t i = 0; i < numFaces; i++) {
7301 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7302
7303 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7304 faceDetectionInfo->frame_id, i, faceIds[i]);
7305 }
7306
Thierry Strudel3d639192016-09-09 11:52:26 -07007307 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7308 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7309 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007310 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007311 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7312 CAM_INTF_META_FACE_BLINK, metadata) {
7313 uint8_t detected[MAX_ROI];
7314 uint8_t degree[MAX_ROI * 2];
7315 for (size_t i = 0; i < numFaces; i++) {
7316 detected[i] = blinks->blink[i].blink_detected;
7317 degree[2 * i] = blinks->blink[i].left_blink;
7318 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007319
Jason Lee49619db2017-04-13 12:07:22 -07007320 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7321 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7322 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7323 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007324 }
7325 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7326 detected, numFaces);
7327 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7328 degree, numFaces * 2);
7329 }
7330 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7331 CAM_INTF_META_FACE_SMILE, metadata) {
7332 uint8_t degree[MAX_ROI];
7333 uint8_t confidence[MAX_ROI];
7334 for (size_t i = 0; i < numFaces; i++) {
7335 degree[i] = smiles->smile[i].smile_degree;
7336 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007337
Jason Lee49619db2017-04-13 12:07:22 -07007338 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7339 "smile_degree=%d, smile_score=%d",
7340 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007341 }
7342 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7343 degree, numFaces);
7344 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7345 confidence, numFaces);
7346 }
7347 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7348 CAM_INTF_META_FACE_GAZE, metadata) {
7349 int8_t angle[MAX_ROI];
7350 int32_t direction[MAX_ROI * 3];
7351 int8_t degree[MAX_ROI * 2];
7352 for (size_t i = 0; i < numFaces; i++) {
7353 angle[i] = gazes->gaze[i].gaze_angle;
7354 direction[3 * i] = gazes->gaze[i].updown_dir;
7355 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7356 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7357 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7358 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007359
7360 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7361 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7362 "left_right_gaze=%d, top_bottom_gaze=%d",
7363 faceDetectionInfo->frame_id, i, angle[i],
7364 direction[3 * i], direction[3 * i + 1],
7365 direction[3 * i + 2],
7366 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007367 }
7368 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7369 (uint8_t *)angle, numFaces);
7370 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7371 direction, numFaces * 3);
7372 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7373 (uint8_t *)degree, numFaces * 2);
7374 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007375 }
7376 }
7377 }
7378 }
7379
7380 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7381 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007382 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007383 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007384 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007385
Shuzhen Wang14415f52016-11-16 18:26:18 -08007386 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7387 histogramBins = *histBins;
7388 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7389 }
7390
7391 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007392 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7393 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007394 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007395
7396 switch (stats_data->type) {
7397 case CAM_HISTOGRAM_TYPE_BAYER:
7398 switch (stats_data->bayer_stats.data_type) {
7399 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007400 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7401 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007402 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007403 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7404 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007405 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007406 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7407 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007408 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007409 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007410 case CAM_STATS_CHANNEL_R:
7411 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007412 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7413 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007414 }
7415 break;
7416 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007417 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007418 break;
7419 }
7420
Shuzhen Wang14415f52016-11-16 18:26:18 -08007421 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007422 }
7423 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007424 }
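    // Illustrative note: only one channel's histogram is forwarded to the framework.
    // For Bayer stats the channel is chosen by bayer_stats.data_type (defaulting to
    // the R channel), for YUV stats the single YUV buffer is used, and in both cases
    // exactly histogramBins bins are reported via NEXUS_EXPERIMENTAL_2017_HISTOGRAM.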
7425
7426 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7427 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7428 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7429 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7430 }
7431
7432 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7433 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7434 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7435 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7436 }
7437
7438 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7439 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7440 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7441 CAM_MAX_SHADING_MAP_HEIGHT);
7442 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7443 CAM_MAX_SHADING_MAP_WIDTH);
7444 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7445 lensShadingMap->lens_shading, 4U * map_width * map_height);
7446 }
7447
7448 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7449 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7450 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7451 }
7452
7453 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7454 //Populate CAM_INTF_META_TONEMAP_CURVES
7455 /* ch0 = G, ch 1 = B, ch 2 = R*/
7456 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7457 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7458 tonemap->tonemap_points_cnt,
7459 CAM_MAX_TONEMAP_CURVE_SIZE);
7460 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7461 }
7462
7463 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7464 &tonemap->curves[0].tonemap_points[0][0],
7465 tonemap->tonemap_points_cnt * 2);
7466
7467 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7468 &tonemap->curves[1].tonemap_points[0][0],
7469 tonemap->tonemap_points_cnt * 2);
7470
7471 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7472 &tonemap->curves[2].tonemap_points[0][0],
7473 tonemap->tonemap_points_cnt * 2);
7474 }
7475
7476 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7477 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7478 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7479 CC_GAIN_MAX);
7480 }
7481
7482 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7483 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7484 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7485 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7486 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7487 }
7488
7489 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7490 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7491 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7492 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7493 toneCurve->tonemap_points_cnt,
7494 CAM_MAX_TONEMAP_CURVE_SIZE);
7495 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7496 }
7497 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7498 (float*)toneCurve->curve.tonemap_points,
7499 toneCurve->tonemap_points_cnt * 2);
7500 }
7501
7502 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7503 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7504 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7505 predColorCorrectionGains->gains, 4);
7506 }
7507
7508 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7509 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7510 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7511 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7512 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7513 }
7514
7515 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7516 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7517 }
7518
7519 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7520 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7521 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7522 }
7523
7524 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7525 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7526 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7527 }
7528
7529 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7530 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7531 *effectMode);
7532 if (NAME_NOT_FOUND != val) {
7533 uint8_t fwk_effectMode = (uint8_t)val;
7534 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7535 }
7536 }
7537
7538 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7539 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7540 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7541 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7542 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7543 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7544 }
7545 int32_t fwk_testPatternData[4];
7546 fwk_testPatternData[0] = testPatternData->r;
7547 fwk_testPatternData[3] = testPatternData->b;
7548 switch (gCamCapability[mCameraId]->color_arrangement) {
7549 case CAM_FILTER_ARRANGEMENT_RGGB:
7550 case CAM_FILTER_ARRANGEMENT_GRBG:
7551 fwk_testPatternData[1] = testPatternData->gr;
7552 fwk_testPatternData[2] = testPatternData->gb;
7553 break;
7554 case CAM_FILTER_ARRANGEMENT_GBRG:
7555 case CAM_FILTER_ARRANGEMENT_BGGR:
7556 fwk_testPatternData[2] = testPatternData->gr;
7557 fwk_testPatternData[1] = testPatternData->gb;
7558 break;
7559 default:
7560 LOGE("color arrangement %d is not supported",
7561 gCamCapability[mCameraId]->color_arrangement);
7562 break;
7563 }
7564 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7565 }
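    // Illustrative note: ANDROID_SENSOR_TEST_PATTERN_DATA keeps index 0 = R and
    // index 3 = B fixed; the mapping above only swaps which green channel (gr vs. gb)
    // lands in index 1 and index 2, based on the sensor's color filter arrangement.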
7566
7567 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7568 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7569 }
7570
7571 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7572 String8 str((const char *)gps_methods);
7573 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7574 }
7575
7576 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7577 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7578 }
7579
7580 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7581 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7582 }
7583
7584 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7585 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7586 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7587 }
7588
7589 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7590 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7591 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7592 }
7593
7594 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7595 int32_t fwk_thumb_size[2];
7596 fwk_thumb_size[0] = thumb_size->width;
7597 fwk_thumb_size[1] = thumb_size->height;
7598 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7599 }
7600
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007601 // Skip reprocess metadata if there is no input stream.
7602 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7603 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7604 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7605 privateData,
7606 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7607 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007608 }
7609
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007610 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007611 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007612 meteringMode, 1);
7613 }
7614
Thierry Strudel54dc9782017-02-15 12:12:10 -08007615 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7616 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7617 LOGD("hdr_scene_data: %d %f\n",
7618 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7619 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7620 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7621 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7622 &isHdr, 1);
7623 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7624 &isHdrConfidence, 1);
7625 }
7626
7627
7628
Thierry Strudel3d639192016-09-09 11:52:26 -07007629 if (metadata->is_tuning_params_valid) {
7630 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7631 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7632 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7633
7634
7635 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7636 sizeof(uint32_t));
7637 data += sizeof(uint32_t);
7638
7639 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7640 sizeof(uint32_t));
7641 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7642 data += sizeof(uint32_t);
7643
7644 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7645 sizeof(uint32_t));
7646 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7647 data += sizeof(uint32_t);
7648
7649 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7650 sizeof(uint32_t));
7651 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7652 data += sizeof(uint32_t);
7653
7654 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7655 sizeof(uint32_t));
7656 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7657 data += sizeof(uint32_t);
7658
7659 metadata->tuning_params.tuning_mod3_data_size = 0;
7660 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7661 sizeof(uint32_t));
7662 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7663 data += sizeof(uint32_t);
7664
7665 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7666 TUNING_SENSOR_DATA_MAX);
7667 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7668 count);
7669 data += count;
7670
7671 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7672 TUNING_VFE_DATA_MAX);
7673 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7674 count);
7675 data += count;
7676
7677 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7678 TUNING_CPP_DATA_MAX);
7679 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7680 count);
7681 data += count;
7682
7683 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7684 TUNING_CAC_DATA_MAX);
7685 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7686 count);
7687 data += count;
7688
7689 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7690 (int32_t *)(void *)tuning_meta_data_blob,
7691 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7692 }
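    // Illustrative note on the QCAMERA3_TUNING_META_DATA_BLOB layout produced above:
    // a six-uint32 header (data version, then the sensor/VFE/CPP/CAC/mod3 segment
    // sizes, with the mod3 size forced to 0) followed by the sensor, VFE, CPP and CAC
    // payloads, each clamped to its TUNING_*_DATA_MAX limit.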
7693
7694 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7695 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7696 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7697 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7698 NEUTRAL_COL_POINTS);
7699 }
7700
7701 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7702 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7703 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7704 }
7705
7706 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7707 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7708 // Adjust crop region from sensor output coordinate system to active
7709 // array coordinate system.
7710 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7711 hAeRegions->rect.width, hAeRegions->rect.height);
7712
7713 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7714 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7715 REGIONS_TUPLE_COUNT);
7716 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7717 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7718 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7719 hAeRegions->rect.height);
7720 }
7721
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007722 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7723 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7724 if (NAME_NOT_FOUND != val) {
7725 uint8_t fwkAfMode = (uint8_t)val;
7726 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7727 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7728 } else {
7729 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7730 val);
7731 }
7732 }
7733
Thierry Strudel3d639192016-09-09 11:52:26 -07007734 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7735 uint8_t fwk_afState = (uint8_t) *afState;
7736 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007737 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007738 }
7739
7740 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7741 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7742 }
7743
7744 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7745 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7746 }
7747
7748 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7749 uint8_t fwk_lensState = *lensState;
7750 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7751 }
7752
Thierry Strudel3d639192016-09-09 11:52:26 -07007753
7754 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007755 uint32_t ab_mode = *hal_ab_mode;
7756 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7757 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7758 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7759 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007760 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007761 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007762 if (NAME_NOT_FOUND != val) {
7763 uint8_t fwk_ab_mode = (uint8_t)val;
7764 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7765 }
7766 }
7767
7768 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7769 int val = lookupFwkName(SCENE_MODES_MAP,
7770 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7771 if (NAME_NOT_FOUND != val) {
7772 uint8_t fwkBestshotMode = (uint8_t)val;
7773 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7774 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7775 } else {
7776 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7777 }
7778 }
7779
7780 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7781 uint8_t fwk_mode = (uint8_t) *mode;
7782 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7783 }
7784
7785 /* Constant metadata values to be updated */
7786 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7787 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7788
7789 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7790 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7791
7792 int32_t hotPixelMap[2];
7793 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7794
7795 // CDS
7796 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7797 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7798 }
7799
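    // The video HDR, IR and TNR blocks below compare the per-frame state with
    // mCurrFeatureState and emit PROFILE_META_*_TOGGLED logs when a feature flips,
    // presumably so that feature transitions can be tracked for profiling.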
Thierry Strudel04e026f2016-10-10 11:27:36 -07007800 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7801 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007802 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007803 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7804 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7805 } else {
7806 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7807 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007808
7809 if(fwk_hdr != curr_hdr_state) {
7810 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7811 if(fwk_hdr)
7812 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7813 else
7814 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7815 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007816 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7817 }
7818
Thierry Strudel54dc9782017-02-15 12:12:10 -08007819 //binning correction
7820 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7821 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7822 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7823 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7824 }
7825
Thierry Strudel04e026f2016-10-10 11:27:36 -07007826 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007827 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007828 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7829 int8_t is_ir_on = 0;
7830
7831 is_ir_on = (fwk_ir > 0) ? 1 : 0;
7832 if(is_ir_on != curr_ir_state) {
7833 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7834 if(is_ir_on)
7835 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7836 else
7837 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7838 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007839 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007840 }
7841
Thierry Strudel269c81a2016-10-12 12:13:59 -07007842 // AEC SPEED
7843 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7844 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7845 }
7846
7847 // AWB SPEED
7848 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7849 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7850 }
7851
Thierry Strudel3d639192016-09-09 11:52:26 -07007852 // TNR
7853 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7854 uint8_t tnr_enable = tnr->denoise_enable;
7855 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007856 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7857 int8_t is_tnr_on = 0;
7858
7859 is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7860 if(is_tnr_on != curr_tnr_state) {
7861 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7862 if(is_tnr_on)
7863 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7864 else
7865 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7866 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007867
7868 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7869 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7870 }
7871
7872 // Reprocess crop data
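    // Only the crop entry of the reprocessible output stream is forwarded. If the
    // HAL has already reprocessed internally (pprocDone), the full input dimensions
    // are reported instead so no additional crop is applied downstream.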
7873 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7874 uint8_t cnt = crop_data->num_of_streams;
7875 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7876 // mm-qcamera-daemon only posts crop_data for streams
7877 // not linked to pproc, so the absence of valid crop metadata
7878 // is not necessarily an error case.
7879 LOGD("No valid crop metadata entries");
7880 } else {
7881 uint32_t reproc_stream_id;
7882 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7883 LOGD("No reprocessible stream found, ignore crop data");
7884 } else {
7885 int rc = NO_ERROR;
7886 Vector<int32_t> roi_map;
7887 int32_t *crop = new int32_t[cnt*4];
7888 if (NULL == crop) {
7889 rc = NO_MEMORY;
7890 }
7891 if (NO_ERROR == rc) {
7892 int32_t streams_found = 0;
7893 for (size_t i = 0; i < cnt; i++) {
7894 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7895 if (pprocDone) {
7896 // HAL already does internal reprocessing,
7897 // either via reprocessing before JPEG encoding,
7898 // or offline postprocessing for pproc bypass case.
7899 crop[0] = 0;
7900 crop[1] = 0;
7901 crop[2] = mInputStreamInfo.dim.width;
7902 crop[3] = mInputStreamInfo.dim.height;
7903 } else {
7904 crop[0] = crop_data->crop_info[i].crop.left;
7905 crop[1] = crop_data->crop_info[i].crop.top;
7906 crop[2] = crop_data->crop_info[i].crop.width;
7907 crop[3] = crop_data->crop_info[i].crop.height;
7908 }
7909 roi_map.add(crop_data->crop_info[i].roi_map.left);
7910 roi_map.add(crop_data->crop_info[i].roi_map.top);
7911 roi_map.add(crop_data->crop_info[i].roi_map.width);
7912 roi_map.add(crop_data->crop_info[i].roi_map.height);
7913 streams_found++;
7914 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7915 crop[0], crop[1], crop[2], crop[3]);
7916 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7917 crop_data->crop_info[i].roi_map.left,
7918 crop_data->crop_info[i].roi_map.top,
7919 crop_data->crop_info[i].roi_map.width,
7920 crop_data->crop_info[i].roi_map.height);
7921 break;
7922
7923 }
7924 }
7925 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7926 &streams_found, 1);
7927 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7928 crop, (size_t)(streams_found * 4));
7929 if (roi_map.array()) {
7930 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7931 roi_map.array(), roi_map.size());
7932 }
7933 }
7934 if (crop) {
7935 delete [] crop;
7936 }
7937 }
7938 }
7939 }
7940
7941 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7942 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7943 // so hardcode the CAC result to OFF mode.
7944 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7945 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7946 } else {
7947 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7948 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7949 *cacMode);
7950 if (NAME_NOT_FOUND != val) {
7951 uint8_t resultCacMode = (uint8_t)val;
7952 // Check whether the CAC result from the callback matches the framework-set CAC mode.
7953 // If not, report the CAC mode that came in the corresponding request.
7954 if (fwk_cacMode != resultCacMode) {
7955 resultCacMode = fwk_cacMode;
7956 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007957 //Check if CAC is disabled by property
7958 if (m_cacModeDisabled) {
7959 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7960 }
7961
Thierry Strudel3d639192016-09-09 11:52:26 -07007962 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7963 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7964 } else {
7965 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7966 }
7967 }
7968 }
7969
7970 // Post blob of cam_cds_data through vendor tag.
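    // The override blob carries the session-level CDS enable plus a single stream
    // entry holding the CDS state of the reprocessible stream, if one exists.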
7971 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7972 uint8_t cnt = cdsInfo->num_of_streams;
7973 cam_cds_data_t cdsDataOverride;
7974 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7975 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7976 cdsDataOverride.num_of_streams = 1;
7977 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7978 uint32_t reproc_stream_id;
7979 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7980 LOGD("No reprocessible stream found, ignore cds data");
7981 } else {
7982 for (size_t i = 0; i < cnt; i++) {
7983 if (cdsInfo->cds_info[i].stream_id ==
7984 reproc_stream_id) {
7985 cdsDataOverride.cds_info[0].cds_enable =
7986 cdsInfo->cds_info[i].cds_enable;
7987 break;
7988 }
7989 }
7990 }
7991 } else {
7992 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7993 }
7994 camMetadata.update(QCAMERA3_CDS_INFO,
7995 (uint8_t *)&cdsDataOverride,
7996 sizeof(cam_cds_data_t));
7997 }
7998
7999 // Ldaf calibration data
8000 if (!mLdafCalibExist) {
8001 IF_META_AVAILABLE(uint32_t, ldafCalib,
8002 CAM_INTF_META_LDAF_EXIF, metadata) {
8003 mLdafCalibExist = true;
8004 mLdafCalib[0] = ldafCalib[0];
8005 mLdafCalib[1] = ldafCalib[1];
8006 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8007 ldafCalib[0], ldafCalib[1]);
8008 }
8009 }
8010
Thierry Strudel54dc9782017-02-15 12:12:10 -08008011 // EXIF debug data through vendor tag
8012 /*
8013 * Mobicat Mask can assume 3 values:
8014 * 1 refers to Mobicat data,
8015 * 2 refers to Stats Debug and Exif Debug Data
8016 * 3 refers to Mobicat and Stats Debug Data
8017 * We want to make sure that we are sending Exif debug data
8018 * only when Mobicat Mask is 2.
8019 */
8020 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8021 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8022 (uint8_t *)(void *)mExifParams.debug_params,
8023 sizeof(mm_jpeg_debug_exif_params_t));
8024 }
8025
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008026 // Reprocess and DDM debug data through vendor tag
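    // Collect per-frame snapshot crop info (sensor/CAMIF/ISP/CPP), AF focal length
    // ratio, pipeline flip, rotation, AF ROI and the dynamic feature mask into one
    // cam_reprocess_info_t blob consumed by the offline reprocess / DDM path.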
8027 cam_reprocess_info_t repro_info;
8028 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008029 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8030 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008031 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008032 }
8033 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8034 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008035 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008036 }
8037 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8038 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008039 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008040 }
8041 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8042 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008043 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008044 }
8045 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8046 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008047 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008048 }
8049 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008050 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008051 }
8052 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8053 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008054 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008055 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008056 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8057 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8058 }
8059 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8060 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8061 }
8062 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8063 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008064
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008065 // INSTANT AEC MODE
8066 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8067 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8068 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8069 }
8070
Shuzhen Wange763e802016-03-31 10:24:29 -07008071 // AF scene change
8072 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8073 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8074 }
8075
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008076 // Enable ZSL
8077 if (enableZsl != nullptr) {
8078 uint8_t value = *enableZsl ?
8079 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8080 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8081 }
8082
Xu Han821ea9c2017-05-23 09:00:40 -07008083 // OIS Data
8084 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8085 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8086 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8087 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8088 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8089 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8090 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8091 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8092 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8093 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8094 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
8095 }
8096
Thierry Strudel3d639192016-09-09 11:52:26 -07008097 resultMetadata = camMetadata.release();
8098 return resultMetadata;
8099}
8100
8101/*===========================================================================
8102 * FUNCTION : saveExifParams
8103 *
8104 * DESCRIPTION: cache the 3A/stats EXIF debug parameters delivered in the
 *              metadata callback so they can be attached to JPEG EXIF later
8105 *
8106 * PARAMETERS :
8107 * @metadata : metadata information from callback
8108 *
8109 * RETURN : none
8110 *
8111 *==========================================================================*/
8112void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8113{
8114 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8115 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8116 if (mExifParams.debug_params) {
8117 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8118 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8119 }
8120 }
8121 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8122 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8123 if (mExifParams.debug_params) {
8124 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8125 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8126 }
8127 }
8128 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8129 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8130 if (mExifParams.debug_params) {
8131 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8132 mExifParams.debug_params->af_debug_params_valid = TRUE;
8133 }
8134 }
8135 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8136 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8137 if (mExifParams.debug_params) {
8138 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8139 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8140 }
8141 }
8142 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8143 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8144 if (mExifParams.debug_params) {
8145 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8146 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8147 }
8148 }
8149 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8150 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8151 if (mExifParams.debug_params) {
8152 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8153 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8154 }
8155 }
8156 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8157 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8158 if (mExifParams.debug_params) {
8159 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8160 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8161 }
8162 }
8163 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8164 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8165 if (mExifParams.debug_params) {
8166 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8167 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8168 }
8169 }
8170}
8171
8172/*===========================================================================
8173 * FUNCTION : get3AExifParams
8174 *
8175 * DESCRIPTION: return the cached 3A EXIF parameters
8176 *
8177 * PARAMETERS : none
8178 *
8179 *
8180 * RETURN : mm_jpeg_exif_params_t
8181 *
8182 *==========================================================================*/
8183mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8184{
8185 return mExifParams;
8186}
8187
8188/*===========================================================================
8189 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8190 *
8191 * DESCRIPTION: translate urgent (partial result) metadata from the HAL callback
 *              into framework result metadata
8192 *
8193 * PARAMETERS :
8194 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008195 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8196 * urgent metadata in a batch. Always true for
8197 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008198 *
8199 * RETURN : camera_metadata_t*
8200 * metadata in a format specified by fwk
8201 *==========================================================================*/
8202camera_metadata_t*
8203QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008204 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008205{
8206 CameraMetadata camMetadata;
8207 camera_metadata_t *resultMetadata;
8208
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008209 if (!lastUrgentMetadataInBatch) {
8210 /* In batch mode, use empty metadata if this is not the last in batch
8211 */
8212 resultMetadata = allocate_camera_metadata(0, 0);
8213 return resultMetadata;
8214 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008215
8216 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8217 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8218 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8219 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8220 }
8221
8222 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8223 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8224 &aecTrigger->trigger, 1);
8225 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8226 &aecTrigger->trigger_id, 1);
8227 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8228 aecTrigger->trigger);
8229 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8230 aecTrigger->trigger_id);
8231 }
8232
8233 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8234 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8235 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8236 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8237 }
8238
Thierry Strudel3d639192016-09-09 11:52:26 -07008239 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8240 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8241 &af_trigger->trigger, 1);
8242 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8243 af_trigger->trigger);
8244 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8245 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8246 af_trigger->trigger_id);
8247 }
8248
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008249 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8250 /*af regions*/
8251 int32_t afRegions[REGIONS_TUPLE_COUNT];
8252 // Adjust crop region from sensor output coordinate system to active
8253 // array coordinate system.
8254 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8255 hAfRegions->rect.width, hAfRegions->rect.height);
8256
8257 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8258 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8259 REGIONS_TUPLE_COUNT);
8260 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8261 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8262 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8263 hAfRegions->rect.height);
8264 }
8265
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008266 // AF region confidence
8267 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8268 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8269 }
8270
Thierry Strudel3d639192016-09-09 11:52:26 -07008271 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8272 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8273 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8274 if (NAME_NOT_FOUND != val) {
8275 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8276 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8277 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8278 } else {
8279 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8280 }
8281 }
8282
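    // Derive ANDROID_CONTROL_AE_MODE from the HAL redeye, LED flash and AE mode
    // values: redeye reduction takes precedence, then auto/on flash, then plain
    // AE on/off, then external flash.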
8283 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8284 uint32_t aeMode = CAM_AE_MODE_MAX;
8285 int32_t flashMode = CAM_FLASH_MODE_MAX;
8286 int32_t redeye = -1;
8287 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8288 aeMode = *pAeMode;
8289 }
8290 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8291 flashMode = *pFlashMode;
8292 }
8293 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8294 redeye = *pRedeye;
8295 }
8296
8297 if (1 == redeye) {
8298 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8299 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8300 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8301 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8302 flashMode);
8303 if (NAME_NOT_FOUND != val) {
8304 fwk_aeMode = (uint8_t)val;
8305 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8306 } else {
8307 LOGE("Unsupported flash mode %d", flashMode);
8308 }
8309 } else if (aeMode == CAM_AE_MODE_ON) {
8310 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8311 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8312 } else if (aeMode == CAM_AE_MODE_OFF) {
8313 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8314 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008315 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8316 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8317 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008318 } else {
8319 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8320 "flashMode:%d, aeMode:%u!!!",
8321 redeye, flashMode, aeMode);
8322 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008323 if (mInstantAEC) {
8324 // Increment the frame index count until a bound is reached for instant AEC.
8325 mInstantAecFrameIdxCount++;
8326 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8327 CAM_INTF_META_AEC_INFO, metadata) {
8328 LOGH("ae_params->settled = %d",ae_params->settled);
8329 // If AEC settled, or if number of frames reached bound value,
8330 // should reset instant AEC.
8331 if (ae_params->settled ||
8332 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8333 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8334 mInstantAEC = false;
8335 mResetInstantAEC = true;
8336 mInstantAecFrameIdxCount = 0;
8337 }
8338 }
8339 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008340 resultMetadata = camMetadata.release();
8341 return resultMetadata;
8342}
8343
8344/*===========================================================================
8345 * FUNCTION : dumpMetadataToFile
8346 *
8347 * DESCRIPTION: Dumps tuning metadata to file system
8348 *
8349 * PARAMETERS :
8350 * @meta : tuning metadata
8351 * @dumpFrameCount : current dump frame count
8352 * @enabled : enable flag for the dump
 * @type : type string used in the dump file name
 * @frameNumber : current frame number
8353 *
8354 *==========================================================================*/
8355void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8356 uint32_t &dumpFrameCount,
8357 bool enabled,
8358 const char *type,
8359 uint32_t frameNumber)
8360{
8361 //Some sanity checks
8362 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8363 LOGE("Tuning sensor data size bigger than expected %d: %d",
8364 meta.tuning_sensor_data_size,
8365 TUNING_SENSOR_DATA_MAX);
8366 return;
8367 }
8368
8369 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8370 LOGE("Tuning VFE data size bigger than expected %d: %d",
8371 meta.tuning_vfe_data_size,
8372 TUNING_VFE_DATA_MAX);
8373 return;
8374 }
8375
8376 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8377 LOGE("Tuning CPP data size bigger than expected %d: %d",
8378 meta.tuning_cpp_data_size,
8379 TUNING_CPP_DATA_MAX);
8380 return;
8381 }
8382
8383 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8384 LOGE("Tuning CAC data size bigger than expected %d: %d",
8385 meta.tuning_cac_data_size,
8386 TUNING_CAC_DATA_MAX);
8387 return;
8388 }
8389 //
8390
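    // Dump file name: QCAMERA_DUMP_FRM_LOCATION<timestamp><dumpFrameCount>m_<type>_<frameNumber>.bin,
    // written with the same header-plus-payload layout as the
    // QCAMERA3_TUNING_META_DATA_BLOB vendor tag above.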
8391 if(enabled){
8392 char timeBuf[FILENAME_MAX];
8393 char buf[FILENAME_MAX];
8394 memset(buf, 0, sizeof(buf));
8395 memset(timeBuf, 0, sizeof(timeBuf));
8396 time_t current_time;
8397 struct tm * timeinfo;
8398 time (&current_time);
8399 timeinfo = localtime (&current_time);
8400 if (timeinfo != NULL) {
8401 strftime (timeBuf, sizeof(timeBuf),
8402 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8403 }
8404 String8 filePath(timeBuf);
8405 snprintf(buf,
8406 sizeof(buf),
8407 "%dm_%s_%d.bin",
8408 dumpFrameCount,
8409 type,
8410 frameNumber);
8411 filePath.append(buf);
8412 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8413 if (file_fd >= 0) {
8414 ssize_t written_len = 0;
8415 meta.tuning_data_version = TUNING_DATA_VERSION;
8416 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8417 written_len += write(file_fd, data, sizeof(uint32_t));
8418 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8419 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8420 written_len += write(file_fd, data, sizeof(uint32_t));
8421 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8422 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8423 written_len += write(file_fd, data, sizeof(uint32_t));
8424 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8425 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8426 written_len += write(file_fd, data, sizeof(uint32_t));
8427 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8428 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8429 written_len += write(file_fd, data, sizeof(uint32_t));
8430 meta.tuning_mod3_data_size = 0;
8431 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8432 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8433 written_len += write(file_fd, data, sizeof(uint32_t));
8434 size_t total_size = meta.tuning_sensor_data_size;
8435 data = (void *)((uint8_t *)&meta.data);
8436 written_len += write(file_fd, data, total_size);
8437 total_size = meta.tuning_vfe_data_size;
8438 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8439 written_len += write(file_fd, data, total_size);
8440 total_size = meta.tuning_cpp_data_size;
8441 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8442 written_len += write(file_fd, data, total_size);
8443 total_size = meta.tuning_cac_data_size;
8444 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8445 written_len += write(file_fd, data, total_size);
8446 close(file_fd);
8447 }else {
8448 LOGE("fail to open file for metadata dumping");
8449 }
8450 }
8451}
8452
8453/*===========================================================================
8454 * FUNCTION : cleanAndSortStreamInfo
8455 *
8456 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8457 * and sort them such that raw stream is at the end of the list
8458 * This is a workaround for a camera daemon constraint.
8459 *
8460 * PARAMETERS : None
8461 *
8462 *==========================================================================*/
8463void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8464{
8465 List<stream_info_t *> newStreamInfo;
8466
8467 /*clean up invalid streams*/
8468 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8469 it != mStreamInfo.end();) {
8470 if(((*it)->status) == INVALID){
8471 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8472 delete channel;
8473 free(*it);
8474 it = mStreamInfo.erase(it);
8475 } else {
8476 it++;
8477 }
8478 }
8479
8480 // Move preview/video/callback/snapshot streams into newList
8481 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8482 it != mStreamInfo.end();) {
8483 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8484 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8485 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8486 newStreamInfo.push_back(*it);
8487 it = mStreamInfo.erase(it);
8488 } else
8489 it++;
8490 }
8491 // Move raw streams into newList
8492 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8493 it != mStreamInfo.end();) {
8494 newStreamInfo.push_back(*it);
8495 it = mStreamInfo.erase(it);
8496 }
8497
8498 mStreamInfo = newStreamInfo;
8499}
8500
8501/*===========================================================================
8502 * FUNCTION : extractJpegMetadata
8503 *
8504 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8505 * JPEG metadata is cached in HAL, and return as part of capture
8506 * result when metadata is returned from camera daemon.
8507 *
8508 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8509 * @request: capture request
8510 *
8511 *==========================================================================*/
8512void QCamera3HardwareInterface::extractJpegMetadata(
8513 CameraMetadata& jpegMetadata,
8514 const camera3_capture_request_t *request)
8515{
8516 CameraMetadata frame_settings;
8517 frame_settings = request->settings;
8518
8519 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8520 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8521 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8522 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8523
8524 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8525 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8526 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8527 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8528
8529 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8530 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8531 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8532 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8533
8534 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8535 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8536 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8537 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8538
8539 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8540 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8541 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8542 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8543
8544 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8545 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8546 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8547 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8548
8549 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8550 int32_t thumbnail_size[2];
8551 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8552 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8553 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8554 int32_t orientation =
8555 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008556 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008557 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8558 int32_t temp;
8559 temp = thumbnail_size[0];
8560 thumbnail_size[0] = thumbnail_size[1];
8561 thumbnail_size[1] = temp;
8562 }
8563 }
8564 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8565 thumbnail_size,
8566 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8567 }
8568
8569}
8570
8571/*===========================================================================
8572 * FUNCTION : convertToRegions
8573 *
8574 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8575 *
8576 * PARAMETERS :
8577 * @rect : cam_rect_t struct to convert
8578 * @region : int32_t destination array
8579 * @weight : if we are converting from cam_area_t, weight is valid
8580 * else weight = -1
8581 *
8582 *==========================================================================*/
8583void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8584 int32_t *region, int weight)
8585{
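    // Output follows the framework region convention [xmin, ymin, xmax, ymax, weight],
    // indexed here through the FACE_* enum values.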
Jason Lee8ce36fa2017-04-19 19:40:37 -07008586 region[FACE_LEFT] = rect.left;
8587 region[FACE_TOP] = rect.top;
8588 region[FACE_RIGHT] = rect.left + rect.width;
8589 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008590 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008591 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008592 }
8593}
8594
8595/*===========================================================================
8596 * FUNCTION : convertFromRegions
8597 *
8598 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8599 *
8600 * PARAMETERS :
8601 * @roi : cam_area_t destination to populate
8602 * @frame_settings : framework capture request settings
8603 * @tag : metadata tag holding the region array
8604 *                   (xmin, ymin, xmax, ymax, weight)
8605 *
8606 *==========================================================================*/
8607void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008608 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008609{
Thierry Strudel3d639192016-09-09 11:52:26 -07008610 int32_t x_min = frame_settings.find(tag).data.i32[0];
8611 int32_t y_min = frame_settings.find(tag).data.i32[1];
8612 int32_t x_max = frame_settings.find(tag).data.i32[2];
8613 int32_t y_max = frame_settings.find(tag).data.i32[3];
8614 roi.weight = frame_settings.find(tag).data.i32[4];
8615 roi.rect.left = x_min;
8616 roi.rect.top = y_min;
8617 roi.rect.width = x_max - x_min;
8618 roi.rect.height = y_max - y_min;
8619}
8620
8621/*===========================================================================
8622 * FUNCTION : resetIfNeededROI
8623 *
8624 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8625 * crop region
8626 *
8627 * PARAMETERS :
8628 * @roi : cam_area_t struct to resize
8629 * @scalerCropRegion : cam_crop_region_t region to compare against
8630 *
8631 *
8632 *==========================================================================*/
8633bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8634 const cam_crop_region_t* scalerCropRegion)
8635{
8636 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8637 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8638 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8639 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8640
8641 /* According to the spec, weight = 0 indicates that the roi should be disabled.
8642 * Without this check, the validation below (whether the roi lies inside the
8643 * scalar crop region) would fail, the roi would not be reset, and the
8644 * algorithm would keep using a stale roi window.
8645 */
8646 if (roi->weight == 0) {
8647 return true;
8648 }
8649
8650 if ((roi_x_max < scalerCropRegion->left) ||
8651 // right edge of roi window is left of scalar crop's left edge
8652 (roi_y_max < scalerCropRegion->top) ||
8653 // bottom edge of roi window is above scalar crop's top edge
8654 (roi->rect.left > crop_x_max) ||
8655 // left edge of roi window is beyond (right of) scalar crop's right edge
8656 (roi->rect.top > crop_y_max)){
8657 // top edge of roi window is below scalar crop's bottom edge
8658 return false;
8659 }
8660 if (roi->rect.left < scalerCropRegion->left) {
8661 roi->rect.left = scalerCropRegion->left;
8662 }
8663 if (roi->rect.top < scalerCropRegion->top) {
8664 roi->rect.top = scalerCropRegion->top;
8665 }
8666 if (roi_x_max > crop_x_max) {
8667 roi_x_max = crop_x_max;
8668 }
8669 if (roi_y_max > crop_y_max) {
8670 roi_y_max = crop_y_max;
8671 }
8672 roi->rect.width = roi_x_max - roi->rect.left;
8673 roi->rect.height = roi_y_max - roi->rect.top;
8674 return true;
8675}
8676
8677/*===========================================================================
8678 * FUNCTION : convertLandmarks
8679 *
8680 * DESCRIPTION: helper method to extract the landmarks from face detection info
8681 *
8682 * PARAMETERS :
8683 * @landmark_data : input landmark data to be converted
8684 * @landmarks : int32_t destination array
8685 *
8686 *
8687 *==========================================================================*/
8688void QCamera3HardwareInterface::convertLandmarks(
8689 cam_face_landmarks_info_t landmark_data,
8690 int32_t *landmarks)
8691{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008692 if (landmark_data.is_left_eye_valid) {
8693 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8694 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8695 } else {
8696 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8697 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8698 }
8699
8700 if (landmark_data.is_right_eye_valid) {
8701 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8702 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8703 } else {
8704 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8705 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8706 }
8707
8708 if (landmark_data.is_mouth_valid) {
8709 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8710 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8711 } else {
8712 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8713 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8714 }
8715}
8716
8717/*===========================================================================
8718 * FUNCTION : setInvalidLandmarks
8719 *
8720 * DESCRIPTION: helper method to set invalid landmarks
8721 *
8722 * PARAMETERS :
8723 * @landmarks : int32_t destination array
8724 *
8725 *
8726 *==========================================================================*/
8727void QCamera3HardwareInterface::setInvalidLandmarks(
8728 int32_t *landmarks)
8729{
8730 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8731 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8732 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8733 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8734 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8735 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008736}
8737
8738#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008739
8740/*===========================================================================
8741 * FUNCTION : getCapabilities
8742 *
8743 * DESCRIPTION: query camera capability from back-end
8744 *
8745 * PARAMETERS :
8746 * @ops : mm-interface ops structure
8747 * @cam_handle : camera handle for which we need capability
8748 *
8749 * RETURN : ptr type of capability structure
8750 * capability for success
8751 * NULL for failure
8752 *==========================================================================*/
8753cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8754 uint32_t cam_handle)
8755{
8756 int rc = NO_ERROR;
8757 QCamera3HeapMemory *capabilityHeap = NULL;
8758 cam_capability_t *cap_ptr = NULL;
8759
8760 if (ops == NULL) {
8761 LOGE("Invalid arguments");
8762 return NULL;
8763 }
8764
8765 capabilityHeap = new QCamera3HeapMemory(1);
8766 if (capabilityHeap == NULL) {
8767 LOGE("creation of capabilityHeap failed");
8768 return NULL;
8769 }
8770
8771 /* Allocate memory for capability buffer */
8772 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8773 if(rc != OK) {
8774 LOGE("No memory for cappability");
8775 goto allocate_failed;
8776 }
8777
8778 /* Map memory for capability buffer */
8779 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8780
8781 rc = ops->map_buf(cam_handle,
8782 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8783 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8784 if(rc < 0) {
8785 LOGE("failed to map capability buffer");
8786 rc = FAILED_TRANSACTION;
8787 goto map_failed;
8788 }
8789
8790 /* Query Capability */
8791 rc = ops->query_capability(cam_handle);
8792 if(rc < 0) {
8793 LOGE("failed to query capability");
8794 rc = FAILED_TRANSACTION;
8795 goto query_failed;
8796 }
8797
8798 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8799 if (cap_ptr == NULL) {
8800 LOGE("out of memory");
8801 rc = NO_MEMORY;
8802 goto query_failed;
8803 }
8804
8805 memset(cap_ptr, 0, sizeof(cam_capability_t));
8806 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8807
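    // Zero out the x/y analysis padding offsets for every analysis stream type in
    // the cached capability copy.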
8808 int index;
8809 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8810 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8811 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8812 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8813 }
8814
8815query_failed:
8816 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8817map_failed:
8818 capabilityHeap->deallocate();
8819allocate_failed:
8820 delete capabilityHeap;
8821
8822 if (rc != NO_ERROR) {
8823 return NULL;
8824 } else {
8825 return cap_ptr;
8826 }
8827}
8828
Thierry Strudel3d639192016-09-09 11:52:26 -07008829/*===========================================================================
8830 * FUNCTION : initCapabilities
8831 *
8832 * DESCRIPTION: initialize camera capabilities in static data struct
8833 *
8834 * PARAMETERS :
8835 * @cameraId : camera Id
8836 *
8837 * RETURN : int32_t type of status
8838 * NO_ERROR -- success
8839 * none-zero failure code
8840 *==========================================================================*/
8841int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8842{
8843 int rc = 0;
8844 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008845 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008846
8847 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8848 if (rc) {
8849 LOGE("camera_open failed. rc = %d", rc);
8850 goto open_failed;
8851 }
8852 if (!cameraHandle) {
8853 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8854 goto open_failed;
8855 }
8856
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008857 handle = get_main_camera_handle(cameraHandle->camera_handle);
8858 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8859 if (gCamCapability[cameraId] == NULL) {
8860 rc = FAILED_TRANSACTION;
8861 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008862 }
8863
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008864 gCamCapability[cameraId]->camera_index = cameraId;
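    // For dual-camera sensors, also query the aux camera capability and keep a
    // copy of the main capability so both can be referenced independently later.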
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008865 if (is_dual_camera_by_idx(cameraId)) {
8866 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8867 gCamCapability[cameraId]->aux_cam_cap =
8868 getCapabilities(cameraHandle->ops, handle);
8869 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8870 rc = FAILED_TRANSACTION;
8871 free(gCamCapability[cameraId]);
8872 goto failed_op;
8873 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008874
8875 // Copy the main camera capability to main_cam_cap struct
8876 gCamCapability[cameraId]->main_cam_cap =
8877 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8878 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8879 LOGE("out of memory");
8880 rc = NO_MEMORY;
8881 goto failed_op;
8882 }
8883 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8884 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008885 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008886failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008887 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8888 cameraHandle = NULL;
8889open_failed:
8890 return rc;
8891}
8892
8893/*==========================================================================
8894 * FUNCTION : get3Aversion
8895 *
8896 * DESCRIPTION: get the Q3A S/W version
8897 *
8898 * PARAMETERS :
8899 * @sw_version: Reference of Q3A structure which will hold version info upon
8900 * return
8901 *
8902 * RETURN : None
8903 *
8904 *==========================================================================*/
8905void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8906{
8907 if(gCamCapability[mCameraId])
8908 sw_version = gCamCapability[mCameraId]->q3a_version;
8909 else
8910 LOGE("Capability structure NULL!");
8911}
8912
8913
8914/*===========================================================================
8915 * FUNCTION : initParameters
8916 *
8917 * DESCRIPTION: initialize camera parameters
8918 *
8919 * PARAMETERS :
8920 *
8921 * RETURN : int32_t type of status
8922 * NO_ERROR -- success
8923 * none-zero failure code
8924 *==========================================================================*/
8925int QCamera3HardwareInterface::initParameters()
8926{
8927 int rc = 0;
8928
8929 //Allocate Set Param Buffer
8930 mParamHeap = new QCamera3HeapMemory(1);
8931 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8932 if(rc != OK) {
8933 rc = NO_MEMORY;
8934 LOGE("Failed to allocate SETPARM Heap memory");
8935 delete mParamHeap;
8936 mParamHeap = NULL;
8937 return rc;
8938 }
8939
8940 //Map memory for parameters buffer
8941 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8942 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8943 mParamHeap->getFd(0),
8944 sizeof(metadata_buffer_t),
8945 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8946 if(rc < 0) {
8947 LOGE("failed to map SETPARM buffer");
8948 rc = FAILED_TRANSACTION;
8949 mParamHeap->deallocate();
8950 delete mParamHeap;
8951 mParamHeap = NULL;
8952 return rc;
8953 }
8954
8955 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8956
8957 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8958 return rc;
8959}
8960
8961/*===========================================================================
8962 * FUNCTION : deinitParameters
8963 *
8964 * DESCRIPTION: de-initialize camera parameters
8965 *
8966 * PARAMETERS :
8967 *
8968 * RETURN : NONE
8969 *==========================================================================*/
8970void QCamera3HardwareInterface::deinitParameters()
8971{
8972 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8973 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8974
8975 mParamHeap->deallocate();
8976 delete mParamHeap;
8977 mParamHeap = NULL;
8978
8979 mParameters = NULL;
8980
8981 free(mPrevParameters);
8982 mPrevParameters = NULL;
8983}
8984
8985/*===========================================================================
8986 * FUNCTION : calcMaxJpegSize
8987 *
8988 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8989 *
8990 * PARAMETERS :
8991 * @camera_id : camera Id
 *
8992 * RETURN : max_jpeg_size
8993 *==========================================================================*/
8994size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8995{
8996 size_t max_jpeg_size = 0;
8997 size_t temp_width, temp_height;
8998 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8999 MAX_SIZES_CNT);
9000 for (size_t i = 0; i < count; i++) {
9001 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9002 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9003 if (temp_width * temp_height > max_jpeg_size ) {
9004 max_jpeg_size = temp_width * temp_height;
9005 }
9006 }
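    // Worst-case JPEG buffer: 1.5x the largest picture area (presumably sized for a
    // YUV420 footprint) plus the trailing camera3_jpeg_blob_t transport header.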
9007 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9008 return max_jpeg_size;
9009}
9010
9011/*===========================================================================
9012 * FUNCTION : getMaxRawSize
9013 *
9014 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9015 *
9016 * PARAMETERS :
9017 * @camera_id : camera Id
 *
9018 * RETURN : Largest supported Raw Dimension
9019 *==========================================================================*/
9020cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9021{
9022 int max_width = 0;
9023 cam_dimension_t maxRawSize;
9024
9025 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9026 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9027 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9028 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9029 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9030 }
9031 }
9032 return maxRawSize;
9033}
9034
9035
9036/*===========================================================================
9037 * FUNCTION : calcMaxJpegDim
9038 *
9039 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9040 *
9041 * PARAMETERS :
9042 *
9043 * RETURN : max_jpeg_dim
9044 *==========================================================================*/
9045cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9046{
9047 cam_dimension_t max_jpeg_dim;
9048 cam_dimension_t curr_jpeg_dim;
9049 max_jpeg_dim.width = 0;
9050 max_jpeg_dim.height = 0;
9051 curr_jpeg_dim.width = 0;
9052 curr_jpeg_dim.height = 0;
9053 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9054 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9055 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9056 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9057 max_jpeg_dim.width * max_jpeg_dim.height ) {
9058 max_jpeg_dim.width = curr_jpeg_dim.width;
9059 max_jpeg_dim.height = curr_jpeg_dim.height;
9060 }
9061 }
9062 return max_jpeg_dim;
9063}
9064
9065/*===========================================================================
9066 * FUNCTION : addStreamConfig
9067 *
9068 * DESCRIPTION: adds the stream configuration to the array
9069 *
9070 * PARAMETERS :
9071 * @available_stream_configs : pointer to stream configuration array
9072 * @scalar_format : scalar format
9073 * @dim : configuration dimension
9074 * @config_type : input or output configuration type
9075 *
9076 * RETURN : NONE
9077 *==========================================================================*/
9078void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9079 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9080{
9081 available_stream_configs.add(scalar_format);
9082 available_stream_configs.add(dim.width);
9083 available_stream_configs.add(dim.height);
9084 available_stream_configs.add(config_type);
9085}
9086
9087/*===========================================================================
 9088 * FUNCTION : supportBurstCapture
9089 *
9090 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9091 *
9092 * PARAMETERS :
9093 * @cameraId : camera Id
9094 *
9095 * RETURN : true if camera supports BURST_CAPTURE
9096 * false otherwise
9097 *==========================================================================*/
9098bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9099{
9100 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9101 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9102 const int32_t highResWidth = 3264;
9103 const int32_t highResHeight = 2448;
9104
9105 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9106 // Maximum resolution images cannot be captured at >= 10fps
9107 // -> not supporting BURST_CAPTURE
9108 return false;
9109 }
9110
9111 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9112 // Maximum resolution images can be captured at >= 20fps
9113 // --> supporting BURST_CAPTURE
9114 return true;
9115 }
9116
9117 // Find the smallest highRes resolution, or largest resolution if there is none
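    // Assuming the picture size table is sorted in descending area order, this scan
    // stops at the last entry whose area is still >= highResWidth x highResHeight (~8MP).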
9118 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9119 MAX_SIZES_CNT);
9120 size_t highRes = 0;
9121 while ((highRes + 1 < totalCnt) &&
9122 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9123 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9124 highResWidth * highResHeight)) {
9125 highRes++;
9126 }
9127 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9128 return true;
9129 } else {
9130 return false;
9131 }
9132}
9133
9134/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009135 * FUNCTION : getPDStatIndex
9136 *
9137 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9138 *
9139 * PARAMETERS :
9140 * @caps : camera capabilities
9141 *
9142 * RETURN : int32_t type
9143 * non-negative - on success
9144 * -1 - on failure
9145 *==========================================================================*/
9146int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9147 if (nullptr == caps) {
9148 return -1;
9149 }
9150
9151 uint32_t metaRawCount = caps->meta_raw_channel_count;
9152 int32_t ret = -1;
9153 for (size_t i = 0; i < metaRawCount; i++) {
9154 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9155 ret = i;
9156 break;
9157 }
9158 }
9159
9160 return ret;
9161}
9162
9163/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009164 * FUNCTION : initStaticMetadata
9165 *
9166 * DESCRIPTION: initialize the static metadata
9167 *
9168 * PARAMETERS :
9169 * @cameraId : camera Id
9170 *
9171 * RETURN : int32_t type of status
9172 * 0 -- success
9173 * non-zero failure code
9174 *==========================================================================*/
9175int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9176{
9177 int rc = 0;
9178 CameraMetadata staticInfo;
9179 size_t count = 0;
9180 bool limitedDevice = false;
9181 char prop[PROPERTY_VALUE_MAX];
9182 bool supportBurst = false;
9183
9184 supportBurst = supportBurstCapture(cameraId);
9185
 9186    /* If the sensor is a YUV sensor (no raw support), if per-frame control is not
 9187     * guaranteed, or if the min fps of the max resolution is less than 20 fps, it is
 9188     * advertised as a limited device */
9189 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9190 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9191 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9192 !supportBurst;
9193
9194 uint8_t supportedHwLvl = limitedDevice ?
9195 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009196#ifndef USE_HAL_3_3
9197 // LEVEL_3 - This device will support level 3.
9198 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9199#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009200 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009201#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009202
9203 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9204 &supportedHwLvl, 1);
9205
9206 bool facingBack = false;
9207 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9208 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9209 facingBack = true;
9210 }
9211 /*HAL 3 only*/
9212 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9213 &gCamCapability[cameraId]->min_focus_distance, 1);
9214
9215 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9216 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9217
 9218    /* should be using focal lengths, but the sensor doesn't provide that info yet */
9219 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9220 &gCamCapability[cameraId]->focal_length,
9221 1);
9222
9223 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9224 gCamCapability[cameraId]->apertures,
9225 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9226
9227 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9228 gCamCapability[cameraId]->filter_densities,
9229 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9230
9231
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009232 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9233 size_t mode_count =
9234 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9235 for (size_t i = 0; i < mode_count; i++) {
9236 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9237 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009238 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009239 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009240
9241 int32_t lens_shading_map_size[] = {
9242 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9243 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9244 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9245 lens_shading_map_size,
9246 sizeof(lens_shading_map_size)/sizeof(int32_t));
9247
9248 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9249 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9250
9251 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9252 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9253
9254 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9255 &gCamCapability[cameraId]->max_frame_duration, 1);
9256
9257 camera_metadata_rational baseGainFactor = {
9258 gCamCapability[cameraId]->base_gain_factor.numerator,
9259 gCamCapability[cameraId]->base_gain_factor.denominator};
9260 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9261 &baseGainFactor, 1);
9262
9263 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9264 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9265
9266 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9267 gCamCapability[cameraId]->pixel_array_size.height};
9268 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9269 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9270
9271 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9272 gCamCapability[cameraId]->active_array_size.top,
9273 gCamCapability[cameraId]->active_array_size.width,
9274 gCamCapability[cameraId]->active_array_size.height};
9275 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9276 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9277
9278 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9279 &gCamCapability[cameraId]->white_level, 1);
9280
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009281 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9282 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9283 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009284 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009285 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009286
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009287#ifndef USE_HAL_3_3
9288 bool hasBlackRegions = false;
9289 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9290 LOGW("black_region_count: %d is bounded to %d",
9291 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9292 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9293 }
9294 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9295 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9296 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9297 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9298 }
9299 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9300 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9301 hasBlackRegions = true;
9302 }
9303#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009304 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9305 &gCamCapability[cameraId]->flash_charge_duration, 1);
9306
9307 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9308 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9309
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009310 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9311 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9312 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009313 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9314 &timestampSource, 1);
9315
Thierry Strudel54dc9782017-02-15 12:12:10 -08009316 //update histogram vendor data
9317 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009318 &gCamCapability[cameraId]->histogram_size, 1);
9319
Thierry Strudel54dc9782017-02-15 12:12:10 -08009320 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009321 &gCamCapability[cameraId]->max_histogram_count, 1);
9322
Shuzhen Wang14415f52016-11-16 18:26:18 -08009323    //Set the supported bins to {max_bins, max_bins/2, max_bins/4, ...}
 9324    //so that the app can request fewer bins than the maximum supported.
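    //Illustrative example only: if max_histogram_count were 256 and
    //MIN_CAM_HISTOGRAM_STATS_SIZE were 64, the advertised list would be {256, 128, 64}.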
9325 std::vector<int32_t> histBins;
9326 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9327 histBins.push_back(maxHistBins);
9328 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9329 (maxHistBins & 0x1) == 0) {
9330 histBins.push_back(maxHistBins >> 1);
9331 maxHistBins >>= 1;
9332 }
9333 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9334 histBins.data(), histBins.size());
9335
Thierry Strudel3d639192016-09-09 11:52:26 -07009336 int32_t sharpness_map_size[] = {
9337 gCamCapability[cameraId]->sharpness_map_size.width,
9338 gCamCapability[cameraId]->sharpness_map_size.height};
9339
9340 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9341 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9342
9343 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9344 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9345
Emilian Peev0f3c3162017-03-15 12:57:46 +00009346 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9347 if (0 <= indexPD) {
9348 // Advertise PD stats data as part of the Depth capabilities
9349 int32_t depthWidth =
9350 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9351 int32_t depthHeight =
9352 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009353 int32_t depthStride =
9354 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009355 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
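        // depthSamplesCount is derived from the 2-bytes-per-pixel PD buffer size; the
        // divide-by-16 packing factor is vendor specific.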
9356 assert(0 < depthSamplesCount);
9357 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9358 &depthSamplesCount, 1);
9359
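        // Two flattened (format, width, height, direction) entries follow: the RAW16
        // PD stats stream and its BLOB counterpart sized by depthSamplesCount.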
9360 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9361 depthHeight,
9362 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9363 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9364 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9365 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9366 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9367
9368 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9369 depthHeight, 33333333,
9370 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9371 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9372 depthMinDuration,
9373 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9374
9375 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9376 depthHeight, 0,
9377 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9378 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9379 depthStallDuration,
9380 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9381
9382 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9383 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009384
9385 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9386 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9387 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009388 }
9389
Thierry Strudel3d639192016-09-09 11:52:26 -07009390 int32_t scalar_formats[] = {
9391 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9392 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9393 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9394 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9395 HAL_PIXEL_FORMAT_RAW10,
9396 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009397 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9398 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9399 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009400
9401 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9402 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9403 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9404 count, MAX_SIZES_CNT, available_processed_sizes);
9405 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9406 available_processed_sizes, count * 2);
9407
9408 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9409 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9410 makeTable(gCamCapability[cameraId]->raw_dim,
9411 count, MAX_SIZES_CNT, available_raw_sizes);
9412 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9413 available_raw_sizes, count * 2);
9414
9415 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9416 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9417 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9418 count, MAX_SIZES_CNT, available_fps_ranges);
9419 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9420 available_fps_ranges, count * 2);
9421
9422 camera_metadata_rational exposureCompensationStep = {
9423 gCamCapability[cameraId]->exp_compensation_step.numerator,
9424 gCamCapability[cameraId]->exp_compensation_step.denominator};
9425 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9426 &exposureCompensationStep, 1);
9427
9428 Vector<uint8_t> availableVstabModes;
9429 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9430 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009431 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009432 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009433 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009434 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009435 count = IS_TYPE_MAX;
9436 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9437 for (size_t i = 0; i < count; i++) {
9438 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9439 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9440 eisSupported = true;
9441 break;
9442 }
9443 }
9444 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009445 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9446 }
9447 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9448 availableVstabModes.array(), availableVstabModes.size());
9449
9450 /*HAL 1 and HAL 3 common*/
9451 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9452 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9453 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009454 // Cap the max zoom to the max preferred value
9455 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
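    // Note: maxZoomStep / minZoomStep is an integer division, so the ratio is floored
    // before being compared against MAX_PREFERRED_ZOOM_RATIO.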
Thierry Strudel3d639192016-09-09 11:52:26 -07009456 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9457 &maxZoom, 1);
9458
9459 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9460 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9461
9462 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9463 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9464 max3aRegions[2] = 0; /* AF not supported */
9465 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9466 max3aRegions, 3);
9467
9468 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9469 memset(prop, 0, sizeof(prop));
9470 property_get("persist.camera.facedetect", prop, "1");
9471 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9472 LOGD("Support face detection mode: %d",
9473 supportedFaceDetectMode);
9474
9475 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009476    /* supported mode should be OFF if the max number of faces is 0 */
9477 if (maxFaces <= 0) {
9478 supportedFaceDetectMode = 0;
9479 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009480 Vector<uint8_t> availableFaceDetectModes;
9481 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9482 if (supportedFaceDetectMode == 1) {
9483 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9484 } else if (supportedFaceDetectMode == 2) {
9485 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9486 } else if (supportedFaceDetectMode == 3) {
9487 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9488 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9489 } else {
9490 maxFaces = 0;
9491 }
9492 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9493 availableFaceDetectModes.array(),
9494 availableFaceDetectModes.size());
9495 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9496 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009497 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9498 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9499 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009500
9501 int32_t exposureCompensationRange[] = {
9502 gCamCapability[cameraId]->exposure_compensation_min,
9503 gCamCapability[cameraId]->exposure_compensation_max};
9504 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9505 exposureCompensationRange,
9506 sizeof(exposureCompensationRange)/sizeof(int32_t));
9507
9508 uint8_t lensFacing = (facingBack) ?
9509 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9510 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9511
9512 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9513 available_thumbnail_sizes,
9514 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9515
 9516    /* all supported sizes will be combined into this tag */
9517 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9518 /*android.scaler.availableStreamConfigurations*/
9519 Vector<int32_t> available_stream_configs;
9520 cam_dimension_t active_array_dim;
9521 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9522 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009523
 9524    /*advertise the list of supported input dimensions based on the property below.
 9525    By default all sizes up to 5MP will be advertised.
 9526    Note that the setprop resolution format should be WxH,
 9527    e.g.: adb shell setprop persist.camera.input.minsize 1280x720
 9528    To list all supported sizes, the property needs to be set to "0x0" */
9529 cam_dimension_t minInputSize = {2592,1944}; //5MP
9530 memset(prop, 0, sizeof(prop));
9531 property_get("persist.camera.input.minsize", prop, "2592x1944");
9532 if (strlen(prop) > 0) {
9533 char *saveptr = NULL;
9534 char *token = strtok_r(prop, "x", &saveptr);
9535 if (token != NULL) {
9536 minInputSize.width = atoi(token);
9537 }
9538 token = strtok_r(NULL, "x", &saveptr);
9539 if (token != NULL) {
9540 minInputSize.height = atoi(token);
9541 }
9542 }
9543
Thierry Strudel3d639192016-09-09 11:52:26 -07009544    /* Add input/output stream configurations for each scalar format */
9545 for (size_t j = 0; j < scalar_formats_count; j++) {
9546 switch (scalar_formats[j]) {
9547 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9548 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9549 case HAL_PIXEL_FORMAT_RAW10:
9550 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9551 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9552 addStreamConfig(available_stream_configs, scalar_formats[j],
9553 gCamCapability[cameraId]->raw_dim[i],
9554 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9555 }
9556 break;
9557 case HAL_PIXEL_FORMAT_BLOB:
9558 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9559 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9560 addStreamConfig(available_stream_configs, scalar_formats[j],
9561 gCamCapability[cameraId]->picture_sizes_tbl[i],
9562 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9563 }
9564 break;
9565 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9566 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9567 default:
9568 cam_dimension_t largest_picture_size;
9569 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9570 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9571 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9572 addStreamConfig(available_stream_configs, scalar_formats[j],
9573 gCamCapability[cameraId]->picture_sizes_tbl[i],
9574 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009575                /* For the below 2 formats we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009576 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9577 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009578 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9579 >= minInputSize.width) || (gCamCapability[cameraId]->
9580 picture_sizes_tbl[i].height >= minInputSize.height)) {
9581 addStreamConfig(available_stream_configs, scalar_formats[j],
9582 gCamCapability[cameraId]->picture_sizes_tbl[i],
9583 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9584 }
9585 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009586 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009587
Thierry Strudel3d639192016-09-09 11:52:26 -07009588 break;
9589 }
9590 }
9591
9592 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9593 available_stream_configs.array(), available_stream_configs.size());
9594 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9595 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9596
9597 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9598 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9599
9600 /* android.scaler.availableMinFrameDurations */
9601 Vector<int64_t> available_min_durations;
9602 for (size_t j = 0; j < scalar_formats_count; j++) {
9603 switch (scalar_formats[j]) {
9604 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9605 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9606 case HAL_PIXEL_FORMAT_RAW10:
9607 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9608 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9609 available_min_durations.add(scalar_formats[j]);
9610 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9611 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9612 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9613 }
9614 break;
9615 default:
9616 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9617 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9618 available_min_durations.add(scalar_formats[j]);
9619 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9620 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9621 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9622 }
9623 break;
9624 }
9625 }
9626 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9627 available_min_durations.array(), available_min_durations.size());
9628
9629 Vector<int32_t> available_hfr_configs;
9630 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9631 int32_t fps = 0;
9632 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9633 case CAM_HFR_MODE_60FPS:
9634 fps = 60;
9635 break;
9636 case CAM_HFR_MODE_90FPS:
9637 fps = 90;
9638 break;
9639 case CAM_HFR_MODE_120FPS:
9640 fps = 120;
9641 break;
9642 case CAM_HFR_MODE_150FPS:
9643 fps = 150;
9644 break;
9645 case CAM_HFR_MODE_180FPS:
9646 fps = 180;
9647 break;
9648 case CAM_HFR_MODE_210FPS:
9649 fps = 210;
9650 break;
9651 case CAM_HFR_MODE_240FPS:
9652 fps = 240;
9653 break;
9654 case CAM_HFR_MODE_480FPS:
9655 fps = 480;
9656 break;
9657 case CAM_HFR_MODE_OFF:
9658 case CAM_HFR_MODE_MAX:
9659 default:
9660 break;
9661 }
9662
9663 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9664 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
 9665            /* For each HFR frame rate, we need to advertise one variable fps range
 9666             * and one fixed fps range per dimension. E.g.: for 120 FPS, advertise [30, 120]
 9667             * and [120, 120]. While camcorder preview alone is running, [30, 120] is
 9668             * set by the app. When video recording is started, [120, 120] is
 9669             * set. This way the sensor configuration does not change when recording
 9670             * is started */
9671
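            /* Illustrative example: with PREVIEW_FPS_FOR_HFR at 30, a 1920x1080 entry
             * at 120 FPS emits (1920, 1080, 30, 120, 4) followed by
             * (1920, 1080, 120, 120, 4). */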
9672 /* (width, height, fps_min, fps_max, batch_size_max) */
9673 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9674 j < MAX_SIZES_CNT; j++) {
9675 available_hfr_configs.add(
9676 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9677 available_hfr_configs.add(
9678 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9679 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9680 available_hfr_configs.add(fps);
9681 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9682
9683 /* (width, height, fps_min, fps_max, batch_size_max) */
9684 available_hfr_configs.add(
9685 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9686 available_hfr_configs.add(
9687 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9688 available_hfr_configs.add(fps);
9689 available_hfr_configs.add(fps);
9690 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9691 }
9692 }
9693 }
9694 //Advertise HFR capability only if the property is set
9695 memset(prop, 0, sizeof(prop));
9696 property_get("persist.camera.hal3hfr.enable", prop, "1");
9697 uint8_t hfrEnable = (uint8_t)atoi(prop);
9698
9699 if(hfrEnable && available_hfr_configs.array()) {
9700 staticInfo.update(
9701 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9702 available_hfr_configs.array(), available_hfr_configs.size());
9703 }
9704
9705 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9706 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9707 &max_jpeg_size, 1);
9708
9709 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9710 size_t size = 0;
9711 count = CAM_EFFECT_MODE_MAX;
9712 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9713 for (size_t i = 0; i < count; i++) {
9714 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9715 gCamCapability[cameraId]->supported_effects[i]);
9716 if (NAME_NOT_FOUND != val) {
9717 avail_effects[size] = (uint8_t)val;
9718 size++;
9719 }
9720 }
9721 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9722 avail_effects,
9723 size);
9724
9725 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9726 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9727 size_t supported_scene_modes_cnt = 0;
9728 count = CAM_SCENE_MODE_MAX;
9729 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9730 for (size_t i = 0; i < count; i++) {
9731 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9732 CAM_SCENE_MODE_OFF) {
9733 int val = lookupFwkName(SCENE_MODES_MAP,
9734 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9735 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009736
Thierry Strudel3d639192016-09-09 11:52:26 -07009737 if (NAME_NOT_FOUND != val) {
9738 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9739 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9740 supported_scene_modes_cnt++;
9741 }
9742 }
9743 }
9744 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9745 avail_scene_modes,
9746 supported_scene_modes_cnt);
9747
9748 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9749 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9750 supported_scene_modes_cnt,
9751 CAM_SCENE_MODE_MAX,
9752 scene_mode_overrides,
9753 supported_indexes,
9754 cameraId);
9755
9756 if (supported_scene_modes_cnt == 0) {
9757 supported_scene_modes_cnt = 1;
9758 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9759 }
9760
9761 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9762 scene_mode_overrides, supported_scene_modes_cnt * 3);
9763
9764 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9765 ANDROID_CONTROL_MODE_AUTO,
9766 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9767 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9768 available_control_modes,
9769 3);
9770
9771 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9772 size = 0;
9773 count = CAM_ANTIBANDING_MODE_MAX;
9774 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9775 for (size_t i = 0; i < count; i++) {
9776 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9777 gCamCapability[cameraId]->supported_antibandings[i]);
9778 if (NAME_NOT_FOUND != val) {
9779 avail_antibanding_modes[size] = (uint8_t)val;
9780 size++;
9781 }
9782
9783 }
9784 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9785 avail_antibanding_modes,
9786 size);
9787
9788 uint8_t avail_abberation_modes[] = {
9789 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9790 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9791 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9792 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9793 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9794 if (0 == count) {
9795 // If no aberration correction modes are available for a device, this advertise OFF mode
 9796        // If no aberration correction modes are available for a device, advertise only the OFF mode
9797 } else {
9798 // If count is not zero then atleast one among the FAST or HIGH quality is supported
 9799        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
 9800        // So, advertise all 3 modes if at least one mode is supported, as per the
 9801        // new M requirement
9802 }
9803 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9804 avail_abberation_modes,
9805 size);
9806
9807 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9808 size = 0;
9809 count = CAM_FOCUS_MODE_MAX;
9810 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9811 for (size_t i = 0; i < count; i++) {
9812 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9813 gCamCapability[cameraId]->supported_focus_modes[i]);
9814 if (NAME_NOT_FOUND != val) {
9815 avail_af_modes[size] = (uint8_t)val;
9816 size++;
9817 }
9818 }
9819 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9820 avail_af_modes,
9821 size);
9822
9823 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9824 size = 0;
9825 count = CAM_WB_MODE_MAX;
9826 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9827 for (size_t i = 0; i < count; i++) {
9828 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9829 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9830 gCamCapability[cameraId]->supported_white_balances[i]);
9831 if (NAME_NOT_FOUND != val) {
9832 avail_awb_modes[size] = (uint8_t)val;
9833 size++;
9834 }
9835 }
9836 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9837 avail_awb_modes,
9838 size);
9839
9840 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9841 count = CAM_FLASH_FIRING_LEVEL_MAX;
9842 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9843 count);
9844 for (size_t i = 0; i < count; i++) {
9845 available_flash_levels[i] =
9846 gCamCapability[cameraId]->supported_firing_levels[i];
9847 }
9848 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9849 available_flash_levels, count);
9850
9851 uint8_t flashAvailable;
9852 if (gCamCapability[cameraId]->flash_available)
9853 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9854 else
9855 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9856 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9857 &flashAvailable, 1);
9858
9859 Vector<uint8_t> avail_ae_modes;
9860 count = CAM_AE_MODE_MAX;
9861 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9862 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009863 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9864 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9865 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9866 }
9867 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009868 }
9869 if (flashAvailable) {
9870 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9871 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9872 }
9873 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9874 avail_ae_modes.array(),
9875 avail_ae_modes.size());
9876
9877 int32_t sensitivity_range[2];
9878 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9879 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9880 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9881 sensitivity_range,
9882 sizeof(sensitivity_range) / sizeof(int32_t));
9883
9884 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9885 &gCamCapability[cameraId]->max_analog_sensitivity,
9886 1);
9887
9888 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9889 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9890 &sensor_orientation,
9891 1);
9892
9893 int32_t max_output_streams[] = {
9894 MAX_STALLING_STREAMS,
9895 MAX_PROCESSED_STREAMS,
9896 MAX_RAW_STREAMS};
9897 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9898 max_output_streams,
9899 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9900
9901 uint8_t avail_leds = 0;
9902 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9903 &avail_leds, 0);
9904
9905 uint8_t focus_dist_calibrated;
9906 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9907 gCamCapability[cameraId]->focus_dist_calibrated);
9908 if (NAME_NOT_FOUND != val) {
9909 focus_dist_calibrated = (uint8_t)val;
9910 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9911 &focus_dist_calibrated, 1);
9912 }
9913
9914 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9915 size = 0;
9916 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9917 MAX_TEST_PATTERN_CNT);
9918 for (size_t i = 0; i < count; i++) {
9919 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9920 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9921 if (NAME_NOT_FOUND != testpatternMode) {
9922 avail_testpattern_modes[size] = testpatternMode;
9923 size++;
9924 }
9925 }
9926 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9927 avail_testpattern_modes,
9928 size);
9929
9930 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9931 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9932 &max_pipeline_depth,
9933 1);
9934
9935 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9936 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9937 &partial_result_count,
9938 1);
9939
9940 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9941 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9942
9943 Vector<uint8_t> available_capabilities;
9944 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9945 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9946 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9947 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9948 if (supportBurst) {
9949 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9950 }
9951 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9952 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9953 if (hfrEnable && available_hfr_configs.array()) {
9954 available_capabilities.add(
9955 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9956 }
9957
9958 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9959 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9960 }
9961 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9962 available_capabilities.array(),
9963 available_capabilities.size());
9964
 9965    //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
 9966    //The assumption is that all Bayer cameras support MANUAL_SENSOR.
9967 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9968 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9969
9970 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9971 &aeLockAvailable, 1);
9972
 9973    //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
 9974    //BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9975 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9976 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9977
9978 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9979 &awbLockAvailable, 1);
9980
9981 int32_t max_input_streams = 1;
9982 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9983 &max_input_streams,
9984 1);
9985
9986 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9987 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9988 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9989 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9990 HAL_PIXEL_FORMAT_YCbCr_420_888};
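    // Read as: IMPLEMENTATION_DEFINED input reprocesses to {BLOB, YCbCr_420_888}, and
    // YCbCr_420_888 input reprocesses to {BLOB, YCbCr_420_888}.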
9991 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9992 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9993
9994 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9995 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9996 &max_latency,
9997 1);
9998
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009999#ifndef USE_HAL_3_3
10000 int32_t isp_sensitivity_range[2];
10001 isp_sensitivity_range[0] =
10002 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10003 isp_sensitivity_range[1] =
10004 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10005 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10006 isp_sensitivity_range,
10007 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10008#endif
10009
Thierry Strudel3d639192016-09-09 11:52:26 -070010010 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10011 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10012 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10013 available_hot_pixel_modes,
10014 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10015
10016 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10017 ANDROID_SHADING_MODE_FAST,
10018 ANDROID_SHADING_MODE_HIGH_QUALITY};
10019 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10020 available_shading_modes,
10021 3);
10022
10023 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10024 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10025 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10026 available_lens_shading_map_modes,
10027 2);
10028
10029 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10030 ANDROID_EDGE_MODE_FAST,
10031 ANDROID_EDGE_MODE_HIGH_QUALITY,
10032 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10033 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10034 available_edge_modes,
10035 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10036
10037 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10038 ANDROID_NOISE_REDUCTION_MODE_FAST,
10039 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10040 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10041 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10042 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10043 available_noise_red_modes,
10044 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10045
10046 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10047 ANDROID_TONEMAP_MODE_FAST,
10048 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10049 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10050 available_tonemap_modes,
10051 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10052
10053 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10054 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10055 available_hot_pixel_map_modes,
10056 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10057
10058 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10059 gCamCapability[cameraId]->reference_illuminant1);
10060 if (NAME_NOT_FOUND != val) {
10061 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10062 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10063 }
10064
10065 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10066 gCamCapability[cameraId]->reference_illuminant2);
10067 if (NAME_NOT_FOUND != val) {
10068 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10069 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10070 }
10071
10072 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10073 (void *)gCamCapability[cameraId]->forward_matrix1,
10074 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10075
10076 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10077 (void *)gCamCapability[cameraId]->forward_matrix2,
10078 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10079
10080 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10081 (void *)gCamCapability[cameraId]->color_transform1,
10082 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10083
10084 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10085 (void *)gCamCapability[cameraId]->color_transform2,
10086 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10087
10088 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10089 (void *)gCamCapability[cameraId]->calibration_transform1,
10090 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10091
10092 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10093 (void *)gCamCapability[cameraId]->calibration_transform2,
10094 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10095
10096 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10097 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10098 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10099 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10100 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10101 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10102 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10103 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10104 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10105 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10106 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10107 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10108 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10109 ANDROID_JPEG_GPS_COORDINATES,
10110 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10111 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10112 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10113 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10114 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10115 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10116 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10117 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10118 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10119 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010120#ifndef USE_HAL_3_3
10121 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10122#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010123 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010124 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010125 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10126 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010127 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010128 /* DevCamDebug metadata request_keys_basic */
10129 DEVCAMDEBUG_META_ENABLE,
10130 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010131 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010132 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010133 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010134 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010135 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010136 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010137
10138 size_t request_keys_cnt =
10139 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10140 Vector<int32_t> available_request_keys;
10141 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10142 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10143 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10144 }
10145
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010146 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +000010147 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010148 }
10149
Thierry Strudel3d639192016-09-09 11:52:26 -070010150 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10151 available_request_keys.array(), available_request_keys.size());
10152
10153 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10154 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10155 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10156 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10157 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10158 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10159 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10160 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10161 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10162 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10163 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10164 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10165 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10166 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10167 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10168 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10169 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010170 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010171 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10172 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10173 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010174 ANDROID_STATISTICS_FACE_SCORES,
10175#ifndef USE_HAL_3_3
10176 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10177#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010178 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010179 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010180 // DevCamDebug metadata result_keys_basic
10181 DEVCAMDEBUG_META_ENABLE,
10182 // DevCamDebug metadata result_keys AF
10183 DEVCAMDEBUG_AF_LENS_POSITION,
10184 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10185 DEVCAMDEBUG_AF_TOF_DISTANCE,
10186 DEVCAMDEBUG_AF_LUMA,
10187 DEVCAMDEBUG_AF_HAF_STATE,
10188 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10189 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10190 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10191 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10192 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10193 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10194 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10195 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10196 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10197 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10198 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10199 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10200 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10201 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10202 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10203 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10204 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10205 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10206 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10207 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10208 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10209 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10210 // DevCamDebug metadata result_keys AEC
10211 DEVCAMDEBUG_AEC_TARGET_LUMA,
10212 DEVCAMDEBUG_AEC_COMP_LUMA,
10213 DEVCAMDEBUG_AEC_AVG_LUMA,
10214 DEVCAMDEBUG_AEC_CUR_LUMA,
10215 DEVCAMDEBUG_AEC_LINECOUNT,
10216 DEVCAMDEBUG_AEC_REAL_GAIN,
10217 DEVCAMDEBUG_AEC_EXP_INDEX,
10218 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010219 // DevCamDebug metadata result_keys zzHDR
10220 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10221 DEVCAMDEBUG_AEC_L_LINECOUNT,
10222 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10223 DEVCAMDEBUG_AEC_S_LINECOUNT,
10224 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10225 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10226 // DevCamDebug metadata result_keys ADRC
10227 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10228 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10229 DEVCAMDEBUG_AEC_GTM_RATIO,
10230 DEVCAMDEBUG_AEC_LTM_RATIO,
10231 DEVCAMDEBUG_AEC_LA_RATIO,
10232 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010233 // DevCamDebug metadata result_keys AWB
10234 DEVCAMDEBUG_AWB_R_GAIN,
10235 DEVCAMDEBUG_AWB_G_GAIN,
10236 DEVCAMDEBUG_AWB_B_GAIN,
10237 DEVCAMDEBUG_AWB_CCT,
10238 DEVCAMDEBUG_AWB_DECISION,
10239 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010240 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10241 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10242 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010243 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010244 };
10245
Thierry Strudel3d639192016-09-09 11:52:26 -070010246 size_t result_keys_cnt =
10247 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10248
10249 Vector<int32_t> available_result_keys;
10250 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10251 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10252 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10253 }
10254 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10255 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10256 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10257 }
10258 if (supportedFaceDetectMode == 1) {
10259 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10260 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10261 } else if ((supportedFaceDetectMode == 2) ||
10262 (supportedFaceDetectMode == 3)) {
10263 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10264 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10265 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010266#ifndef USE_HAL_3_3
10267 if (hasBlackRegions) {
10268 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10269 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10270 }
10271#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010272
10273 if (gExposeEnableZslKey) {
10274 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10275 }
10276
Thierry Strudel3d639192016-09-09 11:52:26 -070010277 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10278 available_result_keys.array(), available_result_keys.size());
10279
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010280 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010281 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10282 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10283 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10284 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10285 ANDROID_SCALER_CROPPING_TYPE,
10286 ANDROID_SYNC_MAX_LATENCY,
10287 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10288 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10289 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10290 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10291 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10292 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10293 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10294 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10295 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10296 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10297 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10298 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10299 ANDROID_LENS_FACING,
10300 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10301 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10302 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10303 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10304 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10305 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10306 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10307 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10308 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10309 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10310 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10311 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10312 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10313 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10314 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10315 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10316 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10317 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10318 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10319 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010320 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010321 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10322 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10323 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10324 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10325 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10326 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10327 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10328 ANDROID_CONTROL_AVAILABLE_MODES,
10329 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10330 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10331 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10332 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010333 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10334#ifndef USE_HAL_3_3
10335 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10336 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10337#endif
10338 };
10339
10340 Vector<int32_t> available_characteristics_keys;
10341 available_characteristics_keys.appendArray(characteristics_keys_basic,
10342 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10343#ifndef USE_HAL_3_3
10344 if (hasBlackRegions) {
10345 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10346 }
10347#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010348
10349 if (0 <= indexPD) {
10350 int32_t depthKeys[] = {
10351 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10352 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10353 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10354 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10355 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10356 };
10357 available_characteristics_keys.appendArray(depthKeys,
10358 sizeof(depthKeys) / sizeof(depthKeys[0]));
10359 }
10360
Thierry Strudel3d639192016-09-09 11:52:26 -070010361 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010362 available_characteristics_keys.array(),
10363 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010364
10365 /*available stall durations depend on the hw + sw and will be different for different devices */
10366 /*have to add for raw after implementation*/
10367 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10368 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10369
10370 Vector<int64_t> available_stall_durations;
10371 for (uint32_t j = 0; j < stall_formats_count; j++) {
10372 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10373 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10374 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10375 available_stall_durations.add(stall_formats[j]);
10376 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10377 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10378 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10379 }
10380 } else {
10381 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10382 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10383 available_stall_durations.add(stall_formats[j]);
10384 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10385 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10386 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10387 }
10388 }
10389 }
10390 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10391 available_stall_durations.array(),
10392 available_stall_durations.size());
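    // Note: each ANDROID_SCALER_AVAILABLE_STALL_DURATIONS entry above is a
    // (format, width, height, stall_duration_ns) quadruplet; an illustrative
    // BLOB entry could look like {HAL_PIXEL_FORMAT_BLOB, 4032, 3024, 33333333},
    // with the real values taken from the capability tables queried above.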
10393
10394 //QCAMERA3_OPAQUE_RAW
10395 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10396 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10397 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10398 case LEGACY_RAW:
10399 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10400 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10401 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10402 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10403 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10404 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10405 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10406 break;
10407 case MIPI_RAW:
10408 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10409 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10410 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10411 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10412 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10413 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10414 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10415 break;
10416 default:
10417 LOGE("unknown opaque_raw_format %d",
10418 gCamCapability[cameraId]->opaque_raw_fmt);
10419 break;
10420 }
10421 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10422
10423 Vector<int32_t> strides;
10424 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10425 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10426 cam_stream_buf_plane_info_t buf_planes;
10427 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10428 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10429 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10430 &gCamCapability[cameraId]->padding_info, &buf_planes);
10431 strides.add(buf_planes.plane_info.mp[0].stride);
10432 }
10433 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10434 strides.size());
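    // Note: QCAMERA3_OPAQUE_RAW_STRIDES is published as (width, height, stride)
    // triplets, one per supported raw dimension, where the stride is whatever
    // mm_stream_calc_offset_raw() reports for plane 0 of the chosen opaque format.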
10435
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010436 //TBD: remove the following line once backend advertises zzHDR in feature mask
10437 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010438 //Video HDR default
10439 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10440 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010441 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010442 int32_t vhdr_mode[] = {
10443 QCAMERA3_VIDEO_HDR_MODE_OFF,
10444 QCAMERA3_VIDEO_HDR_MODE_ON};
10445
10446 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10447 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10448 vhdr_mode, vhdr_mode_count);
10449 }
10450
Thierry Strudel3d639192016-09-09 11:52:26 -070010451 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10452 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10453 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10454
10455 uint8_t isMonoOnly =
10456 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10457 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10458 &isMonoOnly, 1);
10459
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010460#ifndef USE_HAL_3_3
10461 Vector<int32_t> opaque_size;
10462 for (size_t j = 0; j < scalar_formats_count; j++) {
10463 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10464 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10465 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10466 cam_stream_buf_plane_info_t buf_planes;
10467
10468 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10469 &gCamCapability[cameraId]->padding_info, &buf_planes);
10470
10471 if (rc == 0) {
10472 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10473 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10474 opaque_size.add(buf_planes.plane_info.frame_len);
10475 } else {
10476 LOGE("raw frame calculation failed!");
10477 }
10478 }
10479 }
10480 }
10481
10482 if ((opaque_size.size() > 0) &&
10483 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10484 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10485 else
10486 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10487#endif
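    // Note: ANDROID_SENSOR_OPAQUE_RAW_SIZE likewise uses PER_CONFIGURATION_SIZE_3
    // entries of (width, height, frame length in bytes) per supported raw dimension.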
10488
Thierry Strudel04e026f2016-10-10 11:27:36 -070010489 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10490 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10491 size = 0;
10492 count = CAM_IR_MODE_MAX;
10493 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10494 for (size_t i = 0; i < count; i++) {
10495 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10496 gCamCapability[cameraId]->supported_ir_modes[i]);
10497 if (NAME_NOT_FOUND != val) {
10498 avail_ir_modes[size] = (int32_t)val;
10499 size++;
10500 }
10501 }
10502 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10503 avail_ir_modes, size);
10504 }
10505
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010506 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10507 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10508 size = 0;
10509 count = CAM_AEC_CONVERGENCE_MAX;
10510 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10511 for (size_t i = 0; i < count; i++) {
10512 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10513 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10514 if (NAME_NOT_FOUND != val) {
10515 available_instant_aec_modes[size] = (int32_t)val;
10516 size++;
10517 }
10518 }
10519 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10520 available_instant_aec_modes, size);
10521 }
10522
Thierry Strudel54dc9782017-02-15 12:12:10 -080010523 int32_t sharpness_range[] = {
10524 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10525 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10526 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10527
10528 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10529 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10530 size = 0;
10531 count = CAM_BINNING_CORRECTION_MODE_MAX;
10532 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10533 for (size_t i = 0; i < count; i++) {
10534 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10535 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10536 gCamCapability[cameraId]->supported_binning_modes[i]);
10537 if (NAME_NOT_FOUND != val) {
10538 avail_binning_modes[size] = (int32_t)val;
10539 size++;
10540 }
10541 }
10542 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10543 avail_binning_modes, size);
10544 }
10545
10546 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10547 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10548 size = 0;
10549 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10550 for (size_t i = 0; i < count; i++) {
10551 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10552 gCamCapability[cameraId]->supported_aec_modes[i]);
10553 if (NAME_NOT_FOUND != val)
10554 available_aec_modes[size++] = val;
10555 }
10556 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10557 available_aec_modes, size);
10558 }
10559
10560 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10561 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10562 size = 0;
10563 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10564 for (size_t i = 0; i < count; i++) {
10565 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10566 gCamCapability[cameraId]->supported_iso_modes[i]);
10567 if (NAME_NOT_FOUND != val)
10568 available_iso_modes[size++] = val;
10569 }
10570 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10571 available_iso_modes, size);
10572 }
10573
10574 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010575 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010576 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10577 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10578 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10579
10580 int32_t available_saturation_range[4];
10581 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10582 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10583 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10584 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10585 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10586 available_saturation_range, 4);
10587
10588 uint8_t is_hdr_values[2];
10589 is_hdr_values[0] = 0;
10590 is_hdr_values[1] = 1;
10591 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10592 is_hdr_values, 2);
10593
10594 float is_hdr_confidence_range[2];
10595 is_hdr_confidence_range[0] = 0.0;
10596 is_hdr_confidence_range[1] = 1.0;
10597 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10598 is_hdr_confidence_range, 2);
10599
Emilian Peev0a972ef2017-03-16 10:25:53 +000010600 size_t eepromLength = strnlen(
10601 reinterpret_cast<const char *>(
10602 gCamCapability[cameraId]->eeprom_version_info),
10603 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10604 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010605 char easelInfo[] = ",E:N";
10606 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10607 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10608 eepromLength += sizeof(easelInfo);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010609 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
10610 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E:Y" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010611 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010612 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010613 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10614 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10615 }
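    // For example (illustrative value only), an EEPROM string of "01.02.03" would
    // be reported as "01.02.03,E:Y" when Easel is present on the device, or as
    // "01.02.03,E:N" otherwise.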
10616
Thierry Strudel3d639192016-09-09 11:52:26 -070010617 gStaticMetadata[cameraId] = staticInfo.release();
10618 return rc;
10619}
10620
10621/*===========================================================================
10622 * FUNCTION : makeTable
10623 *
10624 * DESCRIPTION: make a table of sizes
10625 *
10626 * PARAMETERS : @dimTable : source array of cam_dimension_t entries
10627 *              @size / @max_size : number of valid entries and copy cap
10628 *              @sizeTable : output array receiving flattened (width, height) pairs
10629 *==========================================================================*/
10630void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10631 size_t max_size, int32_t *sizeTable)
10632{
10633 size_t j = 0;
10634 if (size > max_size) {
10635 size = max_size;
10636 }
10637 for (size_t i = 0; i < size; i++) {
10638 sizeTable[j] = dimTable[i].width;
10639 sizeTable[j+1] = dimTable[i].height;
10640 j+=2;
10641 }
10642}
10643
10644/*===========================================================================
10645 * FUNCTION : makeFPSTable
10646 *
10647 * DESCRIPTION: make a table of fps ranges
10648 *
10649 * PARAMETERS :
10650 *
10651 *==========================================================================*/
10652void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10653 size_t max_size, int32_t *fpsRangesTable)
10654{
10655 size_t j = 0;
10656 if (size > max_size) {
10657 size = max_size;
10658 }
10659 for (size_t i = 0; i < size; i++) {
10660 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10661 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10662 j+=2;
10663 }
10664}
10665
10666/*===========================================================================
10667 * FUNCTION : makeOverridesList
10668 *
10669 * DESCRIPTION: make a list of scene mode overrides
10670 *
10671 * PARAMETERS : @overridesTable / @size / @max_size : backend override table, its count and cap
10672 *              @overridesList : output array of (ae, awb, af) override triplets
10673 *              @supported_indexes / @camera_id : indexes of fwk-supported scene modes, camera id
10674 *==========================================================================*/
10675void QCamera3HardwareInterface::makeOverridesList(
10676 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10677 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10678{
10679 /* The daemon gives a list of overrides for all scene modes.
10680 However, we should send the fwk only the overrides for the scene modes
10681 supported by the framework. */
10682 size_t j = 0;
10683 if (size > max_size) {
10684 size = max_size;
10685 }
10686 size_t focus_count = CAM_FOCUS_MODE_MAX;
10687 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10688 focus_count);
10689 for (size_t i = 0; i < size; i++) {
10690 bool supt = false;
10691 size_t index = supported_indexes[i];
10692 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10693 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10694 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10695 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10696 overridesTable[index].awb_mode);
10697 if (NAME_NOT_FOUND != val) {
10698 overridesList[j+1] = (uint8_t)val;
10699 }
10700 uint8_t focus_override = overridesTable[index].af_mode;
10701 for (size_t k = 0; k < focus_count; k++) {
10702 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10703 supt = true;
10704 break;
10705 }
10706 }
10707 if (supt) {
10708 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10709 focus_override);
10710 if (NAME_NOT_FOUND != val) {
10711 overridesList[j+2] = (uint8_t)val;
10712 }
10713 } else {
10714 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10715 }
10716 j+=3;
10717 }
10718}
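// Note: makeOverridesList() emits one (ae_mode, awb_mode, af_mode) triplet per
// framework-supported scene mode, falling back to ANDROID_CONTROL_AF_MODE_OFF
// when the backend's AF override is not a focus mode this sensor supports.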
10719
10720/*===========================================================================
10721 * FUNCTION : filterJpegSizes
10722 *
10723 * DESCRIPTION: Returns the supported JPEG sizes: only processed sizes at least
10724 * as large as active_array_size / downscale_factor are kept
10725 *
10726 * PARAMETERS : @jpegSizes (out), @processedSizes / @processedSizesCnt (in),
10727 *              @maxCount, @active_array_size, @downscale_factor
10728 * RETURN : length of jpegSizes array
10729 *==========================================================================*/
10730
10731size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10732 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10733 uint8_t downscale_factor)
10734{
10735 if (0 == downscale_factor) {
10736 downscale_factor = 1;
10737 }
10738
10739 int32_t min_width = active_array_size.width / downscale_factor;
10740 int32_t min_height = active_array_size.height / downscale_factor;
10741 size_t jpegSizesCnt = 0;
10742 if (processedSizesCnt > maxCount) {
10743 processedSizesCnt = maxCount;
10744 }
10745 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10746 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10747 jpegSizes[jpegSizesCnt] = processedSizes[i];
10748 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10749 jpegSizesCnt += 2;
10750 }
10751 }
10752 return jpegSizesCnt;
10753}
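// Example (hypothetical numbers): with a 4000x3000 active array and a
// downscale_factor of 2, only processed sizes of at least 2000x1500 are copied
// into jpegSizes; smaller sizes are dropped from the advertised JPEG sizes.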
10754
10755/*===========================================================================
10756 * FUNCTION : computeNoiseModelEntryS
10757 *
10758 * DESCRIPTION: function to map a given sensitivity to the S noise
10759 * model parameters in the DNG noise model.
10760 *
10761 * PARAMETERS : sens : the sensor sensitivity
10762 *
10763 * RETURN : S (sensor amplification) noise
10764 *
10765 *==========================================================================*/
10766double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10767 double s = gCamCapability[mCameraId]->gradient_S * sens +
10768 gCamCapability[mCameraId]->offset_S;
10769 return ((s < 0.0) ? 0.0 : s);
10770}
10771
10772/*===========================================================================
10773 * FUNCTION : computeNoiseModelEntryO
10774 *
10775 * DESCRIPTION: function to map a given sensitivity to the O noise
10776 * model parameters in the DNG noise model.
10777 *
10778 * PARAMETERS : sens : the sensor sensitivity
10779 *
10780 * RETURN : O (sensor readout) noise
10781 *
10782 *==========================================================================*/
10783double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10784 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10785 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10786 1.0 : (1.0 * sens / max_analog_sens);
10787 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10788 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10789 return ((o < 0.0) ? 0.0 : o);
10790}
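// Background: the two helpers above feed ANDROID_SENSOR_NOISE_PROFILE, where the
// DNG-style noise model estimates the noise of a normalized pixel value x as
// N(x) = sqrt(S * x + O). Here S grows linearly with sensitivity, while O
// combines a sensitivity-squared term and a digital-gain-squared term, matching
// the computations above.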
10791
10792/*===========================================================================
10793 * FUNCTION : getSensorSensitivity
10794 *
10795 * DESCRIPTION: convert iso_mode to an integer value
10796 *
10797 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10798 *
10799 * RETURN : sensitivity supported by sensor
10800 *
10801 *==========================================================================*/
10802int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10803{
10804 int32_t sensitivity;
10805
10806 switch (iso_mode) {
10807 case CAM_ISO_MODE_100:
10808 sensitivity = 100;
10809 break;
10810 case CAM_ISO_MODE_200:
10811 sensitivity = 200;
10812 break;
10813 case CAM_ISO_MODE_400:
10814 sensitivity = 400;
10815 break;
10816 case CAM_ISO_MODE_800:
10817 sensitivity = 800;
10818 break;
10819 case CAM_ISO_MODE_1600:
10820 sensitivity = 1600;
10821 break;
10822 default:
10823 sensitivity = -1;
10824 break;
10825 }
10826 return sensitivity;
10827}
10828
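/*===========================================================================
 * FUNCTION : initHdrPlusClientLocked
 *
 * DESCRIPTION: create the Easel manager client if needed and, when Easel is
 * present (and not kept off via the camera.hdrplus.donotpoweroneasel property),
 * open it, suspend it immediately and latch the HDR+ related properties.
 * The caller is expected to hold gHdrPlusClientLock.
 *
 * RETURN : OK on success, a negative error code otherwise
 *==========================================================================*/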
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010829int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010830 if (gEaselManagerClient == nullptr) {
10831 gEaselManagerClient = EaselManagerClient::create();
10832 if (gEaselManagerClient == nullptr) {
10833 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10834 return -ENODEV;
10835 }
10836 }
10837
10838 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010839 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10840 // to connect to Easel.
10841 bool doNotpowerOnEasel =
10842 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10843
10844 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010845 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10846 return OK;
10847 }
10848
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010849 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010850 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010851 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010852 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010853 return res;
10854 }
10855
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010856 EaselManagerClientOpened = true;
10857
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010858 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010859 if (res != OK) {
10860 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10861 }
10862
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010863 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010864 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010865
10866 // Expose enableZsl key only when HDR+ mode is enabled.
10867 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010868 }
10869
10870 return OK;
10871}
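// Usage note (assuming the standard Android property tooling): the HDR+ related
// behavior latched above can be toggled from a shell for debugging, e.g.
//   adb shell setprop persist.camera.hdrplus.enable 1      # enable HDR+ (otherwise Easel is bypass-only)
//   adb shell setprop persist.camera.hdrplus.profiling 1   # enable HDR+ profiling
//   adb shell setprop camera.hdrplus.donotpoweroneasel 1   # leave Easel powered off (for HDR+ tests)
// These properties are read when the Easel manager client is first opened.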
10872
Thierry Strudel3d639192016-09-09 11:52:26 -070010873/*===========================================================================
10874 * FUNCTION : getCamInfo
10875 *
10876 * DESCRIPTION: query camera capabilities
10877 *
10878 * PARAMETERS :
10879 * @cameraId : camera Id
10880 * @info : camera info struct to be filled in with camera capabilities
10881 *
10882 * RETURN : int type of status
10883 * NO_ERROR -- success
10884 * none-zero failure code
10885 * non-zero failure code
10886int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10887 struct camera_info *info)
10888{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010889 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010890 int rc = 0;
10891
10892 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010893
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010894 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070010895 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010896 rc = initHdrPlusClientLocked();
10897 if (rc != OK) {
10898 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10899 pthread_mutex_unlock(&gCamLock);
10900 return rc;
10901 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010902 }
10903
Thierry Strudel3d639192016-09-09 11:52:26 -070010904 if (NULL == gCamCapability[cameraId]) {
10905 rc = initCapabilities(cameraId);
10906 if (rc < 0) {
10907 pthread_mutex_unlock(&gCamLock);
10908 return rc;
10909 }
10910 }
10911
10912 if (NULL == gStaticMetadata[cameraId]) {
10913 rc = initStaticMetadata(cameraId);
10914 if (rc < 0) {
10915 pthread_mutex_unlock(&gCamLock);
10916 return rc;
10917 }
10918 }
10919
10920 switch(gCamCapability[cameraId]->position) {
10921 case CAM_POSITION_BACK:
10922 case CAM_POSITION_BACK_AUX:
10923 info->facing = CAMERA_FACING_BACK;
10924 break;
10925
10926 case CAM_POSITION_FRONT:
10927 case CAM_POSITION_FRONT_AUX:
10928 info->facing = CAMERA_FACING_FRONT;
10929 break;
10930
10931 default:
10932 LOGE("Unknown position type %d for camera id:%d",
10933 gCamCapability[cameraId]->position, cameraId);
10934 rc = -1;
10935 break;
10936 }
10937
10938
10939 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010940#ifndef USE_HAL_3_3
10941 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10942#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010943 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010944#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010945 info->static_camera_characteristics = gStaticMetadata[cameraId];
10946
10947 //For now assume both cameras can operate independently.
10948 info->conflicting_devices = NULL;
10949 info->conflicting_devices_length = 0;
10950
10951 //resource cost is 100 * MIN(1.0, m/M),
10952 //where m is throughput requirement with maximum stream configuration
10953 //and M is CPP maximum throughput.
10954 float max_fps = 0.0;
10955 for (uint32_t i = 0;
10956 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10957 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10958 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10959 }
10960 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10961 gCamCapability[cameraId]->active_array_size.width *
10962 gCamCapability[cameraId]->active_array_size.height * max_fps /
10963 gCamCapability[cameraId]->max_pixel_bandwidth;
10964 info->resource_cost = 100 * MIN(1.0, ratio);
10965 LOGI("camera %d resource cost is %d", cameraId,
10966 info->resource_cost);
10967
10968 pthread_mutex_unlock(&gCamLock);
10969 return rc;
10970}
10971
10972/*===========================================================================
10973 * FUNCTION : translateCapabilityToMetadata
10974 *
10975 * DESCRIPTION: translate the capability into camera_metadata_t
10976 *
10977 * PARAMETERS : type of the request
10978 *
10979 *
10980 * RETURN : success: camera_metadata_t*
10981 * failure: NULL
10982 *
10983 *==========================================================================*/
10984camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10985{
10986 if (mDefaultMetadata[type] != NULL) {
10987 return mDefaultMetadata[type];
10988 }
10989 //first time we are handling this request
10990 //fill up the metadata structure using the wrapper class
10991 CameraMetadata settings;
10992 //translate from cam_capability_t to camera_metadata_tag_t
10993 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10994 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10995 int32_t defaultRequestID = 0;
10996 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10997
10998 /* OIS disable */
10999 char ois_prop[PROPERTY_VALUE_MAX];
11000 memset(ois_prop, 0, sizeof(ois_prop));
11001 property_get("persist.camera.ois.disable", ois_prop, "0");
11002 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11003
11004 /* Force video to use OIS */
11005 char videoOisProp[PROPERTY_VALUE_MAX];
11006 memset(videoOisProp, 0, sizeof(videoOisProp));
11007 property_get("persist.camera.ois.video", videoOisProp, "1");
11008 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011009
11010 // Hybrid AE enable/disable
11011 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11012 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11013 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
11014 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
11015
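    // Debugging note (illustrative): the defaults derived from the properties
    // above can be adjusted from a shell, e.g.
    //   adb shell setprop persist.camera.ois.disable 1        # request OIS off by default (where the device allows it)
    //   adb shell setprop persist.camera.ois.video 0          # do not force OIS on for video templates
    //   adb shell setprop persist.camera.hybrid_ae.enable 1   # default hybrid AE to enabled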
Thierry Strudel3d639192016-09-09 11:52:26 -070011016 uint8_t controlIntent = 0;
11017 uint8_t focusMode;
11018 uint8_t vsMode;
11019 uint8_t optStabMode;
11020 uint8_t cacMode;
11021 uint8_t edge_mode;
11022 uint8_t noise_red_mode;
11023 uint8_t tonemap_mode;
11024 bool highQualityModeEntryAvailable = FALSE;
11025 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011026 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011027 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11028 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011029 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011030 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011031 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011032
Thierry Strudel3d639192016-09-09 11:52:26 -070011033 switch (type) {
11034 case CAMERA3_TEMPLATE_PREVIEW:
11035 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11036 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11037 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11038 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11039 edge_mode = ANDROID_EDGE_MODE_FAST;
11040 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11041 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11042 break;
11043 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11044 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11045 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11046 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11047 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11048 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11049 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11050 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11051 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11052 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11053 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11054 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11055 highQualityModeEntryAvailable = TRUE;
11056 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11057 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11058 fastModeEntryAvailable = TRUE;
11059 }
11060 }
11061 if (highQualityModeEntryAvailable) {
11062 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11063 } else if (fastModeEntryAvailable) {
11064 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11065 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011066 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11067 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11068 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011069 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011070 break;
11071 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11072 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11073 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11074 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011075 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11076 edge_mode = ANDROID_EDGE_MODE_FAST;
11077 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11078 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11079 if (forceVideoOis)
11080 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11081 break;
11082 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11083 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11084 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11085 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011086 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11087 edge_mode = ANDROID_EDGE_MODE_FAST;
11088 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11089 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11090 if (forceVideoOis)
11091 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11092 break;
11093 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11094 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11095 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11096 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11097 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11098 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11099 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11100 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11101 break;
11102 case CAMERA3_TEMPLATE_MANUAL:
11103 edge_mode = ANDROID_EDGE_MODE_FAST;
11104 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11105 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11106 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11107 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11108 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11109 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11110 break;
11111 default:
11112 edge_mode = ANDROID_EDGE_MODE_FAST;
11113 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11114 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11115 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11116 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11117 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11118 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11119 break;
11120 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011121 // Set CAC to OFF if the underlying device doesn't support it
11122 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11123 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11124 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011125 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11126 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11127 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11128 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11129 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11130 }
11131 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011132 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011133 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011134
11135 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11136 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11137 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11138 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11139 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11140 || ois_disable)
11141 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11142 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011143 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011144
11145 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11146 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11147
11148 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11149 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11150
11151 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11152 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11153
11154 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11155 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11156
11157 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11158 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11159
11160 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11161 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11162
11163 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11164 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11165
11166 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11167 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11168
11169 /*flash*/
11170 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11171 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11172
11173 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11174 settings.update(ANDROID_FLASH_FIRING_POWER,
11175 &flashFiringLevel, 1);
11176
11177 /* lens */
11178 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11179 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11180
11181 if (gCamCapability[mCameraId]->filter_densities_count) {
11182 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11183 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11184 gCamCapability[mCameraId]->filter_densities_count);
11185 }
11186
11187 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11188 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11189
Thierry Strudel3d639192016-09-09 11:52:26 -070011190 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11191 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11192
11193 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11194 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11195
11196 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11197 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11198
11199 /* face detection (default to OFF) */
11200 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11201 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11202
Thierry Strudel54dc9782017-02-15 12:12:10 -080011203 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11204 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011205
11206 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11207 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11208
11209 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11210 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11211
Thierry Strudel3d639192016-09-09 11:52:26 -070011212
11213 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11214 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11215
11216 /* Exposure time(Update the Min Exposure Time)*/
11217 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11218 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11219
11220 /* frame duration */
11221 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11222 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11223
11224 /* sensitivity */
11225 static const int32_t default_sensitivity = 100;
11226 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011227#ifndef USE_HAL_3_3
11228 static const int32_t default_isp_sensitivity =
11229 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11230 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11231#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011232
11233 /*edge mode*/
11234 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11235
11236 /*noise reduction mode*/
11237 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11238
11239 /*color correction mode*/
11240 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11241 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11242
11243 /*transform matrix mode*/
11244 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11245
11246 int32_t scaler_crop_region[4];
11247 scaler_crop_region[0] = 0;
11248 scaler_crop_region[1] = 0;
11249 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11250 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11251 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11252
11253 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11254 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11255
11256 /*focus distance*/
11257 float focus_distance = 0.0;
11258 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11259
11260 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011261 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011262 float max_range = 0.0;
11263 float max_fixed_fps = 0.0;
11264 int32_t fps_range[2] = {0, 0};
11265 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11266 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011267 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11268 TEMPLATE_MAX_PREVIEW_FPS) {
11269 continue;
11270 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011271 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11272 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11273 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11274 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11275 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11276 if (range > max_range) {
11277 fps_range[0] =
11278 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11279 fps_range[1] =
11280 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11281 max_range = range;
11282 }
11283 } else {
11284 if (range < 0.01 && max_fixed_fps <
11285 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11286 fps_range[0] =
11287 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11288 fps_range[1] =
11289 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11290 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11291 }
11292 }
11293 }
11294 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
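    // Worked example (hypothetical fps table): given {[15,30], [30,30], [24,60]},
    // [24,60] is skipped because its max exceeds TEMPLATE_MAX_PREVIEW_FPS;
    // preview/still/ZSL templates pick the widest remaining range, [15,30], while
    // the other templates pick the highest fixed range, [30,30].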
11295
11296 /*precapture trigger*/
11297 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11298 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11299
11300 /*af trigger*/
11301 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11302 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11303
11304 /* ae & af regions */
11305 int32_t active_region[] = {
11306 gCamCapability[mCameraId]->active_array_size.left,
11307 gCamCapability[mCameraId]->active_array_size.top,
11308 gCamCapability[mCameraId]->active_array_size.left +
11309 gCamCapability[mCameraId]->active_array_size.width,
11310 gCamCapability[mCameraId]->active_array_size.top +
11311 gCamCapability[mCameraId]->active_array_size.height,
11312 0};
11313 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11314 sizeof(active_region) / sizeof(active_region[0]));
11315 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11316 sizeof(active_region) / sizeof(active_region[0]));
11317
11318 /* black level lock */
11319 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11320 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11321
Thierry Strudel3d639192016-09-09 11:52:26 -070011322 //special defaults for manual template
11323 if (type == CAMERA3_TEMPLATE_MANUAL) {
11324 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11325 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11326
11327 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11328 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11329
11330 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11331 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11332
11333 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11334 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11335
11336 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11337 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11338
11339 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11340 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11341 }
11342
11343
11344 /* TNR
11345 * This is where we decide for which templates TNR is enabled by default.
11346 * TNR is enabled if either the preview or the video stream requires it.
11347 * This is not to be confused with per-stream TNR linking; that decision is
11348 * still made on a per-session basis and is handled as part of stream configuration.
11349 */
11350 uint8_t tnr_enable = 0;
11351
11352 if (m_bTnrPreview || m_bTnrVideo) {
11353
11354 switch (type) {
11355 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11356 tnr_enable = 1;
11357 break;
11358
11359 default:
11360 tnr_enable = 0;
11361 break;
11362 }
11363
11364 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11365 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11366 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11367
11368 LOGD("TNR:%d with process plate %d for template:%d",
11369 tnr_enable, tnr_process_type, type);
11370 }
11371
11372 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011373 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011374 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11375
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011376 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011377 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11378
Shuzhen Wang920ea402017-05-03 08:49:39 -070011379 uint8_t related_camera_id = mCameraId;
11380 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011381
11382 /* CDS default */
11383 char prop[PROPERTY_VALUE_MAX];
11384 memset(prop, 0, sizeof(prop));
11385 property_get("persist.camera.CDS", prop, "Auto");
11386 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11387 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11388 if (CAM_CDS_MODE_MAX == cds_mode) {
11389 cds_mode = CAM_CDS_MODE_AUTO;
11390 }
11391
11392 /* Disabling CDS in templates which have TNR enabled*/
11393 if (tnr_enable)
11394 cds_mode = CAM_CDS_MODE_OFF;
11395
11396 int32_t mode = cds_mode;
11397 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011398
Thierry Strudel269c81a2016-10-12 12:13:59 -070011399 /* Manual Convergence AEC Speed is disabled by default*/
11400 float default_aec_speed = 0;
11401 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11402
11403 /* Manual Convergence AWB Speed is disabled by default*/
11404 float default_awb_speed = 0;
11405 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11406
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011407 // Set instant AEC to normal convergence by default
11408 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11409 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11410
Shuzhen Wang19463d72016-03-08 11:09:52 -080011411 /* hybrid ae */
11412 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11413
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011414 if (gExposeEnableZslKey) {
11415 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11416 }
11417
Thierry Strudel3d639192016-09-09 11:52:26 -070011418 mDefaultMetadata[type] = settings.release();
11419
11420 return mDefaultMetadata[type];
11421}
11422
11423/*===========================================================================
11424 * FUNCTION : setFrameParameters
11425 *
11426 * DESCRIPTION: set parameters per frame as requested in the metadata from
11427 * framework
11428 *
11429 * PARAMETERS :
11430 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011431 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011432 * @blob_request: Whether this request is a blob request or not
11433 *
11434 * RETURN : success: NO_ERROR
11435 * failure: non-zero error code
11436 *==========================================================================*/
11437int QCamera3HardwareInterface::setFrameParameters(
11438 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011439 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011440 int blob_request,
11441 uint32_t snapshotStreamId)
11442{
11443 /*translate from camera_metadata_t type to parm_type_t*/
11444 int rc = 0;
11445 int32_t hal_version = CAM_HAL_V3;
11446
11447 clear_metadata_buffer(mParameters);
11448 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11449 LOGE("Failed to set hal version in the parameters");
11450 return BAD_VALUE;
11451 }
11452
11453 /*we need to update the frame number in the parameters*/
11454 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11455 request->frame_number)) {
11456 LOGE("Failed to set the frame number in the parameters");
11457 return BAD_VALUE;
11458 }
11459
11460 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011461 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011462 LOGE("Failed to set stream type mask in the parameters");
11463 return BAD_VALUE;
11464 }
11465
11466 if (mUpdateDebugLevel) {
11467 /* The value of dummyDebugLevel is irrelevant. On
11468 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is read */
11469 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11470 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11471 dummyDebugLevel)) {
11472 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11473 return BAD_VALUE;
11474 }
11475 mUpdateDebugLevel = false;
11476 }
11477
11478 if(request->settings != NULL){
11479 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11480 if (blob_request)
11481 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11482 }
11483
11484 return rc;
11485}
11486
11487/*===========================================================================
11488 * FUNCTION : setReprocParameters
11489 *
11490 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11491 * DESCRIPTION: Translate framework metadata to the HAL metadata structure, and
11492 *
11493 * PARAMETERS :
11494 * @request : request that needs to be serviced
11495 *
11496 * RETURN : success: NO_ERROR
11497 * failure: non-zero error code
11498 *==========================================================================*/
11499int32_t QCamera3HardwareInterface::setReprocParameters(
11500 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11501 uint32_t snapshotStreamId)
11502{
11503 /*translate from camera_metadata_t type to parm_type_t*/
11504 int rc = 0;
11505
11506 if (NULL == request->settings){
11507 LOGE("Reprocess settings cannot be NULL");
11508 return BAD_VALUE;
11509 }
11510
11511 if (NULL == reprocParam) {
11512 LOGE("Invalid reprocessing metadata buffer");
11513 return BAD_VALUE;
11514 }
11515 clear_metadata_buffer(reprocParam);
11516
11517 /*we need to update the frame number in the parameters*/
11518 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11519 request->frame_number)) {
11520 LOGE("Failed to set the frame number in the parameters");
11521 return BAD_VALUE;
11522 }
11523
11524 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11525 if (rc < 0) {
11526 LOGE("Failed to translate reproc request");
11527 return rc;
11528 }
11529
11530 CameraMetadata frame_settings;
11531 frame_settings = request->settings;
11532 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11533 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11534 int32_t *crop_count =
11535 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11536 int32_t *crop_data =
11537 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11538 int32_t *roi_map =
11539 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11540 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11541 cam_crop_data_t crop_meta;
11542 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11543 crop_meta.num_of_streams = 1;
11544 crop_meta.crop_info[0].crop.left = crop_data[0];
11545 crop_meta.crop_info[0].crop.top = crop_data[1];
11546 crop_meta.crop_info[0].crop.width = crop_data[2];
11547 crop_meta.crop_info[0].crop.height = crop_data[3];
11548
11549 crop_meta.crop_info[0].roi_map.left =
11550 roi_map[0];
11551 crop_meta.crop_info[0].roi_map.top =
11552 roi_map[1];
11553 crop_meta.crop_info[0].roi_map.width =
11554 roi_map[2];
11555 crop_meta.crop_info[0].roi_map.height =
11556 roi_map[3];
11557
11558 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11559 rc = BAD_VALUE;
11560 }
11561 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11562 request->input_buffer->stream,
11563 crop_meta.crop_info[0].crop.left,
11564 crop_meta.crop_info[0].crop.top,
11565 crop_meta.crop_info[0].crop.width,
11566 crop_meta.crop_info[0].crop.height);
11567 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11568 request->input_buffer->stream,
11569 crop_meta.crop_info[0].roi_map.left,
11570 crop_meta.crop_info[0].roi_map.top,
11571 crop_meta.crop_info[0].roi_map.width,
11572 crop_meta.crop_info[0].roi_map.height);
11573 } else {
11574 LOGE("Invalid reprocess crop count %d!", *crop_count);
11575 }
11576 } else {
11577 LOGE("No crop data from matching output stream");
11578 }
11579
11580 /* These settings are not needed for regular requests, so handle them specially for
11581 reprocess requests; they carry information needed for EXIF tags */
11582 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11583 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11584 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11585 if (NAME_NOT_FOUND != val) {
11586 uint32_t flashMode = (uint32_t)val;
11587 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11588 rc = BAD_VALUE;
11589 }
11590 } else {
11591 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11592 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11593 }
11594 } else {
11595 LOGH("No flash mode in reprocess settings");
11596 }
11597
11598 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11599 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11600 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11601 rc = BAD_VALUE;
11602 }
11603 } else {
11604 LOGH("No flash state in reprocess settings");
11605 }
11606
11607 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11608 uint8_t *reprocessFlags =
11609 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11610 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11611 *reprocessFlags)) {
11612 rc = BAD_VALUE;
11613 }
11614 }
11615
Thierry Strudel54dc9782017-02-15 12:12:10 -080011616 // Add exif debug data to internal metadata
11617 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11618 mm_jpeg_debug_exif_params_t *debug_params =
11619 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11620 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11621 // AE
11622 if (debug_params->ae_debug_params_valid == TRUE) {
11623 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11624 debug_params->ae_debug_params);
11625 }
11626 // AWB
11627 if (debug_params->awb_debug_params_valid == TRUE) {
11628 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11629 debug_params->awb_debug_params);
11630 }
11631 // AF
11632 if (debug_params->af_debug_params_valid == TRUE) {
11633 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11634 debug_params->af_debug_params);
11635 }
11636 // ASD
11637 if (debug_params->asd_debug_params_valid == TRUE) {
11638 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11639 debug_params->asd_debug_params);
11640 }
11641 // Stats
11642 if (debug_params->stats_debug_params_valid == TRUE) {
11643 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11644 debug_params->stats_debug_params);
11645 }
11646 // BE Stats
11647 if (debug_params->bestats_debug_params_valid == TRUE) {
11648 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11649 debug_params->bestats_debug_params);
11650 }
11651 // BHIST
11652 if (debug_params->bhist_debug_params_valid == TRUE) {
11653 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11654 debug_params->bhist_debug_params);
11655 }
11656 // 3A Tuning
11657 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11658 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11659 debug_params->q3a_tuning_debug_params);
11660 }
11661 }
11662
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011663 // Add metadata which reprocess needs
11664 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11665 cam_reprocess_info_t *repro_info =
11666 (cam_reprocess_info_t *)frame_settings.find
11667 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011668 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011669 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011670 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011671 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011672 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011673 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011674 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011675 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011676 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011677 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011678 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011679 repro_info->pipeline_flip);
11680 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11681 repro_info->af_roi);
11682 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11683 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011684 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11685 CAM_INTF_PARM_ROTATION metadata has already been added in
11686 translateToHalMetadata, and the HAL needs to keep this new rotation
11687 metadata. Otherwise, the old rotation info saved in the vendor tag
11688 would be used */
11689 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11690 CAM_INTF_PARM_ROTATION, reprocParam) {
11691 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11692 } else {
11693 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011694 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011695 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011696 }
11697
11698 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11699 to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11700 roi.width and roi.height would be the final JPEG size.
11701 For now, the HAL only checks this for reprocess requests */
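/* Illustrative example (hypothetical values): with QCAMERA3_JPEG_ENCODE_CROP_RECT set to
 * {0, 0, 3840, 2160} and QCAMERA3_JPEG_ENCODE_CROP_ROI set to {0, 0, 1920, 1080}, the
 * 3840x2160 region of the CPP output is cropped and then scaled down so that the final
 * encoded JPEG is 1920x1080. */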
11702 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11703 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11704 uint8_t *enable =
11705 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11706 if (*enable == TRUE) {
11707 int32_t *crop_data =
11708 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11709 cam_stream_crop_info_t crop_meta;
11710 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11711 crop_meta.stream_id = 0;
11712 crop_meta.crop.left = crop_data[0];
11713 crop_meta.crop.top = crop_data[1];
11714 crop_meta.crop.width = crop_data[2];
11715 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011716 // The JPEG crop roi should match cpp output size
11717 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11718 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11719 crop_meta.roi_map.left = 0;
11720 crop_meta.roi_map.top = 0;
11721 crop_meta.roi_map.width = cpp_crop->crop.width;
11722 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011723 }
11724 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11725 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011726 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011727 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011728 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11729 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011730 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011731 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11732
11733 // Add JPEG scale information
11734 cam_dimension_t scale_dim;
11735 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11736 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11737 int32_t *roi =
11738 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11739 scale_dim.width = roi[2];
11740 scale_dim.height = roi[3];
11741 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11742 scale_dim);
11743 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11744 scale_dim.width, scale_dim.height, mCameraId);
11745 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011746 }
11747 }
11748
11749 return rc;
11750}
11751
11752/*===========================================================================
11753 * FUNCTION : saveRequestSettings
11754 *
11755 * DESCRIPTION: Add any settings that might have changed to the request settings
11756 * and save the settings to be applied on the frame
11757 *
11758 * PARAMETERS :
11759 * @jpegMetadata : the extracted and/or modified jpeg metadata
11760 * @request : request with initial settings
11761 *
11762 * RETURN :
11763 * camera_metadata_t* : pointer to the saved request settings
11764 *==========================================================================*/
11765camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11766 const CameraMetadata &jpegMetadata,
11767 camera3_capture_request_t *request)
11768{
11769 camera_metadata_t *resultMetadata;
11770 CameraMetadata camMetadata;
11771 camMetadata = request->settings;
11772
11773 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11774 int32_t thumbnail_size[2];
11775 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11776 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11777 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11778 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11779 }
11780
11781 if (request->input_buffer != NULL) {
11782 uint8_t reprocessFlags = 1;
11783 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11784 (uint8_t*)&reprocessFlags,
11785 sizeof(reprocessFlags));
11786 }
11787
11788 resultMetadata = camMetadata.release();
11789 return resultMetadata;
11790}
11791
11792/*===========================================================================
11793 * FUNCTION : setHalFpsRange
11794 *
11795 * DESCRIPTION: set FPS range parameter
11796 *
11797 *
11798 * PARAMETERS :
11799 * @settings : Metadata from framework
11800 * @hal_metadata: Metadata buffer
11801 *
11802 *
11803 * RETURN : success: NO_ERROR
11804 * failure: non-zero error code
11805 *==========================================================================*/
11806int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11807 metadata_buffer_t *hal_metadata)
11808{
11809 int32_t rc = NO_ERROR;
11810 cam_fps_range_t fps_range;
11811 fps_range.min_fps = (float)
11812 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11813 fps_range.max_fps = (float)
11814 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11815 fps_range.video_min_fps = fps_range.min_fps;
11816 fps_range.video_max_fps = fps_range.max_fps;
11817
11818 LOGD("aeTargetFpsRange fps: [%f %f]",
11819 fps_range.min_fps, fps_range.max_fps);
11820 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11821 * follows:
11822 * ---------------------------------------------------------------|
11823 * Video stream is absent in configure_streams |
11824 * (Camcorder preview before the first video record |
11825 * ---------------------------------------------------------------|
11826 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11827 * | | | vid_min/max_fps|
11828 * ---------------------------------------------------------------|
11829 * NO | [ 30, 240] | 240 | [240, 240] |
11830 * |-------------|-------------|----------------|
11831 * | [240, 240] | 240 | [240, 240] |
11832 * ---------------------------------------------------------------|
11833 * Video stream is present in configure_streams |
11834 * ---------------------------------------------------------------|
11835 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11836 * | | | vid_min/max_fps|
11837 * ---------------------------------------------------------------|
11838 * NO | [ 30, 240] | 240 | [240, 240] |
11839 * (camcorder prev |-------------|-------------|----------------|
11840 * after video rec | [240, 240] | 240 | [240, 240] |
11841 * is stopped) | | | |
11842 * ---------------------------------------------------------------|
11843 * YES | [ 30, 240] | 240 | [240, 240] |
11844 * |-------------|-------------|----------------|
11845 * | [240, 240] | 240 | [240, 240] |
11846 * ---------------------------------------------------------------|
11847 * When Video stream is absent in configure_streams,
11848 * preview fps = sensor_fps / batchsize
11849 * Eg: for 240fps at batchSize 4, preview = 60fps
11850 * for 120fps at batchSize 4, preview = 30fps
11851 *
11852 * When video stream is present in configure_streams, preview fps is as per
11853 * the ratio of preview buffers to video buffers requested in process
11854 * capture request
11855 */
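/* Illustrative derivation (assuming PREVIEW_FPS_FOR_HFR is 30, consistent with the
 * examples above): in constrained high-speed mode an aeTargetFpsRange of [120, 120]
 * sets mHFRVideoFps to 120, so mBatchSize becomes 120 / 30 = 4, further capped at
 * MAX_HFR_BATCH_SIZE. */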
11856 mBatchSize = 0;
11857 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11858 fps_range.min_fps = fps_range.video_max_fps;
11859 fps_range.video_min_fps = fps_range.video_max_fps;
11860 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11861 fps_range.max_fps);
11862 if (NAME_NOT_FOUND != val) {
11863 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11864 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11865 return BAD_VALUE;
11866 }
11867
11868 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11869 /* If batchmode is currently in progress and the fps changes,
11870 * set the flag to restart the sensor */
11871 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11872 (mHFRVideoFps != fps_range.max_fps)) {
11873 mNeedSensorRestart = true;
11874 }
11875 mHFRVideoFps = fps_range.max_fps;
11876 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11877 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11878 mBatchSize = MAX_HFR_BATCH_SIZE;
11879 }
11880 }
11881 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11882
11883 }
11884 } else {
11885 /* HFR mode is a session parameter in the backend/ISP. It should be reset when
11886 * not in HFR mode */
11887 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11888 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11889 return BAD_VALUE;
11890 }
11891 }
11892 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11893 return BAD_VALUE;
11894 }
11895 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11896 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11897 return rc;
11898}
11899
11900/*===========================================================================
11901 * FUNCTION : translateToHalMetadata
11902 *
11903 * DESCRIPTION: read settings from the camera_metadata_t and translate them to parm_type_t
11904 *
11905 *
11906 * PARAMETERS :
11907 * @request : request sent from framework
11908 *
11909 *
11910 * RETURN : success: NO_ERROR
11911 * failure: non-zero error code
11912 *==========================================================================*/
11913int QCamera3HardwareInterface::translateToHalMetadata
11914 (const camera3_capture_request_t *request,
11915 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011916 uint32_t snapshotStreamId) {
11917 if (request == nullptr || hal_metadata == nullptr) {
11918 return BAD_VALUE;
11919 }
11920
11921 int64_t minFrameDuration = getMinFrameDuration(request);
11922
11923 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11924 minFrameDuration);
11925}
11926
11927int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11928 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11929 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11930
Thierry Strudel3d639192016-09-09 11:52:26 -070011931 int rc = 0;
11932 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011933 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011934
11935 /* Do not change the order of the following list unless you know what you are
11936 * doing.
11937 * The order is laid out in such a way that parameters in the front of the table
11938 * may be used to override the parameters later in the table. Examples are:
11939 * 1. META_MODE should precede AEC/AWB/AF MODE
11940 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11941 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11942 * 4. Any mode should precede its corresponding settings
11943 */
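/* For example, ANDROID_CONTROL_AF_MODE is translated before ANDROID_LENS_FOCUS_DISTANCE
 * below, so the manual focus distance is only forwarded when the AF mode read earlier
 * is OFF. */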
11944 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11945 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11946 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11947 rc = BAD_VALUE;
11948 }
11949 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11950 if (rc != NO_ERROR) {
11951 LOGE("extractSceneMode failed");
11952 }
11953 }
11954
11955 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11956 uint8_t fwk_aeMode =
11957 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11958 uint8_t aeMode;
11959 int32_t redeye;
11960
11961 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11962 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011963 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11964 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011965 } else {
11966 aeMode = CAM_AE_MODE_ON;
11967 }
11968 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11969 redeye = 1;
11970 } else {
11971 redeye = 0;
11972 }
11973
11974 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11975 fwk_aeMode);
11976 if (NAME_NOT_FOUND != val) {
11977 int32_t flashMode = (int32_t)val;
11978 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11979 }
11980
11981 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11982 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11983 rc = BAD_VALUE;
11984 }
11985 }
11986
11987 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11988 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11989 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11990 fwk_whiteLevel);
11991 if (NAME_NOT_FOUND != val) {
11992 uint8_t whiteLevel = (uint8_t)val;
11993 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11994 rc = BAD_VALUE;
11995 }
11996 }
11997 }
11998
11999 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12000 uint8_t fwk_cacMode =
12001 frame_settings.find(
12002 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12003 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12004 fwk_cacMode);
12005 if (NAME_NOT_FOUND != val) {
12006 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12007 bool entryAvailable = FALSE;
12008 // Check whether the framework-set CAC mode is supported by the device
12009 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12010 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12011 entryAvailable = TRUE;
12012 break;
12013 }
12014 }
12015 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12016 // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.:
12017 // Only HW ISP CAC + no SW CAC : advertise all 3, with HIGH doing the same as FAST on the ISP
12018 // No HW ISP CAC + only SW CAC : advertise all 3, with FAST doing the same as OFF
12019 if (entryAvailable == FALSE) {
12020 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12021 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12022 } else {
12023 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12024 // HIGH is not supported, so set FAST since the spec says the underlying
12025 // device implementation can be the same for both modes.
12026 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12027 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12028 // FAST is not supported, so we cannot set HIGH or FAST; choose OFF
12029 // to avoid the fps drop that high quality would cause
12030 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12031 } else {
12032 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12033 }
12034 }
12035 }
12036 LOGD("Final cacMode is %d", cacMode);
12037 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12038 rc = BAD_VALUE;
12039 }
12040 } else {
12041 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12042 }
12043 }
12044
Thierry Strudel2896d122017-02-23 19:18:03 -080012045 char af_value[PROPERTY_VALUE_MAX];
12046 property_get("persist.camera.af.infinity", af_value, "0");
12047
Jason Lee84ae9972017-02-24 13:24:24 -080012048 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080012049 if (atoi(af_value) == 0) {
12050 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012051 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012052 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12053 fwk_focusMode);
12054 if (NAME_NOT_FOUND != val) {
12055 uint8_t focusMode = (uint8_t)val;
12056 LOGD("set focus mode %d", focusMode);
12057 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12058 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12059 rc = BAD_VALUE;
12060 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012061 }
12062 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012063 } else {
12064 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12065 LOGE("Focus forced to infinity %d", focusMode);
12066 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12067 rc = BAD_VALUE;
12068 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012069 }
12070
Jason Lee84ae9972017-02-24 13:24:24 -080012071 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12072 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012073 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12074 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12075 focalDistance)) {
12076 rc = BAD_VALUE;
12077 }
12078 }
12079
12080 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12081 uint8_t fwk_antibandingMode =
12082 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12083 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12084 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12085 if (NAME_NOT_FOUND != val) {
12086 uint32_t hal_antibandingMode = (uint32_t)val;
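/* When the framework asks for AUTO antibanding, select the 60Hz or 50Hz variant
 * of the auto mode based on the region hint in m60HzZone. */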
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012087 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12088 if (m60HzZone) {
12089 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12090 } else {
12091 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12092 }
12093 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012094 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12095 hal_antibandingMode)) {
12096 rc = BAD_VALUE;
12097 }
12098 }
12099 }
12100
12101 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12102 int32_t expCompensation = frame_settings.find(
12103 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12104 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12105 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12106 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12107 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012108 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012109 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12110 expCompensation)) {
12111 rc = BAD_VALUE;
12112 }
12113 }
12114
12115 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12116 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12117 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12118 rc = BAD_VALUE;
12119 }
12120 }
12121 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12122 rc = setHalFpsRange(frame_settings, hal_metadata);
12123 if (rc != NO_ERROR) {
12124 LOGE("setHalFpsRange failed");
12125 }
12126 }
12127
12128 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12129 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12130 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12131 rc = BAD_VALUE;
12132 }
12133 }
12134
12135 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12136 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12137 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12138 fwk_effectMode);
12139 if (NAME_NOT_FOUND != val) {
12140 uint8_t effectMode = (uint8_t)val;
12141 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12142 rc = BAD_VALUE;
12143 }
12144 }
12145 }
12146
12147 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12148 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12149 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12150 colorCorrectMode)) {
12151 rc = BAD_VALUE;
12152 }
12153 }
12154
12155 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12156 cam_color_correct_gains_t colorCorrectGains;
12157 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12158 colorCorrectGains.gains[i] =
12159 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12160 }
12161 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12162 colorCorrectGains)) {
12163 rc = BAD_VALUE;
12164 }
12165 }
12166
12167 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12168 cam_color_correct_matrix_t colorCorrectTransform;
12169 cam_rational_type_t transform_elem;
12170 size_t num = 0;
12171 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12172 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12173 transform_elem.numerator =
12174 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12175 transform_elem.denominator =
12176 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12177 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12178 num++;
12179 }
12180 }
12181 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12182 colorCorrectTransform)) {
12183 rc = BAD_VALUE;
12184 }
12185 }
12186
12187 cam_trigger_t aecTrigger;
12188 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12189 aecTrigger.trigger_id = -1;
12190 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12191 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12192 aecTrigger.trigger =
12193 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12194 aecTrigger.trigger_id =
12195 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12196 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12197 aecTrigger)) {
12198 rc = BAD_VALUE;
12199 }
12200 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12201 aecTrigger.trigger, aecTrigger.trigger_id);
12202 }
12203
12204 /*af_trigger must come with a trigger id*/
12205 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12206 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12207 cam_trigger_t af_trigger;
12208 af_trigger.trigger =
12209 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12210 af_trigger.trigger_id =
12211 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12212 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12213 rc = BAD_VALUE;
12214 }
12215 LOGD("AfTrigger: %d AfTriggerID: %d",
12216 af_trigger.trigger, af_trigger.trigger_id);
12217 }
12218
12219 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12220 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12221 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12222 rc = BAD_VALUE;
12223 }
12224 }
12225 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12226 cam_edge_application_t edge_application;
12227 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012228
Thierry Strudel3d639192016-09-09 11:52:26 -070012229 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12230 edge_application.sharpness = 0;
12231 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012232 edge_application.sharpness =
12233 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12234 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12235 int32_t sharpness =
12236 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12237 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12238 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12239 LOGD("Setting edge mode sharpness %d", sharpness);
12240 edge_application.sharpness = sharpness;
12241 }
12242 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012243 }
12244 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12245 rc = BAD_VALUE;
12246 }
12247 }
12248
12249 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12250 int32_t respectFlashMode = 1;
12251 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12252 uint8_t fwk_aeMode =
12253 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012254 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12255 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12256 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012257 respectFlashMode = 0;
12258 LOGH("AE Mode controls flash, ignore android.flash.mode");
12259 }
12260 }
12261 if (respectFlashMode) {
12262 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12263 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12264 LOGH("flash mode after mapping %d", val);
12265 // To check: CAM_INTF_META_FLASH_MODE usage
12266 if (NAME_NOT_FOUND != val) {
12267 uint8_t flashMode = (uint8_t)val;
12268 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12269 rc = BAD_VALUE;
12270 }
12271 }
12272 }
12273 }
12274
12275 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12276 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12277 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12278 rc = BAD_VALUE;
12279 }
12280 }
12281
12282 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12283 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12284 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12285 flashFiringTime)) {
12286 rc = BAD_VALUE;
12287 }
12288 }
12289
12290 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12291 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12292 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12293 hotPixelMode)) {
12294 rc = BAD_VALUE;
12295 }
12296 }
12297
12298 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12299 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12300 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12301 lensAperture)) {
12302 rc = BAD_VALUE;
12303 }
12304 }
12305
12306 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12307 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12308 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12309 filterDensity)) {
12310 rc = BAD_VALUE;
12311 }
12312 }
12313
12314 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12315 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12316 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12317 focalLength)) {
12318 rc = BAD_VALUE;
12319 }
12320 }
12321
12322 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12323 uint8_t optStabMode =
12324 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12325 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12326 optStabMode)) {
12327 rc = BAD_VALUE;
12328 }
12329 }
12330
12331 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12332 uint8_t videoStabMode =
12333 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12334 LOGD("videoStabMode from APP = %d", videoStabMode);
12335 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12336 videoStabMode)) {
12337 rc = BAD_VALUE;
12338 }
12339 }
12340
12341
12342 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12343 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12344 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12345 noiseRedMode)) {
12346 rc = BAD_VALUE;
12347 }
12348 }
12349
12350 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12351 float reprocessEffectiveExposureFactor =
12352 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12353 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12354 reprocessEffectiveExposureFactor)) {
12355 rc = BAD_VALUE;
12356 }
12357 }
12358
12359 cam_crop_region_t scalerCropRegion;
12360 bool scalerCropSet = false;
12361 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12362 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12363 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12364 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12365 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12366
12367 // Map coordinate system from active array to sensor output.
12368 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12369 scalerCropRegion.width, scalerCropRegion.height);
12370
12371 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12372 scalerCropRegion)) {
12373 rc = BAD_VALUE;
12374 }
12375 scalerCropSet = true;
12376 }
12377
12378 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12379 int64_t sensorExpTime =
12380 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12381 LOGD("setting sensorExpTime %lld", sensorExpTime);
12382 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12383 sensorExpTime)) {
12384 rc = BAD_VALUE;
12385 }
12386 }
12387
12388 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12389 int64_t sensorFrameDuration =
12390 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012391 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12392 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12393 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12394 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12395 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12396 sensorFrameDuration)) {
12397 rc = BAD_VALUE;
12398 }
12399 }
12400
12401 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12402 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12403 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12404 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12405 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12406 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12407 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12408 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12409 sensorSensitivity)) {
12410 rc = BAD_VALUE;
12411 }
12412 }
12413
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012414#ifndef USE_HAL_3_3
12415 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12416 int32_t ispSensitivity =
12417 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12418 if (ispSensitivity <
12419 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12420 ispSensitivity =
12421 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12422 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12423 }
12424 if (ispSensitivity >
12425 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12426 ispSensitivity =
12427 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12428 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12429 }
12430 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12431 ispSensitivity)) {
12432 rc = BAD_VALUE;
12433 }
12434 }
12435#endif
12436
Thierry Strudel3d639192016-09-09 11:52:26 -070012437 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12438 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12439 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12440 rc = BAD_VALUE;
12441 }
12442 }
12443
12444 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12445 uint8_t fwk_facedetectMode =
12446 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12447
12448 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12449 fwk_facedetectMode);
12450
12451 if (NAME_NOT_FOUND != val) {
12452 uint8_t facedetectMode = (uint8_t)val;
12453 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12454 facedetectMode)) {
12455 rc = BAD_VALUE;
12456 }
12457 }
12458 }
12459
Thierry Strudel54dc9782017-02-15 12:12:10 -080012460 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012461 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012462 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012463 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12464 histogramMode)) {
12465 rc = BAD_VALUE;
12466 }
12467 }
12468
12469 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12470 uint8_t sharpnessMapMode =
12471 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12472 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12473 sharpnessMapMode)) {
12474 rc = BAD_VALUE;
12475 }
12476 }
12477
12478 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12479 uint8_t tonemapMode =
12480 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12481 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12482 rc = BAD_VALUE;
12483 }
12484 }
12485 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12486 /*All tonemap channels will have the same number of points*/
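/* Each ANDROID_TONEMAP_CURVE_* entry stores control points as interleaved
 * (Pin, Pout) value pairs, which is why the point count below is the entry
 * count divided by 2. */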
12487 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12488 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12489 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12490 cam_rgb_tonemap_curves tonemapCurves;
12491 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12492 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12493 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12494 tonemapCurves.tonemap_points_cnt,
12495 CAM_MAX_TONEMAP_CURVE_SIZE);
12496 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12497 }
12498
12499 /* ch0 = G*/
12500 size_t point = 0;
12501 cam_tonemap_curve_t tonemapCurveGreen;
12502 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12503 for (size_t j = 0; j < 2; j++) {
12504 tonemapCurveGreen.tonemap_points[i][j] =
12505 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12506 point++;
12507 }
12508 }
12509 tonemapCurves.curves[0] = tonemapCurveGreen;
12510
12511 /* ch 1 = B */
12512 point = 0;
12513 cam_tonemap_curve_t tonemapCurveBlue;
12514 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12515 for (size_t j = 0; j < 2; j++) {
12516 tonemapCurveBlue.tonemap_points[i][j] =
12517 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12518 point++;
12519 }
12520 }
12521 tonemapCurves.curves[1] = tonemapCurveBlue;
12522
12523 /* ch 2 = R */
12524 point = 0;
12525 cam_tonemap_curve_t tonemapCurveRed;
12526 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12527 for (size_t j = 0; j < 2; j++) {
12528 tonemapCurveRed.tonemap_points[i][j] =
12529 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12530 point++;
12531 }
12532 }
12533 tonemapCurves.curves[2] = tonemapCurveRed;
12534
12535 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12536 tonemapCurves)) {
12537 rc = BAD_VALUE;
12538 }
12539 }
12540
12541 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12542 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12543 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12544 captureIntent)) {
12545 rc = BAD_VALUE;
12546 }
12547 }
12548
12549 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12550 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12551 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12552 blackLevelLock)) {
12553 rc = BAD_VALUE;
12554 }
12555 }
12556
12557 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12558 uint8_t lensShadingMapMode =
12559 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12560 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12561 lensShadingMapMode)) {
12562 rc = BAD_VALUE;
12563 }
12564 }
12565
12566 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12567 cam_area_t roi;
12568 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012569 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012570
12571 // Map coordinate system from active array to sensor output.
12572 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12573 roi.rect.height);
12574
12575 if (scalerCropSet) {
12576 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12577 }
12578 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12579 rc = BAD_VALUE;
12580 }
12581 }
12582
12583 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12584 cam_area_t roi;
12585 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012586 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012587
12588 // Map coordinate system from active array to sensor output.
12589 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12590 roi.rect.height);
12591
12592 if (scalerCropSet) {
12593 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12594 }
12595 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12596 rc = BAD_VALUE;
12597 }
12598 }
12599
12600 // CDS for non-HFR non-video mode
12601 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12602 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12603 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12604 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12605 LOGE("Invalid CDS mode %d!", *fwk_cds);
12606 } else {
12607 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12608 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12609 rc = BAD_VALUE;
12610 }
12611 }
12612 }
12613
Thierry Strudel04e026f2016-10-10 11:27:36 -070012614 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012615 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012616 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012617 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12618 }
12619 if (m_bVideoHdrEnabled)
12620 vhdr = CAM_VIDEO_HDR_MODE_ON;
12621
Thierry Strudel54dc9782017-02-15 12:12:10 -080012622 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12623
12624 if(vhdr != curr_hdr_state)
12625 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12626
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012627 rc = setVideoHdrMode(mParameters, vhdr);
12628 if (rc != NO_ERROR) {
12629 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012630 }
12631
12632 //IR
12633 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12634 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12635 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012636 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12637 uint8_t isIRon = 0;
12638
12639 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012640 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12641 LOGE("Invalid IR mode %d!", fwk_ir);
12642 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012643 if(isIRon != curr_ir_state )
12644 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12645
Thierry Strudel04e026f2016-10-10 11:27:36 -070012646 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12647 CAM_INTF_META_IR_MODE, fwk_ir)) {
12648 rc = BAD_VALUE;
12649 }
12650 }
12651 }
12652
Thierry Strudel54dc9782017-02-15 12:12:10 -080012653 //Binning Correction Mode
12654 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12655 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12656 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12657 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12658 || (0 > fwk_binning_correction)) {
12659 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12660 } else {
12661 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12662 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12663 rc = BAD_VALUE;
12664 }
12665 }
12666 }
12667
Thierry Strudel269c81a2016-10-12 12:13:59 -070012668 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12669 float aec_speed;
12670 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12671 LOGD("AEC Speed :%f", aec_speed);
12672 if ( aec_speed < 0 ) {
12673 LOGE("Invalid AEC mode %f!", aec_speed);
12674 } else {
12675 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12676 aec_speed)) {
12677 rc = BAD_VALUE;
12678 }
12679 }
12680 }
12681
12682 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12683 float awb_speed;
12684 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12685 LOGD("AWB Speed :%f", awb_speed);
12686 if ( awb_speed < 0 ) {
12687 LOGE("Invalid AWB mode %f!", awb_speed);
12688 } else {
12689 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12690 awb_speed)) {
12691 rc = BAD_VALUE;
12692 }
12693 }
12694 }
12695
Thierry Strudel3d639192016-09-09 11:52:26 -070012696 // TNR
12697 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12698 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12699 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012700 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012701 cam_denoise_param_t tnr;
12702 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12703 tnr.process_plates =
12704 (cam_denoise_process_type_t)frame_settings.find(
12705 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12706 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012707
12708 if(b_TnrRequested != curr_tnr_state)
12709 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12710
Thierry Strudel3d639192016-09-09 11:52:26 -070012711 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12712 rc = BAD_VALUE;
12713 }
12714 }
12715
Thierry Strudel54dc9782017-02-15 12:12:10 -080012716 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012717 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012718 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012719 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12720 *exposure_metering_mode)) {
12721 rc = BAD_VALUE;
12722 }
12723 }
12724
Thierry Strudel3d639192016-09-09 11:52:26 -070012725 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12726 int32_t fwk_testPatternMode =
12727 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12728 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12729 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12730
12731 if (NAME_NOT_FOUND != testPatternMode) {
12732 cam_test_pattern_data_t testPatternData;
12733 memset(&testPatternData, 0, sizeof(testPatternData));
12734 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12735 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12736 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12737 int32_t *fwk_testPatternData =
12738 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12739 testPatternData.r = fwk_testPatternData[0];
12740 testPatternData.b = fwk_testPatternData[3];
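/* The framework supplies the solid-color pattern as four values with R first and
 * B last; the two middle green values are mapped to Gr/Gb below according to the
 * sensor's Bayer color arrangement. */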
12741 switch (gCamCapability[mCameraId]->color_arrangement) {
12742 case CAM_FILTER_ARRANGEMENT_RGGB:
12743 case CAM_FILTER_ARRANGEMENT_GRBG:
12744 testPatternData.gr = fwk_testPatternData[1];
12745 testPatternData.gb = fwk_testPatternData[2];
12746 break;
12747 case CAM_FILTER_ARRANGEMENT_GBRG:
12748 case CAM_FILTER_ARRANGEMENT_BGGR:
12749 testPatternData.gr = fwk_testPatternData[2];
12750 testPatternData.gb = fwk_testPatternData[1];
12751 break;
12752 default:
12753 LOGE("color arrangement %d is not supported",
12754 gCamCapability[mCameraId]->color_arrangement);
12755 break;
12756 }
12757 }
12758 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12759 testPatternData)) {
12760 rc = BAD_VALUE;
12761 }
12762 } else {
12763 LOGE("Invalid framework sensor test pattern mode %d",
12764 fwk_testPatternMode);
12765 }
12766 }
12767
12768 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12769 size_t count = 0;
12770 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12771 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12772 gps_coords.data.d, gps_coords.count, count);
12773 if (gps_coords.count != count) {
12774 rc = BAD_VALUE;
12775 }
12776 }
12777
12778 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12779 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12780 size_t count = 0;
12781 const char *gps_methods_src = (const char *)
12782 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12783 memset(gps_methods, '\0', sizeof(gps_methods));
12784 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12785 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12786 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12787 if (GPS_PROCESSING_METHOD_SIZE != count) {
12788 rc = BAD_VALUE;
12789 }
12790 }
12791
12792 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12793 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12794 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12795 gps_timestamp)) {
12796 rc = BAD_VALUE;
12797 }
12798 }
12799
12800 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12801 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12802 cam_rotation_info_t rotation_info;
12803 if (orientation == 0) {
12804 rotation_info.rotation = ROTATE_0;
12805 } else if (orientation == 90) {
12806 rotation_info.rotation = ROTATE_90;
12807 } else if (orientation == 180) {
12808 rotation_info.rotation = ROTATE_180;
12809 } else if (orientation == 270) {
12810 rotation_info.rotation = ROTATE_270;
12811 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012812 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012813 rotation_info.streamId = snapshotStreamId;
12814 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12815 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12816 rc = BAD_VALUE;
12817 }
12818 }
12819
12820 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12821 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12822 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12823 rc = BAD_VALUE;
12824 }
12825 }
12826
12827 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12828 uint32_t thumb_quality = (uint32_t)
12829 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12830 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12831 thumb_quality)) {
12832 rc = BAD_VALUE;
12833 }
12834 }
12835
12836 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12837 cam_dimension_t dim;
12838 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12839 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12840 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12841 rc = BAD_VALUE;
12842 }
12843 }
12844
12845 // Internal metadata
12846 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12847 size_t count = 0;
12848 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12849 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12850 privatedata.data.i32, privatedata.count, count);
12851 if (privatedata.count != count) {
12852 rc = BAD_VALUE;
12853 }
12854 }
12855
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012856 // ISO/Exposure Priority
12857 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12858 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12859 cam_priority_mode_t mode =
12860 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12861 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12862 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12863 use_iso_exp_pty.previewOnly = FALSE;
12864 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12865 use_iso_exp_pty.value = *ptr;
12866
12867 if(CAM_ISO_PRIORITY == mode) {
12868 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12869 use_iso_exp_pty)) {
12870 rc = BAD_VALUE;
12871 }
12872 }
12873 else {
12874 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12875 use_iso_exp_pty)) {
12876 rc = BAD_VALUE;
12877 }
12878 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012879
12880 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12881 rc = BAD_VALUE;
12882 }
12883 }
12884 } else {
12885 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12886 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012887 }
12888 }
12889
12890 // Saturation
12891 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12892 int32_t* use_saturation =
12893 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12894 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12895 rc = BAD_VALUE;
12896 }
12897 }
12898
Thierry Strudel3d639192016-09-09 11:52:26 -070012899 // EV step
12900 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12901 gCamCapability[mCameraId]->exp_compensation_step)) {
12902 rc = BAD_VALUE;
12903 }
12904
12905 // CDS info
12906 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12907 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12908 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12909
12910 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12911 CAM_INTF_META_CDS_DATA, *cdsData)) {
12912 rc = BAD_VALUE;
12913 }
12914 }
12915
Shuzhen Wang19463d72016-03-08 11:09:52 -080012916 // Hybrid AE
12917 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12918 uint8_t *hybrid_ae = (uint8_t *)
12919 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12920
12921 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12922 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12923 rc = BAD_VALUE;
12924 }
12925 }
12926
Shuzhen Wang14415f52016-11-16 18:26:18 -080012927 // Histogram
12928 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12929 uint8_t histogramMode =
12930 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12931 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12932 histogramMode)) {
12933 rc = BAD_VALUE;
12934 }
12935 }
12936
12937 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12938 int32_t histogramBins =
12939 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12940 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12941 histogramBins)) {
12942 rc = BAD_VALUE;
12943 }
12944 }
12945
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012946 // Tracking AF
12947 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12948 uint8_t trackingAfTrigger =
12949 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12950 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12951 trackingAfTrigger)) {
12952 rc = BAD_VALUE;
12953 }
12954 }
12955
Thierry Strudel3d639192016-09-09 11:52:26 -070012956 return rc;
12957}
12958
12959/*===========================================================================
12960 * FUNCTION : captureResultCb
12961 *
12962 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12963 *
12964 * PARAMETERS :
12965 * @frame : frame information from mm-camera-interface
12966 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12967 * @userdata: userdata
12968 *
12969 * RETURN : NONE
12970 *==========================================================================*/
12971void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12972 camera3_stream_buffer_t *buffer,
12973 uint32_t frame_number, bool isInputBuffer, void *userdata)
12974{
12975 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12976 if (hw == NULL) {
12977 LOGE("Invalid hw %p", hw);
12978 return;
12979 }
12980
12981 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12982 return;
12983}
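// Note (illustrative only, not part of the original HAL source): captureResultCb()
// above is the static trampoline invoked by the channel layer; the hardware
// interface instance is recovered from the opaque userdata pointer registered with
// each channel at construction time (see addOfflineReprocChannel() below, which
// passes captureResultCb together with "this"). A minimal sketch of the calling
// side, using a hypothetical channel member mUserData, would be:
//
//     // inside a channel, once a result buffer is ready:
//     mHalCallback(metadata, &streamBuffer, frameNumber,
//             false /*isInputBuffer*/, mUserData); // mUserData is the HWI instance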
12984
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012985/*===========================================================================
12986 * FUNCTION : setBufferErrorStatus
12987 *
12988 * DESCRIPTION: Callback handler for channels to report any buffer errors
12989 *
12990 * PARAMETERS :
12991 * @ch : Channel on which buffer error is reported from
12992 * @frame_number : frame number on which buffer error is reported on
12993 * @buffer_status : buffer error status
12994 * @userdata: userdata
12995 *
12996 * RETURN : NONE
12997 *==========================================================================*/
12998void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12999 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13000{
13001 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13002 if (hw == NULL) {
13003 LOGE("Invalid hw %p", hw);
13004 return;
13005 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013006
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013007 hw->setBufferErrorStatus(ch, frame_number, err);
13008 return;
13009}
13010
13011void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13012 uint32_t frameNumber, camera3_buffer_status_t err)
13013{
13014 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13015 pthread_mutex_lock(&mMutex);
13016
13017 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13018 if (req.frame_number != frameNumber)
13019 continue;
13020 for (auto& k : req.mPendingBufferList) {
13021 if(k.stream->priv == ch) {
13022 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13023 }
13024 }
13025 }
13026
13027 pthread_mutex_unlock(&mMutex);
13028 return;
13029}
Thierry Strudel3d639192016-09-09 11:52:26 -070013030/*===========================================================================
13031 * FUNCTION : initialize
13032 *
13033 * DESCRIPTION: Pass framework callback pointers to HAL
13034 *
13035 * PARAMETERS :
13036 *
13037 *
13038 * RETURN : Success : 0
13039 * Failure: -ENODEV
13040 *==========================================================================*/
13041
13042int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13043 const camera3_callback_ops_t *callback_ops)
13044{
13045 LOGD("E");
13046 QCamera3HardwareInterface *hw =
13047 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13048 if (!hw) {
13049 LOGE("NULL camera device");
13050 return -ENODEV;
13051 }
13052
13053 int rc = hw->initialize(callback_ops);
13054 LOGD("X");
13055 return rc;
13056}
13057
13058/*===========================================================================
13059 * FUNCTION : configure_streams
13060 *
13061 * DESCRIPTION: Process a stream configuration request from the framework
13062 *
13063 * PARAMETERS :
13064 *
13065 *
13066 * RETURN : Success: 0
13067 * Failure: -EINVAL (if stream configuration is invalid)
13068 * -ENODEV (fatal error)
13069 *==========================================================================*/
13070
13071int QCamera3HardwareInterface::configure_streams(
13072 const struct camera3_device *device,
13073 camera3_stream_configuration_t *stream_list)
13074{
13075 LOGD("E");
13076 QCamera3HardwareInterface *hw =
13077 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13078 if (!hw) {
13079 LOGE("NULL camera device");
13080 return -ENODEV;
13081 }
13082 int rc = hw->configureStreams(stream_list);
13083 LOGD("X");
13084 return rc;
13085}
13086
13087/*===========================================================================
13088 * FUNCTION : construct_default_request_settings
13089 *
13090 * DESCRIPTION: Configure a settings buffer to meet the required use case
13091 *
13092 * PARAMETERS :
13093 *
13094 *
13095 * RETURN : Success: Return valid metadata
13096 * Failure: Return NULL
13097 *==========================================================================*/
13098const camera_metadata_t* QCamera3HardwareInterface::
13099 construct_default_request_settings(const struct camera3_device *device,
13100 int type)
13101{
13102
13103 LOGD("E");
13104 camera_metadata_t* fwk_metadata = NULL;
13105 QCamera3HardwareInterface *hw =
13106 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13107 if (!hw) {
13108 LOGE("NULL camera device");
13109 return NULL;
13110 }
13111
13112 fwk_metadata = hw->translateCapabilityToMetadata(type);
13113
13114 LOGD("X");
13115 return fwk_metadata;
13116}
13117
13118/*===========================================================================
13119 * FUNCTION : process_capture_request
13120 *
13121 * DESCRIPTION: Queue a capture request from the framework for processing
13122 *
13123 * PARAMETERS :
13124 *
13125 *
13126 * RETURN :
13127 *==========================================================================*/
13128int QCamera3HardwareInterface::process_capture_request(
13129 const struct camera3_device *device,
13130 camera3_capture_request_t *request)
13131{
13132 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013133 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013134 QCamera3HardwareInterface *hw =
13135 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13136 if (!hw) {
13137 LOGE("NULL camera device");
13138 return -EINVAL;
13139 }
13140
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013141 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013142 LOGD("X");
13143 return rc;
13144}
13145
13146/*===========================================================================
13147 * FUNCTION : dump
13148 *
13149 * DESCRIPTION: Dump HAL debug state for the given camera device into a file descriptor
13150 *
13151 * PARAMETERS :
13152 *
13153 *
13154 * RETURN :
13155 *==========================================================================*/
13156
13157void QCamera3HardwareInterface::dump(
13158 const struct camera3_device *device, int fd)
13159{
13160 /* Log level property is read when "adb shell dumpsys media.camera" is
13161 called so that the log level can be controlled without restarting
13162 the media server */
13163 getLogLevel();
13164
13165 LOGD("E");
13166 QCamera3HardwareInterface *hw =
13167 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13168 if (!hw) {
13169 LOGE("NULL camera device");
13170 return;
13171 }
13172
13173 hw->dump(fd);
13174 LOGD("X");
13175 return;
13176}
13177
13178/*===========================================================================
13179 * FUNCTION : flush
13180 *
13181 * DESCRIPTION: Flush all in-flight capture requests on the camera device
13182 *
13183 * PARAMETERS :
13184 *
13185 *
13186 * RETURN :
13187 *==========================================================================*/
13188
13189int QCamera3HardwareInterface::flush(
13190 const struct camera3_device *device)
13191{
13192 int rc;
13193 LOGD("E");
13194 QCamera3HardwareInterface *hw =
13195 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13196 if (!hw) {
13197 LOGE("NULL camera device");
13198 return -EINVAL;
13199 }
13200
13201 pthread_mutex_lock(&hw->mMutex);
13202 // Validate current state
13203 switch (hw->mState) {
13204 case STARTED:
13205 /* valid state */
13206 break;
13207
13208 case ERROR:
13209 pthread_mutex_unlock(&hw->mMutex);
13210 hw->handleCameraDeviceError();
13211 return -ENODEV;
13212
13213 default:
13214 LOGI("Flush returned during state %d", hw->mState);
13215 pthread_mutex_unlock(&hw->mMutex);
13216 return 0;
13217 }
13218 pthread_mutex_unlock(&hw->mMutex);
13219
13220 rc = hw->flush(true /* restart channels */ );
13221 LOGD("X");
13222 return rc;
13223}
13224
13225/*===========================================================================
13226 * FUNCTION : close_camera_device
13227 *
13228 * DESCRIPTION: Close the camera device and free the HAL instance
13229 *
13230 * PARAMETERS :
13231 *
13232 *
13233 * RETURN :
13234 *==========================================================================*/
13235int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13236{
13237 int ret = NO_ERROR;
13238 QCamera3HardwareInterface *hw =
13239 reinterpret_cast<QCamera3HardwareInterface *>(
13240 reinterpret_cast<camera3_device_t *>(device)->priv);
13241 if (!hw) {
13242 LOGE("NULL camera device");
13243 return BAD_VALUE;
13244 }
13245
13246 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13247 delete hw;
13248 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013249 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013250 return ret;
13251}
13252
13253/*===========================================================================
13254 * FUNCTION : getWaveletDenoiseProcessPlate
13255 *
13256 * DESCRIPTION: query wavelet denoise process plate
13257 *
13258 * PARAMETERS : None
13259 *
13260 * RETURN : WNR process plate value
13261 *==========================================================================*/
13262cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13263{
13264 char prop[PROPERTY_VALUE_MAX];
13265 memset(prop, 0, sizeof(prop));
13266 property_get("persist.denoise.process.plates", prop, "0");
13267 int processPlate = atoi(prop);
13268 switch(processPlate) {
13269 case 0:
13270 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13271 case 1:
13272 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13273 case 2:
13274 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13275 case 3:
13276 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13277 default:
13278 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13279 }
13280}
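// Usage note (illustrative, assumes standard Android property tooling): the plate
// selection above is driven entirely by "persist.denoise.process.plates", so it can
// be overridden at runtime for tuning, e.g. via
// "adb shell setprop persist.denoise.process.plates 1" (CAM_WAVELET_DENOISE_CBCR_ONLY).
// Values outside 0-3 fall back to CAM_WAVELET_DENOISE_STREAMLINE_YCBCR per the
// default case.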
13281
13282
13283/*===========================================================================
13284 * FUNCTION : getTemporalDenoiseProcessPlate
13285 *
13286 * DESCRIPTION: query temporal denoise process plate
13287 *
13288 * PARAMETERS : None
13289 *
13290 * RETURN : TNR process plate value
13291 *==========================================================================*/
13292cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13293{
13294 char prop[PROPERTY_VALUE_MAX];
13295 memset(prop, 0, sizeof(prop));
13296 property_get("persist.tnr.process.plates", prop, "0");
13297 int processPlate = atoi(prop);
13298 switch(processPlate) {
13299 case 0:
13300 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13301 case 1:
13302 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13303 case 2:
13304 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13305 case 3:
13306 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13307 default:
13308 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13309 }
13310}
13311
13312
13313/*===========================================================================
13314 * FUNCTION : extractSceneMode
13315 *
13316 * DESCRIPTION: Extract scene mode from frameworks set metadata
13317 *
13318 * PARAMETERS :
13319 * @frame_settings: CameraMetadata reference
13320 * @metaMode: ANDROID_CONTROL_MODE
13321 * @hal_metadata: hal metadata structure
13322 *
13323 * RETURN : None
13324 *==========================================================================*/
13325int32_t QCamera3HardwareInterface::extractSceneMode(
13326 const CameraMetadata &frame_settings, uint8_t metaMode,
13327 metadata_buffer_t *hal_metadata)
13328{
13329 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013330 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13331
13332 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13333 LOGD("Ignoring control mode OFF_KEEP_STATE");
13334 return NO_ERROR;
13335 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013336
13337 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13338 camera_metadata_ro_entry entry =
13339 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13340 if (0 == entry.count)
13341 return rc;
13342
13343 uint8_t fwk_sceneMode = entry.data.u8[0];
13344
13345 int val = lookupHalName(SCENE_MODES_MAP,
13346 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13347 fwk_sceneMode);
13348 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013349 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013350 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013351 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013352 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013353
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013354 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13355 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13356 }
13357
13358 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13359 if (sceneMode == CAM_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013360 cam_hdr_param_t hdr_params;
13361 hdr_params.hdr_enable = 1;
13362 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13363 hdr_params.hdr_need_1x = false;
13364 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13365 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13366 rc = BAD_VALUE;
13367 }
13368 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013369
Thierry Strudel3d639192016-09-09 11:52:26 -070013370 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13371 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13372 rc = BAD_VALUE;
13373 }
13374 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013375
13376 if (mForceHdrSnapshot) {
13377 cam_hdr_param_t hdr_params;
13378 hdr_params.hdr_enable = 1;
13379 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13380 hdr_params.hdr_need_1x = false;
13381 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13382 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13383 rc = BAD_VALUE;
13384 }
13385 }
13386
Thierry Strudel3d639192016-09-09 11:52:26 -070013387 return rc;
13388}
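// Illustrative caller-side sketch (hypothetical framework snippet, not HAL code):
// the scene-mode path above is only taken when the request metadata carries both
// the scene-mode control mode and a scene mode value, e.g.
//
//     uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
//     uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
//     settings.update(ANDROID_CONTROL_MODE, &mode, 1);
//     settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
//
// where settings is a CameraMetadata instance; SCENE_MODES_MAP then translates the
// framework value into the matching CAM_SCENE_MODE_* constant used above.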
13389
13390/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013391 * FUNCTION : setVideoHdrMode
13392 *
13393 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13394 *
13395 * PARAMETERS :
13396 * @hal_metadata: hal metadata structure
13397 * @metaMode: QCAMERA3_VIDEO_HDR_MODE
13398 *
13399 * RETURN : None
13400 *==========================================================================*/
13401int32_t QCamera3HardwareInterface::setVideoHdrMode(
13402 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13403{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013404 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13405 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13406 }
13407
13408 LOGE("Invalid Video HDR mode %d!", vhdr);
13409 return BAD_VALUE;
13410}
13411
13412/*===========================================================================
13413 * FUNCTION : setSensorHDR
13414 *
13415 * DESCRIPTION: Enable/disable sensor HDR.
13416 *
13417 * PARAMETERS :
13418 * @hal_metadata: hal metadata structure
13419 * @enable: boolean whether to enable/disable sensor HDR
13420 *
13421 * RETURN : None
13422 *==========================================================================*/
13423int32_t QCamera3HardwareInterface::setSensorHDR(
13424 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13425{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013426 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013427 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13428
13429 if (enable) {
13430 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13431 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13432 #ifdef _LE_CAMERA_
13433 //Default to staggered HDR for IOT
13434 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13435 #else
13436 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13437 #endif
13438 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13439 }
13440
13441 bool isSupported = false;
13442 switch (sensor_hdr) {
13443 case CAM_SENSOR_HDR_IN_SENSOR:
13444 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13445 CAM_QCOM_FEATURE_SENSOR_HDR) {
13446 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013447 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013448 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013449 break;
13450 case CAM_SENSOR_HDR_ZIGZAG:
13451 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13452 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13453 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013454 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013455 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013456 break;
13457 case CAM_SENSOR_HDR_STAGGERED:
13458 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13459 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13460 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013461 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013462 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013463 break;
13464 case CAM_SENSOR_HDR_OFF:
13465 isSupported = true;
13466 LOGD("Turning off sensor HDR");
13467 break;
13468 default:
13469 LOGE("HDR mode %d not supported", sensor_hdr);
13470 rc = BAD_VALUE;
13471 break;
13472 }
13473
13474 if(isSupported) {
13475 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13476 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13477 rc = BAD_VALUE;
13478 } else {
13479 if(!isVideoHdrEnable)
13480 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013481 }
13482 }
13483 return rc;
13484}
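// Usage note (interpretation of the defaults above, not authoritative): when HDR is
// requested, the sensor HDR flavor comes from "persist.camera.sensor.hdr"; a value
// of 0 leaves sensor HDR off, while the _LE_CAMERA_ default of 3 selects staggered
// HDR per the in-code comment. A chosen mode is only applied when the matching
// CAM_QCOM_FEATURE_* bit is present in qcom_supported_feature_mask; otherwise the
// parameter is silently not set.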
13485
13486/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013487 * FUNCTION : needRotationReprocess
13488 *
13489 * DESCRIPTION: whether rotation needs to be done by reprocess in PP
13490 *
13491 * PARAMETERS : none
13492 *
13493 * RETURN : true: needed
13494 * false: no need
13495 *==========================================================================*/
13496bool QCamera3HardwareInterface::needRotationReprocess()
13497{
13498 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13499 // current rotation is not zero, and pp has the capability to process rotation
13500 LOGH("need do reprocess for rotation");
13501 return true;
13502 }
13503
13504 return false;
13505}
13506
13507/*===========================================================================
13508 * FUNCTION : needReprocess
13509 *
13510 * DESCRIPTION: whether reprocess is needed
13511 *
13512 * PARAMETERS : none
13513 *
13514 * RETURN : true: needed
13515 * false: no need
13516 *==========================================================================*/
13517bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13518{
13519 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13520 // TODO: add for ZSL HDR later
13521 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13522 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13523 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13524 return true;
13525 } else {
13526 LOGH("already post processed frame");
13527 return false;
13528 }
13529 }
13530 return needRotationReprocess();
13531}
13532
13533/*===========================================================================
13534 * FUNCTION : needJpegExifRotation
13535 *
13536 * DESCRIPTION: whether rotation needs to be handled through JPEG EXIF
13537 *
13538 * PARAMETERS : none
13539 *
13540 * RETURN : true: needed
13541 * false: no need
13542 *==========================================================================*/
13543bool QCamera3HardwareInterface::needJpegExifRotation()
13544{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013545 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013546 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13547 LOGD("Need use Jpeg EXIF Rotation");
13548 return true;
13549 }
13550 return false;
13551}
13552
13553/*===========================================================================
13554 * FUNCTION : addOfflineReprocChannel
13555 *
13556 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13557 * coming from input channel
13558 *
13559 * PARAMETERS :
13560 * @config : reprocess configuration
13561 * @inputChHandle : pointer to the input (source) channel
13562 *
13563 *
13564 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13565 *==========================================================================*/
13566QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13567 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13568{
13569 int32_t rc = NO_ERROR;
13570 QCamera3ReprocessChannel *pChannel = NULL;
13571
13572 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013573 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13574 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013575 if (NULL == pChannel) {
13576 LOGE("no mem for reprocess channel");
13577 return NULL;
13578 }
13579
13580 rc = pChannel->initialize(IS_TYPE_NONE);
13581 if (rc != NO_ERROR) {
13582 LOGE("init reprocess channel failed, ret = %d", rc);
13583 delete pChannel;
13584 return NULL;
13585 }
13586
13587 // pp feature config
13588 cam_pp_feature_config_t pp_config;
13589 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13590
13591 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13592 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13593 & CAM_QCOM_FEATURE_DSDN) {
13594 //Use CPP CDS in case h/w supports it.
13595 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13596 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13597 }
13598 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13599 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13600 }
13601
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013602 if (config.hdr_param.hdr_enable) {
13603 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13604 pp_config.hdr_param = config.hdr_param;
13605 }
13606
13607 if (mForceHdrSnapshot) {
13608 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13609 pp_config.hdr_param.hdr_enable = 1;
13610 pp_config.hdr_param.hdr_need_1x = 0;
13611 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13612 }
13613
Thierry Strudel3d639192016-09-09 11:52:26 -070013614 rc = pChannel->addReprocStreamsFromSource(pp_config,
13615 config,
13616 IS_TYPE_NONE,
13617 mMetadataChannel);
13618
13619 if (rc != NO_ERROR) {
13620 delete pChannel;
13621 return NULL;
13622 }
13623 return pChannel;
13624}
13625
13626/*===========================================================================
13627 * FUNCTION : getMobicatMask
13628 *
13629 * DESCRIPTION: returns mobicat mask
13630 *
13631 * PARAMETERS : none
13632 *
13633 * RETURN : mobicat mask
13634 *
13635 *==========================================================================*/
13636uint8_t QCamera3HardwareInterface::getMobicatMask()
13637{
13638 return m_MobicatMask;
13639}
13640
13641/*===========================================================================
13642 * FUNCTION : setMobicat
13643 *
13644 * DESCRIPTION: set Mobicat on/off.
13645 *
13646 * PARAMETERS :
13647 * @params : none
13648 *
13649 * RETURN : int32_t type of status
13650 * NO_ERROR -- success
13651 * none-zero failure code
13652 *==========================================================================*/
13653int32_t QCamera3HardwareInterface::setMobicat()
13654{
13655 char value [PROPERTY_VALUE_MAX];
13656 property_get("persist.camera.mobicat", value, "0");
13657 int32_t ret = NO_ERROR;
13658 uint8_t enableMobi = (uint8_t)atoi(value);
13659
13660 if (enableMobi) {
13661 tune_cmd_t tune_cmd;
13662 tune_cmd.type = SET_RELOAD_CHROMATIX;
13663 tune_cmd.module = MODULE_ALL;
13664 tune_cmd.value = TRUE;
13665 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13666 CAM_INTF_PARM_SET_VFE_COMMAND,
13667 tune_cmd);
13668
13669 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13670 CAM_INTF_PARM_SET_PP_COMMAND,
13671 tune_cmd);
13672 }
13673 m_MobicatMask = enableMobi;
13674
13675 return ret;
13676}
13677
13678/*===========================================================================
13679* FUNCTION : getLogLevel
13680*
13681* DESCRIPTION: Reads the log level property into a variable
13682*
13683* PARAMETERS :
13684* None
13685*
13686* RETURN :
13687* None
13688*==========================================================================*/
13689void QCamera3HardwareInterface::getLogLevel()
13690{
13691 char prop[PROPERTY_VALUE_MAX];
13692 uint32_t globalLogLevel = 0;
13693
13694 property_get("persist.camera.hal.debug", prop, "0");
13695 int val = atoi(prop);
13696 if (0 <= val) {
13697 gCamHal3LogLevel = (uint32_t)val;
13698 }
13699
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013700 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013701 gKpiDebugLevel = atoi(prop);
13702
13703 property_get("persist.camera.global.debug", prop, "0");
13704 val = atoi(prop);
13705 if (0 <= val) {
13706 globalLogLevel = (uint32_t)val;
13707 }
13708
13709 /* Highest log level among hal.logs and global.logs is selected */
13710 if (gCamHal3LogLevel < globalLogLevel)
13711 gCamHal3LogLevel = globalLogLevel;
13712
13713 return;
13714}
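// Usage note: gCamHal3LogLevel ends up as the maximum of persist.camera.hal.debug
// and persist.camera.global.debug; since dump() re-reads these via getLogLevel(),
// the level can be raised at runtime (set the property, then trigger
// "adb shell dumpsys media.camera") without restarting the media/camera process.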
13715
13716/*===========================================================================
13717 * FUNCTION : validateStreamRotations
13718 *
13719 * DESCRIPTION: Check if the rotations requested are supported
13720 *
13721 * PARAMETERS :
13722 * @stream_list : streams to be configured
13723 *
13724 * RETURN : NO_ERROR on success
13725 * -EINVAL on failure
13726 *
13727 *==========================================================================*/
13728int QCamera3HardwareInterface::validateStreamRotations(
13729 camera3_stream_configuration_t *streamList)
13730{
13731 int rc = NO_ERROR;
13732
13733 /*
13734 * Loop through all streams requested in configuration
13735 * Check if unsupported rotations have been requested on any of them
13736 */
13737 for (size_t j = 0; j < streamList->num_streams; j++){
13738 camera3_stream_t *newStream = streamList->streams[j];
13739
Emilian Peev35ceeed2017-06-29 11:58:56 -070013740 switch(newStream->rotation) {
13741 case CAMERA3_STREAM_ROTATION_0:
13742 case CAMERA3_STREAM_ROTATION_90:
13743 case CAMERA3_STREAM_ROTATION_180:
13744 case CAMERA3_STREAM_ROTATION_270:
13745 //Expected values
13746 break;
13747 default:
13748 ALOGE("%s: Error: Unsupported rotation of %d requested for stream "
13749 "type:%d and stream format:%d", __func__,
13750 newStream->rotation, newStream->stream_type,
13751 newStream->format);
13752 return -EINVAL;
13753 }
13754
Thierry Strudel3d639192016-09-09 11:52:26 -070013755 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13756 bool isImplDef = (newStream->format ==
13757 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13758 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13759 isImplDef);
13760
13761 if (isRotated && (!isImplDef || isZsl)) {
13762 LOGE("Error: Unsupported rotation of %d requested for stream "
13763 "type:%d and stream format:%d",
13764 newStream->rotation, newStream->stream_type,
13765 newStream->format);
13766 rc = -EINVAL;
13767 break;
13768 }
13769 }
13770
13771 return rc;
13772}
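// Worked example (hypothetical stream configuration, for illustration only): per the
// checks above, a 90-degree rotation is accepted only on an output
// IMPLEMENTATION_DEFINED stream, e.g.
//
//     camera3_stream_t s = {};
//     s.stream_type = CAMERA3_STREAM_OUTPUT;
//     s.format      = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
//     s.rotation    = CAMERA3_STREAM_ROTATION_90;   // accepted
//
// whereas the same rotation on a non-IMPLEMENTATION_DEFINED stream (e.g. BLOB), on a
// bidirectional (ZSL) stream, or any rotation value outside the four
// CAMERA3_STREAM_ROTATION_* enums is rejected with -EINVAL.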
13773
13774/*===========================================================================
13775* FUNCTION : getFlashInfo
13776*
13777* DESCRIPTION: Retrieve information about whether the device has a flash.
13778*
13779* PARAMETERS :
13780* @cameraId : Camera id to query
13781* @hasFlash : Boolean indicating whether there is a flash device
13782* associated with given camera
13783* @flashNode : If a flash device exists, this will be its device node.
13784*
13785* RETURN :
13786* None
13787*==========================================================================*/
13788void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13789 bool& hasFlash,
13790 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13791{
13792 cam_capability_t* camCapability = gCamCapability[cameraId];
13793 if (NULL == camCapability) {
13794 hasFlash = false;
13795 flashNode[0] = '\0';
13796 } else {
13797 hasFlash = camCapability->flash_available;
13798 strlcpy(flashNode,
13799 (char*)camCapability->flash_dev_name,
13800 QCAMERA_MAX_FILEPATH_LENGTH);
13801 }
13802}
13803
13804/*===========================================================================
13805* FUNCTION : getEepromVersionInfo
13806*
13807* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13808*
13809* PARAMETERS : None
13810*
13811* RETURN : string describing EEPROM version
13812* "\0" if no such info available
13813*==========================================================================*/
13814const char *QCamera3HardwareInterface::getEepromVersionInfo()
13815{
13816 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13817}
13818
13819/*===========================================================================
13820* FUNCTION : getLdafCalib
13821*
13822* DESCRIPTION: Retrieve Laser AF calibration data
13823*
13824* PARAMETERS : None
13825*
13826* RETURN : Two uint32_t describing laser AF calibration data
13827* NULL if none is available.
13828*==========================================================================*/
13829const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13830{
13831 if (mLdafCalibExist) {
13832 return &mLdafCalib[0];
13833 } else {
13834 return NULL;
13835 }
13836}
13837
13838/*===========================================================================
13839 * FUNCTION : dynamicUpdateMetaStreamInfo
13840 *
13841 * DESCRIPTION: This function:
13842 * (1) stops all the channels
13843 * (2) returns error on pending requests and buffers
13844 * (3) sends metastream_info in setparams
13845 * (4) starts all channels
13846 * This is useful when sensor has to be restarted to apply any
13847 * settings such as frame rate from a different sensor mode
13848 *
13849 * PARAMETERS : None
13850 *
13851 * RETURN : NO_ERROR on success
13852 * Error codes on failure
13853 *
13854 *==========================================================================*/
13855int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13856{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013857 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013858 int rc = NO_ERROR;
13859
13860 LOGD("E");
13861
13862 rc = stopAllChannels();
13863 if (rc < 0) {
13864 LOGE("stopAllChannels failed");
13865 return rc;
13866 }
13867
13868 rc = notifyErrorForPendingRequests();
13869 if (rc < 0) {
13870 LOGE("notifyErrorForPendingRequests failed");
13871 return rc;
13872 }
13873
13874 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13875 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%llx "
13876 "Format:%d",
13877 mStreamConfigInfo.type[i],
13878 mStreamConfigInfo.stream_sizes[i].width,
13879 mStreamConfigInfo.stream_sizes[i].height,
13880 mStreamConfigInfo.postprocess_mask[i],
13881 mStreamConfigInfo.format[i]);
13882 }
13883
13884 /* Send meta stream info once again so that ISP can start */
13885 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13886 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13887 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13888 mParameters);
13889 if (rc < 0) {
13890 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13891 }
13892
13893 rc = startAllChannels();
13894 if (rc < 0) {
13895 LOGE("startAllChannels failed");
13896 return rc;
13897 }
13898
13899 LOGD("X");
13900 return rc;
13901}
13902
13903/*===========================================================================
13904 * FUNCTION : stopAllChannels
13905 *
13906 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13907 *
13908 * PARAMETERS : None
13909 *
13910 * RETURN : NO_ERROR on success
13911 * Error codes on failure
13912 *
13913 *==========================================================================*/
13914int32_t QCamera3HardwareInterface::stopAllChannels()
13915{
13916 int32_t rc = NO_ERROR;
13917
13918 LOGD("Stopping all channels");
13919 // Stop the Streams/Channels
13920 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13921 it != mStreamInfo.end(); it++) {
13922 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13923 if (channel) {
13924 channel->stop();
13925 }
13926 (*it)->status = INVALID;
13927 }
13928
13929 if (mSupportChannel) {
13930 mSupportChannel->stop();
13931 }
13932 if (mAnalysisChannel) {
13933 mAnalysisChannel->stop();
13934 }
13935 if (mRawDumpChannel) {
13936 mRawDumpChannel->stop();
13937 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013938 if (mHdrPlusRawSrcChannel) {
13939 mHdrPlusRawSrcChannel->stop();
13940 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013941 if (mMetadataChannel) {
13942 /* If mStreamInfo is not empty, there is a metadata stream */
13943 mMetadataChannel->stop();
13944 }
13945
13946 LOGD("All channels stopped");
13947 return rc;
13948}
13949
13950/*===========================================================================
13951 * FUNCTION : startAllChannels
13952 *
13953 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13954 *
13955 * PARAMETERS : None
13956 *
13957 * RETURN : NO_ERROR on success
13958 * Error codes on failure
13959 *
13960 *==========================================================================*/
13961int32_t QCamera3HardwareInterface::startAllChannels()
13962{
13963 int32_t rc = NO_ERROR;
13964
13965 LOGD("Start all channels ");
13966 // Start the Streams/Channels
13967 if (mMetadataChannel) {
13968 /* If mStreamInfo is not empty, there is a metadata stream */
13969 rc = mMetadataChannel->start();
13970 if (rc < 0) {
13971 LOGE("META channel start failed");
13972 return rc;
13973 }
13974 }
13975 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13976 it != mStreamInfo.end(); it++) {
13977 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13978 if (channel) {
13979 rc = channel->start();
13980 if (rc < 0) {
13981 LOGE("channel start failed");
13982 return rc;
13983 }
13984 }
13985 }
13986 if (mAnalysisChannel) {
13987 mAnalysisChannel->start();
13988 }
13989 if (mSupportChannel) {
13990 rc = mSupportChannel->start();
13991 if (rc < 0) {
13992 LOGE("Support channel start failed");
13993 return rc;
13994 }
13995 }
13996 if (mRawDumpChannel) {
13997 rc = mRawDumpChannel->start();
13998 if (rc < 0) {
13999 LOGE("RAW dump channel start failed");
14000 return rc;
14001 }
14002 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014003 if (mHdrPlusRawSrcChannel) {
14004 rc = mHdrPlusRawSrcChannel->start();
14005 if (rc < 0) {
14006 LOGE("HDR+ RAW channel start failed");
14007 return rc;
14008 }
14009 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014010
14011 LOGD("All channels started");
14012 return rc;
14013}
14014
14015/*===========================================================================
14016 * FUNCTION : notifyErrorForPendingRequests
14017 *
14018 * DESCRIPTION: This function sends error for all the pending requests/buffers
14019 *
14020 * PARAMETERS : None
14021 *
14022 * RETURN : Error codes
14023 * NO_ERROR on success
14024 *
14025 *==========================================================================*/
14026int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14027{
Emilian Peev7650c122017-01-19 08:24:33 -080014028 notifyErrorFoPendingDepthData(mDepthChannel);
14029
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014030 auto pendingRequest = mPendingRequestsList.begin();
14031 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014032
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014033 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14034 // buffers (for which buffers aren't sent yet).
14035 while (pendingRequest != mPendingRequestsList.end() ||
14036 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14037 if (pendingRequest == mPendingRequestsList.end() ||
14038 pendingBuffer->frame_number < pendingRequest->frame_number) {
14039 // If metadata for this frame was sent, notify about a buffer error and return buffers
14040 // with error.
14041 for (auto &info : pendingBuffer->mPendingBufferList) {
14042 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014043 camera3_notify_msg_t notify_msg;
14044 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14045 notify_msg.type = CAMERA3_MSG_ERROR;
14046 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014047 notify_msg.message.error.error_stream = info.stream;
14048 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014049 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014050
14051 camera3_stream_buffer_t buffer = {};
14052 buffer.acquire_fence = -1;
14053 buffer.release_fence = -1;
14054 buffer.buffer = info.buffer;
14055 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14056 buffer.stream = info.stream;
14057 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014058 }
14059
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014060 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14061 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14062 pendingBuffer->frame_number > pendingRequest->frame_number) {
14063 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014064 camera3_notify_msg_t notify_msg;
14065 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14066 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014067 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14068 notify_msg.message.error.error_stream = nullptr;
14069 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014070 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014071
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014072 if (pendingRequest->input_buffer != nullptr) {
14073 camera3_capture_result result = {};
14074 result.frame_number = pendingRequest->frame_number;
14075 result.result = nullptr;
14076 result.input_buffer = pendingRequest->input_buffer;
14077 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014078 }
14079
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014080 mShutterDispatcher.clear(pendingRequest->frame_number);
14081 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14082 } else {
14083 // If both buffers and result metadata weren't sent yet, notify about a request error
14084 // and return buffers with error.
14085 for (auto &info : pendingBuffer->mPendingBufferList) {
14086 camera3_notify_msg_t notify_msg;
14087 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14088 notify_msg.type = CAMERA3_MSG_ERROR;
14089 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14090 notify_msg.message.error.error_stream = info.stream;
14091 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14092 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014093
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014094 camera3_stream_buffer_t buffer = {};
14095 buffer.acquire_fence = -1;
14096 buffer.release_fence = -1;
14097 buffer.buffer = info.buffer;
14098 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14099 buffer.stream = info.stream;
14100 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14101 }
14102
14103 if (pendingRequest->input_buffer != nullptr) {
14104 camera3_capture_result result = {};
14105 result.frame_number = pendingRequest->frame_number;
14106 result.result = nullptr;
14107 result.input_buffer = pendingRequest->input_buffer;
14108 orchestrateResult(&result);
14109 }
14110
14111 mShutterDispatcher.clear(pendingRequest->frame_number);
14112 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14113 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014114 }
14115 }
14116
14117 /* Reset pending frame Drop list and requests list */
14118 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014119 mShutterDispatcher.clear();
14120 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014121 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014122 LOGH("Cleared all the pending buffers ");
14123
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014124 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014125}
14126
14127bool QCamera3HardwareInterface::isOnEncoder(
14128 const cam_dimension_t max_viewfinder_size,
14129 uint32_t width, uint32_t height)
14130{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014131 return ((width > (uint32_t)max_viewfinder_size.width) ||
14132 (height > (uint32_t)max_viewfinder_size.height) ||
14133 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14134 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014135}
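// Worked example (values are illustrative): with a max_viewfinder_size of 1920x1080,
// isOnEncoder() returns true for a 3840x2160 stream (it exceeds the viewfinder bound
// and the 4K thresholds) and false for 1280x720, which is how callers decide whether
// a stream should go on the encoder path.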
14136
14137/*===========================================================================
14138 * FUNCTION : setBundleInfo
14139 *
14140 * DESCRIPTION: Set bundle info for all streams that are bundle.
14141 *
14142 * PARAMETERS : None
14143 *
14144 * RETURN : NO_ERROR on success
14145 * Error codes on failure
14146 *==========================================================================*/
14147int32_t QCamera3HardwareInterface::setBundleInfo()
14148{
14149 int32_t rc = NO_ERROR;
14150
14151 if (mChannelHandle) {
14152 cam_bundle_config_t bundleInfo;
14153 memset(&bundleInfo, 0, sizeof(bundleInfo));
14154 rc = mCameraHandle->ops->get_bundle_info(
14155 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14156 if (rc != NO_ERROR) {
14157 LOGE("get_bundle_info failed");
14158 return rc;
14159 }
14160 if (mAnalysisChannel) {
14161 mAnalysisChannel->setBundleInfo(bundleInfo);
14162 }
14163 if (mSupportChannel) {
14164 mSupportChannel->setBundleInfo(bundleInfo);
14165 }
14166 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14167 it != mStreamInfo.end(); it++) {
14168 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14169 channel->setBundleInfo(bundleInfo);
14170 }
14171 if (mRawDumpChannel) {
14172 mRawDumpChannel->setBundleInfo(bundleInfo);
14173 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014174 if (mHdrPlusRawSrcChannel) {
14175 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14176 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014177 }
14178
14179 return rc;
14180}
14181
14182/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014183 * FUNCTION : setInstantAEC
14184 *
14185 * DESCRIPTION: Set Instant AEC related params.
14186 *
14187 * PARAMETERS :
14188 * @meta: CameraMetadata reference
14189 *
14190 * RETURN : NO_ERROR on success
14191 * Error codes on failure
14192 *==========================================================================*/
14193int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14194{
14195 int32_t rc = NO_ERROR;
14196 uint8_t val = 0;
14197 char prop[PROPERTY_VALUE_MAX];
14198
14199 // First try to configure instant AEC from framework metadata
14200 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14201 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14202 }
14203
14204 // If framework did not set this value, try to read from set prop.
14205 if (val == 0) {
14206 memset(prop, 0, sizeof(prop));
14207 property_get("persist.camera.instant.aec", prop, "0");
14208 val = (uint8_t)atoi(prop);
14209 }
14210
14211 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14212 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14213 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14214 mInstantAEC = val;
14215 mInstantAECSettledFrameNumber = 0;
14216 mInstantAecFrameIdxCount = 0;
14217 LOGH("instantAEC value set %d",val);
14218 if (mInstantAEC) {
14219 memset(prop, 0, sizeof(prop));
14220 property_get("persist.camera.ae.instant.bound", prop, "10");
14221 int32_t aec_frame_skip_cnt = atoi(prop);
14222 if (aec_frame_skip_cnt >= 0) {
14223 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14224 } else {
14225 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14226 rc = BAD_VALUE;
14227 }
14228 }
14229 } else {
14230 LOGE("Bad instant aec value set %d", val);
14231 rc = BAD_VALUE;
14232 }
14233 return rc;
14234}
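// Usage note: instant AEC can be requested either through the QCAMERA3_INSTANT_AEC_MODE
// vendor tag or, if the framework leaves it at 0, through "persist.camera.instant.aec";
// only values in the range checked above (CAM_AEC_NORMAL_CONVERGENCE up to, but not
// including, CAM_AEC_CONVERGENCE_MAX) are accepted. "persist.camera.ae.instant.bound"
// (default 10) then bounds how many frames are skipped from display while AEC settles,
// judging from mAecSkipDisplayFrameBound above.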
14235
14236/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014237 * FUNCTION : get_num_overall_buffers
14238 *
14239 * DESCRIPTION: Get the total number of pending buffers across all requests.
14240 *
14241 * PARAMETERS : None
14242 *
14243 * RETURN : Number of overall pending buffers
14244 *
14245 *==========================================================================*/
14246uint32_t PendingBuffersMap::get_num_overall_buffers()
14247{
14248 uint32_t sum_buffers = 0;
14249 for (auto &req : mPendingBuffersInRequest) {
14250 sum_buffers += req.mPendingBufferList.size();
14251 }
14252 return sum_buffers;
14253}
14254
14255/*===========================================================================
14256 * FUNCTION : removeBuf
14257 *
14258 * DESCRIPTION: Remove a matching buffer from tracker.
14259 *
14260 * PARAMETERS : @buffer: image buffer for the callback
14261 *
14262 * RETURN : None
14263 *
14264 *==========================================================================*/
14265void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14266{
14267 bool buffer_found = false;
14268 for (auto req = mPendingBuffersInRequest.begin();
14269 req != mPendingBuffersInRequest.end(); req++) {
14270 for (auto k = req->mPendingBufferList.begin();
14271 k != req->mPendingBufferList.end(); k++ ) {
14272 if (k->buffer == buffer) {
14273 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14274 req->frame_number, buffer);
14275 k = req->mPendingBufferList.erase(k);
14276 if (req->mPendingBufferList.empty()) {
14277 // Remove this request from Map
14278 req = mPendingBuffersInRequest.erase(req);
14279 }
14280 buffer_found = true;
14281 break;
14282 }
14283 }
14284 if (buffer_found) {
14285 break;
14286 }
14287 }
14288 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14289 get_num_overall_buffers());
14290}
14291
14292/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014293 * FUNCTION : getBufErrStatus
14294 *
14295 * DESCRIPTION: get buffer error status
14296 *
14297 * PARAMETERS : @buffer: buffer handle
14298 *
14299 * RETURN : Error status
14300 *
14301 *==========================================================================*/
14302int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14303{
14304 for (auto& req : mPendingBuffersInRequest) {
14305 for (auto& k : req.mPendingBufferList) {
14306 if (k.buffer == buffer)
14307 return k.bufStatus;
14308 }
14309 }
14310 return CAMERA3_BUFFER_STATUS_OK;
14311}
14312
14313/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014314 * FUNCTION : setPAAFSupport
14315 *
14316 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14317 * feature mask according to stream type and filter
14318 * arrangement
14319 *
14320 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14321 * @stream_type: stream type
14322 * @filter_arrangement: filter arrangement
14323 *
14324 * RETURN : None
14325 *==========================================================================*/
14326void QCamera3HardwareInterface::setPAAFSupport(
14327 cam_feature_mask_t& feature_mask,
14328 cam_stream_type_t stream_type,
14329 cam_color_filter_arrangement_t filter_arrangement)
14330{
Thierry Strudel3d639192016-09-09 11:52:26 -070014331 switch (filter_arrangement) {
14332 case CAM_FILTER_ARRANGEMENT_RGGB:
14333 case CAM_FILTER_ARRANGEMENT_GRBG:
14334 case CAM_FILTER_ARRANGEMENT_GBRG:
14335 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014336 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14337 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014338 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014339 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14340 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014341 }
14342 break;
14343 case CAM_FILTER_ARRANGEMENT_Y:
14344 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14345 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14346 }
14347 break;
14348 default:
14349 break;
14350 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014351 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14352 feature_mask, stream_type, filter_arrangement);
14353
14354
Thierry Strudel3d639192016-09-09 11:52:26 -070014355}
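// Worked example: for a Bayer sensor (e.g. CAM_FILTER_ARRANGEMENT_RGGB) the PAAF bit
// is added for PREVIEW, ANALYSIS and VIDEO streams unless CAM_QTI_FEATURE_PPEISCORE
// is already set in the mask; for a mono sensor (CAM_FILTER_ARRANGEMENT_Y) only the
// ANALYSIS stream gets CAM_QCOM_FEATURE_PAAF; every other stream/filter combination
// leaves feature_mask untouched.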
14356
14357/*===========================================================================
14358* FUNCTION : getSensorMountAngle
14359*
14360* DESCRIPTION: Retrieve sensor mount angle
14361*
14362* PARAMETERS : None
14363*
14364* RETURN : sensor mount angle in uint32_t
14365*==========================================================================*/
14366uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14367{
14368 return gCamCapability[mCameraId]->sensor_mount_angle;
14369}
14370
14371/*===========================================================================
14372* FUNCTION : getRelatedCalibrationData
14373*
14374* DESCRIPTION: Retrieve related system calibration data
14375*
14376* PARAMETERS : None
14377*
14378* RETURN : Pointer of related system calibration data
14379*==========================================================================*/
14380const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14381{
14382 return (const cam_related_system_calibration_data_t *)
14383 &(gCamCapability[mCameraId]->related_cam_calibration);
14384}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014385
14386/*===========================================================================
14387 * FUNCTION : is60HzZone
14388 *
14389 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
14390 *
14391 * PARAMETERS : None
14392 *
14393 * RETURN : True if in 60Hz zone, False otherwise
14394 *==========================================================================*/
14395bool QCamera3HardwareInterface::is60HzZone()
14396{
14397 time_t t = time(NULL);
14398 struct tm lt;
14399
14400 struct tm* r = localtime_r(&t, &lt);
14401
14402 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14403 return true;
14404 else
14405 return false;
14406}
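// Worked example: the check treats UTC offsets at or below -02:00 (the Americas) and
// at or above +08:00 (East Asia, Australia) as 60 Hz regions, so tm_gmtoff = -8*3600
// (US Pacific) yields true while tm_gmtoff = 1*3600 (Central Europe) yields false;
// if localtime_r() fails, the code conservatively assumes a 60 Hz zone.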
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014407
14408/*===========================================================================
14409 * FUNCTION : adjustBlackLevelForCFA
14410 *
14411 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14412 * of bayer CFA (Color Filter Array).
14413 *
14414 * PARAMETERS : @input: black level pattern in the order of RGGB
14415 * @output: black level pattern in the order of CFA
14416 * @color_arrangement: CFA color arrangement
14417 *
14418 * RETURN : None
14419 *==========================================================================*/
14420template<typename T>
14421void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14422 T input[BLACK_LEVEL_PATTERN_CNT],
14423 T output[BLACK_LEVEL_PATTERN_CNT],
14424 cam_color_filter_arrangement_t color_arrangement)
14425{
14426 switch (color_arrangement) {
14427 case CAM_FILTER_ARRANGEMENT_GRBG:
14428 output[0] = input[1];
14429 output[1] = input[0];
14430 output[2] = input[3];
14431 output[3] = input[2];
14432 break;
14433 case CAM_FILTER_ARRANGEMENT_GBRG:
14434 output[0] = input[2];
14435 output[1] = input[3];
14436 output[2] = input[0];
14437 output[3] = input[1];
14438 break;
14439 case CAM_FILTER_ARRANGEMENT_BGGR:
14440 output[0] = input[3];
14441 output[1] = input[2];
14442 output[2] = input[1];
14443 output[3] = input[0];
14444 break;
14445 case CAM_FILTER_ARRANGEMENT_RGGB:
14446 output[0] = input[0];
14447 output[1] = input[1];
14448 output[2] = input[2];
14449 output[3] = input[3];
14450 break;
14451 default:
        LOGE("Invalid color arrangement for deriving dynamic black level");
14453 break;
14454 }
14455}
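
// Editor's note (illustrative example, not part of the original HAL code): given a
// dynamic black level reported in RGGB order, the template above simply permutes the
// four entries so that they line up with the sensor's CFA readout order. A minimal
// sketch for a GRBG sensor (member qualification omitted for brevity):
//
//     float input[BLACK_LEVEL_PATTERN_CNT]  = {10.f, 20.f, 30.f, 40.f};  // R, Gr, Gb, B
//     float output[BLACK_LEVEL_PATTERN_CNT] = {};
//     adjustBlackLevelForCFA(input, output, CAM_FILTER_ARRANGEMENT_GRBG);
//     // output == {20.f, 10.f, 40.f, 30.f}, i.e. Gr, R, B, Gb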
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014456
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014457void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14458 CameraMetadata &resultMetadata,
14459 std::shared_ptr<metadata_buffer_t> settings)
14460{
14461 if (settings == nullptr) {
14462 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14463 return;
14464 }
14465
14466 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14467 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14468 }
14469
14470 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14471 String8 str((const char *)gps_methods);
14472 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14473 }
14474
14475 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14476 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14477 }
14478
14479 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14480 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14481 }
14482
14483 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14484 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14485 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14486 }
14487
14488 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14489 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14490 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14491 }
14492
14493 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14494 int32_t fwk_thumb_size[2];
14495 fwk_thumb_size[0] = thumb_size->width;
14496 fwk_thumb_size[1] = thumb_size->height;
14497 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14498 }
14499
14500 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14501 uint8_t fwk_intent = intent[0];
14502 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14503 }
14504}
14505
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014506bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14507 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14508 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014509{
14510 if (hdrPlusRequest == nullptr) return false;
14511
14512 // Check noise reduction mode is high quality.
14513 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14514 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14515 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
        ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is missing or not HQ.",
                __FUNCTION__);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014518 return false;
14519 }
14520
14521 // Check edge mode is high quality.
14522 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14523 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14524 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14525 return false;
14526 }
14527
14528 if (request.num_output_buffers != 1 ||
14529 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
        ALOGD("%s: Not an HDR+ request: Only a single JPEG output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014531 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14532 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
                    request.output_buffers[i].stream->width,
                    request.output_buffers[i].stream->height,
                    request.output_buffers[i].stream->format);
14536 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014537 return false;
14538 }
14539
14540 // Get a YUV buffer from pic channel.
14541 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14542 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14543 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14544 if (res != OK) {
14545 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14546 __FUNCTION__, strerror(-res), res);
14547 return false;
14548 }
14549
14550 pbcamera::StreamBuffer buffer;
14551 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014552 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014553 buffer.data = yuvBuffer->buffer;
14554 buffer.dataSize = yuvBuffer->frame_len;
14555
14556 pbcamera::CaptureRequest pbRequest;
14557 pbRequest.id = request.frame_number;
14558 pbRequest.outputBuffers.push_back(buffer);
14559
14560 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen17cec362017-07-05 17:10:31 -070014561 res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014562 if (res != OK) {
14563 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14564 strerror(-res), res);
14565 return false;
14566 }
14567
14568 hdrPlusRequest->yuvBuffer = yuvBuffer;
14569 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14570
14571 return true;
14572}
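
// Editor's note (usage sketch with assumed names, not the actual request path): a
// caller would typically try the HDR+ path first and track the pending request by
// frame number so that onCaptureResult() can find it later; `requestSettings` below
// is a hypothetical std::shared_ptr<metadata_buffer_t> holding the translated HAL
// settings for this request.
//
//     HdrPlusPendingRequest pendingHdrPlusRequest;
//     if (trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, metadata)) {
//         pendingHdrPlusRequest.settings = requestSettings;
//         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
//         mHdrPlusPendingRequests.emplace(request->frame_number, pendingHdrPlusRequest);
//     } else {
//         // Fall back to submitting the request through the regular capture path.
//     }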
14573
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014574status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14575{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014576 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14577 return OK;
14578 }
14579
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014580 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014581 if (res != OK) {
14582 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14583 strerror(-res), res);
14584 return res;
14585 }
14586 gHdrPlusClientOpening = true;
14587
14588 return OK;
14589}
14590
Chien-Yu Chenee335912017-02-09 17:53:20 -080014591status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14592{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014593 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014594
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014595 if (mHdrPlusModeEnabled) {
14596 return OK;
14597 }
14598
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014599 // Check if gHdrPlusClient is opened or being opened.
14600 if (gHdrPlusClient == nullptr) {
14601 if (gHdrPlusClientOpening) {
14602 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14603 return OK;
14604 }
14605
14606 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014607 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014608 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14609 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014610 return res;
14611 }
14612
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014613 // When opening HDR+ client completes, HDR+ mode will be enabled.
14614 return OK;
14615
Chien-Yu Chenee335912017-02-09 17:53:20 -080014616 }
14617
14618 // Configure stream for HDR+.
14619 res = configureHdrPlusStreamsLocked();
14620 if (res != OK) {
14621 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014622 return res;
14623 }
14624
14625 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14626 res = gHdrPlusClient->setZslHdrPlusMode(true);
14627 if (res != OK) {
14628 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014629 return res;
14630 }
14631
14632 mHdrPlusModeEnabled = true;
14633 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14634
14635 return OK;
14636}
14637
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014638void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
14639{
14640 if (gHdrPlusClientOpening) {
14641 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
14642 }
14643}
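
// Editor's note (usage sketch): callers that need to guarantee the HDR+ client is no
// longer in the "opening" state block on the open condition while holding
// gHdrPlusClientLock as a std::unique_lock, e.g.:
//
//     std::unique_lock<std::mutex> lock(gHdrPlusClientLock);
//     finishHdrPlusClientOpeningLocked(lock);
//     // gHdrPlusClientOpening is now false; gHdrPlusClient may still be null if
//     // opening failed or was never started.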
14644
Chien-Yu Chenee335912017-02-09 17:53:20 -080014645void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14646{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014647 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014648 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014649 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14650 if (res != OK) {
14651 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14652 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014653
14654 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014655 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014656 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014657 }
14658
14659 mHdrPlusModeEnabled = false;
14660 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14661}
14662
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070014663bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
14664{
14665 // Check if mPictureChannel is valid.
14666 // TODO: Support YUV (b/36693254) and RAW (b/36690506)
14667 if (mPictureChannel == nullptr) {
14668 return false;
14669 }
14670
14671 return true;
14672}
14673
Chien-Yu Chenee335912017-02-09 17:53:20 -080014674status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014675{
14676 pbcamera::InputConfiguration inputConfig;
14677 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14678 status_t res = OK;
14679
14680 // Configure HDR+ client streams.
14681 // Get input config.
14682 if (mHdrPlusRawSrcChannel) {
14683 // HDR+ input buffers will be provided by HAL.
14684 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14685 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14686 if (res != OK) {
            LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
14688 __FUNCTION__, strerror(-res), res);
14689 return res;
14690 }
14691
14692 inputConfig.isSensorInput = false;
14693 } else {
14694 // Sensor MIPI will send data to Easel.
14695 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014696 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014697 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14698 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14699 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14700 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14701 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
Yin-Chia Yeheeb10422017-05-23 11:37:46 -070014702 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014703 if (mSensorModeInfo.num_raw_bits != 10) {
14704 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14705 mSensorModeInfo.num_raw_bits);
14706 return BAD_VALUE;
14707 }
14708
14709 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014710 }
14711
14712 // Get output configurations.
14713 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014714 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014715
14716 // Easel may need to output YUV output buffers if mPictureChannel was created.
14717 pbcamera::StreamConfiguration yuvOutputConfig;
14718 if (mPictureChannel != nullptr) {
14719 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14720 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14721 if (res != OK) {
            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14723 __FUNCTION__, strerror(-res), res);
14724
14725 return res;
14726 }
14727
14728 outputStreamConfigs.push_back(yuvOutputConfig);
14729 }
14730
14731 // TODO: consider other channels for YUV output buffers.
14732
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014733 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014734 if (res != OK) {
        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14736 strerror(-res), res);
14737 return res;
14738 }
14739
14740 return OK;
14741}
14742
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014743void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
14744{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014745 if (client == nullptr) {
14746 ALOGE("%s: Opened client is null.", __FUNCTION__);
14747 return;
14748 }
14749
Chien-Yu Chene96475e2017-04-11 11:53:26 -070014750 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014751 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14752
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014753 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014754 if (!gHdrPlusClientOpening) {
14755 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
14756 return;
14757 }
14758
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014759 gHdrPlusClient = std::move(client);
14760 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014761 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014762
14763 // Set static metadata.
14764 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14765 if (res != OK) {
14766 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14767 __FUNCTION__, strerror(-res), res);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014768 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014769 gHdrPlusClient = nullptr;
14770 return;
14771 }
14772
14773 // Enable HDR+ mode.
14774 res = enableHdrPlusModeLocked();
14775 if (res != OK) {
        LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14777 }
14778}
14779
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014780void QCamera3HardwareInterface::onOpenFailed(status_t err)
14781{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014782 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014783 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014784 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014785 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014786}
14787
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014788void QCamera3HardwareInterface::onFatalError()
14789{
14790 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
14791
14792 // Set HAL state to error.
14793 pthread_mutex_lock(&mMutex);
14794 mState = ERROR;
14795 pthread_mutex_unlock(&mMutex);
14796
14797 handleCameraDeviceError();
14798}
14799
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -070014800void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
14801{
14802 ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
14803 __LINE__, requestId, apSensorTimestampNs);
14804
14805 mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
14806}
14807
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014808void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014809 const camera_metadata_t &resultMetadata)
14810{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014811 if (result != nullptr) {
14812 if (result->outputBuffers.size() != 1) {
            ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
14814 result->outputBuffers.size());
14815 return;
14816 }
14817
14818 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14819 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14820 result->outputBuffers[0].streamId);
14821 return;
14822 }
14823
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014824 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014825 HdrPlusPendingRequest pendingRequest;
14826 {
14827 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            if (req == mHdrPlusPendingRequests.end()) {
                ALOGE("%s: Couldn't find pending HDR+ request %d.", __FUNCTION__,
                        result->requestId);
                return;
            }
            pendingRequest = req->second;
14830 }
14831
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014832 // Update the result metadata with the settings of the HDR+ still capture request because
14833 // the result metadata belongs to a ZSL buffer.
14834 CameraMetadata metadata;
14835 metadata = &resultMetadata;
14836 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14837 camera_metadata_t* updatedResultMetadata = metadata.release();
14838
14839 QCamera3PicChannel *picChannel =
14840 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14841
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014842 // Check if dumping HDR+ YUV output is enabled.
14843 char prop[PROPERTY_VALUE_MAX];
14844 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14845 bool dumpYuvOutput = atoi(prop);
14846
14847 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014848 // Dump yuv buffer to a ppm file.
14849 pbcamera::StreamConfiguration outputConfig;
14850 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14851 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14852 if (rc == OK) {
14853 char buf[FILENAME_MAX] = {};
14854 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14855 result->requestId, result->outputBuffers[0].streamId,
14856 outputConfig.image.width, outputConfig.image.height);
14857
14858 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14859 } else {
14860 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14861 __FUNCTION__, strerror(-rc), rc);
14862 }
14863 }
14864
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014865 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14866 auto halMetadata = std::make_shared<metadata_buffer_t>();
14867 clear_metadata_buffer(halMetadata.get());
14868
14869 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14870 // encoding.
14871 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14872 halStreamId, /*minFrameDuration*/0);
14873 if (res == OK) {
14874 // Return the buffer to pic channel for encoding.
14875 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14876 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14877 halMetadata);
14878 } else {
14879 // Return the buffer without encoding.
14880 // TODO: This should not happen but we may want to report an error buffer to camera
14881 // service.
14882 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14883 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14884 strerror(-res), res);
14885 }
14886
14887 // Send HDR+ metadata to framework.
14888 {
14889 pthread_mutex_lock(&mMutex);
14890
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014891 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
14892 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014893 pthread_mutex_unlock(&mMutex);
14894 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014895
14896 // Remove the HDR+ pending request.
14897 {
14898 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14899 auto req = mHdrPlusPendingRequests.find(result->requestId);
14900 mHdrPlusPendingRequests.erase(req);
14901 }
14902 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014903}
14904
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014905void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
14906{
14907 if (failedResult == nullptr) {
14908 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
14909 return;
14910 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014911
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014912 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014913
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014914 // Remove the pending HDR+ request.
14915 {
14916 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
        if (pendingRequest != mHdrPlusPendingRequests.end()) {
            // Return the buffer to pic channel.
            QCamera3PicChannel *picChannel =
                    (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
            picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());

            mHdrPlusPendingRequests.erase(pendingRequest);
        } else {
            ALOGE("%s: Couldn't find pending HDR+ request %d.", __FUNCTION__,
                    failedResult->requestId);
        }
14925 }
14926
14927 pthread_mutex_lock(&mMutex);
14928
14929 // Find the pending buffers.
14930 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
14931 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14932 if (pendingBuffers->frame_number == failedResult->requestId) {
14933 break;
14934 }
14935 pendingBuffers++;
14936 }
14937
14938 // Send out buffer errors for the pending buffers.
14939 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14940 std::vector<camera3_stream_buffer_t> streamBuffers;
14941 for (auto &buffer : pendingBuffers->mPendingBufferList) {
14942 // Prepare a stream buffer.
14943 camera3_stream_buffer_t streamBuffer = {};
14944 streamBuffer.stream = buffer.stream;
14945 streamBuffer.buffer = buffer.buffer;
14946 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14947 streamBuffer.acquire_fence = -1;
14948 streamBuffer.release_fence = -1;
14949
14950 streamBuffers.push_back(streamBuffer);
14951
14952 // Send out error buffer event.
14953 camera3_notify_msg_t notify_msg = {};
14954 notify_msg.type = CAMERA3_MSG_ERROR;
14955 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
14956 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
14957 notify_msg.message.error.error_stream = buffer.stream;
14958
14959 orchestrateNotify(&notify_msg);
14960 }
14961
14962 camera3_capture_result_t result = {};
14963 result.frame_number = pendingBuffers->frame_number;
14964 result.num_output_buffers = streamBuffers.size();
14965 result.output_buffers = &streamBuffers[0];
14966
14967 // Send out result with buffer errors.
14968 orchestrateResult(&result);
14969
14970 // Remove pending buffers.
14971 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
14972 }
14973
14974 // Remove pending request.
14975 auto halRequest = mPendingRequestsList.begin();
14976 while (halRequest != mPendingRequestsList.end()) {
14977 if (halRequest->frame_number == failedResult->requestId) {
14978 mPendingRequestsList.erase(halRequest);
14979 break;
14980 }
14981 halRequest++;
14982 }
14983
14984 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014985}
14986
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014987
14988ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
14989 mParent(parent) {}
14990
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014991void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014992{
14993 std::lock_guard<std::mutex> lock(mLock);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014994
14995 if (isReprocess) {
14996 mReprocessShutters.emplace(frameNumber, Shutter());
14997 } else {
14998 mShutters.emplace(frameNumber, Shutter());
14999 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015000}
15001
15002void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
15003{
15004 std::lock_guard<std::mutex> lock(mLock);
15005
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015006 std::map<uint32_t, Shutter> *shutters = nullptr;
15007
15008 // Find the shutter entry.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015009 auto shutter = mShutters.find(frameNumber);
15010 if (shutter == mShutters.end()) {
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015011 shutter = mReprocessShutters.find(frameNumber);
15012 if (shutter == mReprocessShutters.end()) {
15013 // Shutter was already sent.
15014 return;
15015 }
15016 shutters = &mReprocessShutters;
15017 } else {
15018 shutters = &mShutters;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015019 }
15020
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015021 // Make this frame's shutter ready.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015022 shutter->second.ready = true;
15023 shutter->second.timestamp = timestamp;
15024
    // Iterate through the shutters and send out shutters until reaching one that's not ready yet.
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015026 shutter = shutters->begin();
15027 while (shutter != shutters->end()) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015028 if (!shutter->second.ready) {
15029 // If this shutter is not ready, the following shutters can't be sent.
15030 break;
15031 }
15032
15033 camera3_notify_msg_t msg = {};
15034 msg.type = CAMERA3_MSG_SHUTTER;
15035 msg.message.shutter.frame_number = shutter->first;
15036 msg.message.shutter.timestamp = shutter->second.timestamp;
15037 mParent->orchestrateNotify(&msg);
15038
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015039 shutter = shutters->erase(shutter);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015040 }
15041}
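
// Editor's note (illustrative sequence, with hypothetical frame numbers and
// timestamps): within each map, shutters are delivered strictly in frame-number
// order, so a ready shutter is held back until every earlier expected shutter is
// ready:
//
//     dispatcher.expectShutter(100, /*isReprocess*/false);
//     dispatcher.expectShutter(101, /*isReprocess*/false);
//     dispatcher.markShutterReady(101, t101);  // held: frame 100 is not ready yet
//     dispatcher.markShutterReady(100, t100);  // notifies shutter 100, then 101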
15042
15043void ShutterDispatcher::clear(uint32_t frameNumber)
15044{
15045 std::lock_guard<std::mutex> lock(mLock);
15046 mShutters.erase(frameNumber);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015047 mReprocessShutters.erase(frameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015048}
15049
15050void ShutterDispatcher::clear()
15051{
15052 std::lock_guard<std::mutex> lock(mLock);
15053
15054 // Log errors for stale shutters.
15055 for (auto &shutter : mShutters) {
15056 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
15057 __FUNCTION__, shutter.first, shutter.second.ready,
15058 shutter.second.timestamp);
15059 }
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015060
15061 // Log errors for stale reprocess shutters.
15062 for (auto &shutter : mReprocessShutters) {
15063 ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15064 __FUNCTION__, shutter.first, shutter.second.ready,
15065 shutter.second.timestamp);
15066 }
15067
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015068 mShutters.clear();
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015069 mReprocessShutters.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015070}
15071
15072OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
15073 mParent(parent) {}
15074
15075status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
15076{
15077 std::lock_guard<std::mutex> lock(mLock);
15078 mStreamBuffers.clear();
15079 if (!streamList) {
15080 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15081 return -EINVAL;
15082 }
15083
15084 // Create a "frame-number -> buffer" map for each stream.
15085 for (uint32_t i = 0; i < streamList->num_streams; i++) {
15086 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15087 }
15088
15089 return OK;
15090}
15091
15092status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15093{
15094 std::lock_guard<std::mutex> lock(mLock);
15095
15096 // Find the "frame-number -> buffer" map for the stream.
15097 auto buffers = mStreamBuffers.find(stream);
15098 if (buffers == mStreamBuffers.end()) {
15099 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15100 return -EINVAL;
15101 }
15102
15103 // Create an unready buffer for this frame number.
15104 buffers->second.emplace(frameNumber, Buffer());
15105 return OK;
15106}
15107
15108void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15109 const camera3_stream_buffer_t &buffer)
15110{
15111 std::lock_guard<std::mutex> lock(mLock);
15112
15113 // Find the frame number -> buffer map for the stream.
15114 auto buffers = mStreamBuffers.find(buffer.stream);
15115 if (buffers == mStreamBuffers.end()) {
15116 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15117 return;
15118 }
15119
    // Find the unready buffer for this frame number and mark it ready.
15121 auto pendingBuffer = buffers->second.find(frameNumber);
15122 if (pendingBuffer == buffers->second.end()) {
15123 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15124 return;
15125 }
15126
15127 pendingBuffer->second.ready = true;
15128 pendingBuffer->second.buffer = buffer;
15129
    // Iterate through the buffers and send out buffers until reaching one that's not ready yet.
15131 pendingBuffer = buffers->second.begin();
15132 while (pendingBuffer != buffers->second.end()) {
15133 if (!pendingBuffer->second.ready) {
15134 // If this buffer is not ready, the following buffers can't be sent.
15135 break;
15136 }
15137
15138 camera3_capture_result_t result = {};
15139 result.frame_number = pendingBuffer->first;
15140 result.num_output_buffers = 1;
15141 result.output_buffers = &pendingBuffer->second.buffer;
15142
        // Send out the capture result with this ready buffer.
15144 mParent->orchestrateResult(&result);
15145
15146 pendingBuffer = buffers->second.erase(pendingBuffer);
15147 }
15148}
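
// Editor's note (illustrative sequence, mirroring the ShutterDispatcher example
// above; `stream` and the buffers are hypothetical): output buffers are returned per
// stream in frame-number order, so a ready buffer for a later frame waits for earlier
// expected buffers on the same stream:
//
//     dispatcher.expectBuffer(100, stream);
//     dispatcher.expectBuffer(101, stream);
//     dispatcher.markBufferReady(101, buffer101);  // held: frame 100 still pending
//     dispatcher.markBufferReady(100, buffer100);  // returns buffer 100, then 101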
15149
15150void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15151{
15152 std::lock_guard<std::mutex> lock(mLock);
15153
15154 // Log errors for stale buffers.
15155 for (auto &buffers : mStreamBuffers) {
15156 for (auto &buffer : buffers.second) {
15157 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15158 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15159 }
15160 buffers.second.clear();
15161 }
15162
15163 if (clearConfiguredStreams) {
15164 mStreamBuffers.clear();
15165 }
15166}
15167
Thierry Strudel3d639192016-09-09 11:52:26 -070015168}; //end namespace qcamera