/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
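// METADATA_MAP_SIZE computes the number of entries in a statically sized lookup table,
// e.g. METADATA_MAP_SIZE(EFFECT_MODES_MAP) yields the entry count of the table defined
// below. It is only valid for true arrays, not for pointers.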

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT              0
#define FACE_TOP               1
#define FACE_RIGHT             2
#define FACE_BOTTOM            3
#define FACE_WEIGHT            4

/* Face landmarks indices */
#define LEFT_EYE_X             0
#define LEFT_EYE_Y             1
#define RIGHT_EYE_X            2
#define RIGHT_EYE_Y            3
#define MOUTH_X                4
#define MOUTH_Y                5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.

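// The lookup tables below translate between Android camera_metadata enum values (or, for
// CDS, property strings) and the corresponding vendor cam_* enum values consumed by the
// mm-camera interface.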
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important: when mapping from HAL to Android, the lookup
 * traverses from lower to higher index, so for HAL values that map to more than one
 * Android value the traversal selects the first match found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    {  60, CAM_HFR_MODE_60FPS},
    {  90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
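// These static entry points are invoked by the framework through the camera3_device_ops
// vtable above; each wrapper recovers the QCamera3HardwareInterface instance from
// camera3_device_t::priv (set to 'this' in the constructor below) before forwarding the call.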

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
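// 0xDEADBEEF serves as the "invalid/unused" sentinel: openCamera() stores the real session id
// returned by the backend, and closeCamera() resets the entry back to 0xDEADBEEF.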

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
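// Example usage (as in openCamera() below): when gEaselProfilingEnabled is set, a call such as
//     logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
// logs the event together with the CLOCK_BOOTTIME timestamp in milliseconds.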

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger()
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcoded for now until mctl adds support for min_num_pp_bufs
    // TBD - check whether this hardcoding is needed; verify by printing whether mctl fills this to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle, /*stop_immediately*/false);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);

    mExpectedInflightDuration -= i->expectedFrameDuration;
    if (mExpectedInflightDuration < 0) {
        LOGE("Negative expected in-flight duration!");
        mExpectedInflightDuration = 0;
    }

    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
        &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParamters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find the input stream, if it exists.
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in the configuration and
     * check if unsupported sizes have been requested on any of them.
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format; check against
         * the appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from the framework is always the full active array size,
                 * but it is not clear from the spec whether the framework will
                 * always follow that. We also have logic to override to the full
                 * array size, so keep the logic lenient for the moment.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has an unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1334
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001335/*===========================================================================
1336 * FUNCTION : validateUsageFlags
1337 *
1338 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1339 *
1340 * PARAMETERS :
1341 * @stream_list : streams to be configured
1342 *
1343 * RETURN :
1344 * NO_ERROR if the usage flags are supported
1345 * error code if usage flags are not supported
1346 *
1347 *==========================================================================*/
1348int QCamera3HardwareInterface::validateUsageFlags(
1349 const camera3_stream_configuration_t* streamList)
1350{
1351 for (size_t j = 0; j < streamList->num_streams; j++) {
1352 const camera3_stream_t *newStream = streamList->streams[j];
1353
1354 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1355 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1356 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1357 continue;
1358 }
1359
Jason Leec4cf5032017-05-24 18:31:41 -07001360 // Here we only care whether it's EIS3 or not
1361 char is_type_value[PROPERTY_VALUE_MAX];
1362 property_get("persist.camera.is_type", is_type_value, "4");
1363 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1364 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1365 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1366 isType = IS_TYPE_NONE;
1367
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001368 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1369 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1370 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1371 bool forcePreviewUBWC = true;
1372 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1373 forcePreviewUBWC = false;
1374 }
1375 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001376 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001377 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001378 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001379 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001380 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001381
1382 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1383 // So color spaces will always match.
1384
1385 // Check whether underlying formats of shared streams match.
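        // For example (device dependent): if the default video format resolves
        // to a UBWC layout while the default preview format is linear YUV on a
        // given target, a stream whose usage flags request both video and
        // preview is rejected by the check below.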
1386 if (isVideo && isPreview && videoFormat != previewFormat) {
1387 LOGE("Combined video and preview usage flag is not supported");
1388 return -EINVAL;
1389 }
1390 if (isPreview && isZSL && previewFormat != zslFormat) {
1391 LOGE("Combined preview and zsl usage flag is not supported");
1392 return -EINVAL;
1393 }
1394 if (isVideo && isZSL && videoFormat != zslFormat) {
1395 LOGE("Combined video and zsl usage flag is not supported");
1396 return -EINVAL;
1397 }
1398 }
1399 return NO_ERROR;
1400}
1401
1402/*===========================================================================
1403 * FUNCTION : validateUsageFlagsForEis
1404 *
1405 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1406 *
1407 * PARAMETERS :
1408 * @stream_list : streams to be configured
1409 *
1410 * RETURN :
1411 * NO_ERROR if the usage flags are supported
1412 * error code if usage flags are not supported
1413 *
1414 *==========================================================================*/
1415int QCamera3HardwareInterface::validateUsageFlagsForEis(
1416 const camera3_stream_configuration_t* streamList)
1417{
1418 for (size_t j = 0; j < streamList->num_streams; j++) {
1419 const camera3_stream_t *newStream = streamList->streams[j];
1420
1421 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1422 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1423
1424        // Because EIS is "hard-coded" for certain use cases, and the current
1425        // implementation doesn't support shared preview and video usage on the
1426        // same stream, return failure if EIS is forced on.
1427 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1428 LOGE("Combined video and preview usage flag is not supported due to EIS");
1429 return -EINVAL;
1430 }
1431 }
1432 return NO_ERROR;
1433}
1434
Thierry Strudel3d639192016-09-09 11:52:26 -07001435/*==============================================================================
1436 * FUNCTION : isSupportChannelNeeded
1437 *
1438 * DESCRIPTION: Simple heuristic function to determine if a support channel is needed
1439 *
1440 * PARAMETERS :
1441 * @stream_list : streams to be configured
1442 * @stream_config_info : the config info for streams to be configured
1443 *
1444 * RETURN     : Boolean true/false decision
1445 *
1446 *==========================================================================*/
1447bool QCamera3HardwareInterface::isSupportChannelNeeded(
1448 camera3_stream_configuration_t *streamList,
1449 cam_stream_size_info_t stream_config_info)
1450{
1451 uint32_t i;
1452 bool pprocRequested = false;
1453 /* Check for conditions where PProc pipeline does not have any streams*/
1454 for (i = 0; i < stream_config_info.num_streams; i++) {
1455 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1456 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1457 pprocRequested = true;
1458 break;
1459 }
1460 }
1461
1462 if (pprocRequested == false )
1463 return true;
1464
1465 /* Dummy stream needed if only raw or jpeg streams present */
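    /* For example, a configuration containing only a RAW16 stream and a BLOB
     * (JPEG) stream makes every iteration below hit a break, so the function
     * returns true (a support channel is needed); any other output format
     * falls into the default case and returns false. */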
1466 for (i = 0; i < streamList->num_streams; i++) {
1467 switch(streamList->streams[i]->format) {
1468 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1469 case HAL_PIXEL_FORMAT_RAW10:
1470 case HAL_PIXEL_FORMAT_RAW16:
1471 case HAL_PIXEL_FORMAT_BLOB:
1472 break;
1473 default:
1474 return false;
1475 }
1476 }
1477 return true;
1478}
1479
1480/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001481 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001482 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001483 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001484 *
1485 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001486 *   @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001487 *
1488 * RETURN : int32_t type of status
1489 * NO_ERROR -- success
1490 *              non-zero failure code
1491 *
1492 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001493int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001494{
1495 int32_t rc = NO_ERROR;
1496
1497 cam_dimension_t max_dim = {0, 0};
1498 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1499 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1500 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1501 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1502 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1503 }
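    /* Note: max_dim is the per-axis maximum over the configured streams; e.g.
     * hypothetical 4000x2000 and 3000x3000 streams yield max_dim = 4000x3000,
     * which need not match any single stream size. */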
1504
1505 clear_metadata_buffer(mParameters);
1506
1507 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1508 max_dim);
1509 if (rc != NO_ERROR) {
1510 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1511 return rc;
1512 }
1513
1514 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1515 if (rc != NO_ERROR) {
1516 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1517 return rc;
1518 }
1519
1520 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001521 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001522
1523 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1524 mParameters);
1525 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001526 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001527 return rc;
1528 }
1529
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001530 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001531 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1532 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1533 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1534 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1535 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001536
1537 return rc;
1538}
1539
1540/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001541 * FUNCTION : getCurrentSensorModeInfo
1542 *
1543 * DESCRIPTION: Get sensor mode information that is currently selected.
1544 *
1545 * PARAMETERS :
1546 * @sensorModeInfo : sensor mode information (output)
1547 *
1548 * RETURN : int32_t type of status
1549 * NO_ERROR -- success
1550 *              non-zero failure code
1551 *
1552 *==========================================================================*/
1553int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1554{
1555 int32_t rc = NO_ERROR;
1556
1557 clear_metadata_buffer(mParameters);
1558 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1559
1560 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1561 mParameters);
1562 if (rc != NO_ERROR) {
1563        LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1564 return rc;
1565 }
1566
1567 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1568 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1569 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1570 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1571 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1572 sensorModeInfo.num_raw_bits);
1573
1574 return rc;
1575}
1576
1577/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001578 * FUNCTION : addToPPFeatureMask
1579 *
1580 * DESCRIPTION: add additional features to pp feature mask based on
1581 *              stream type and use case
1582 *
1583 * PARAMETERS :
1584 * @stream_format : stream type for feature mask
1585 * @stream_idx : stream idx within postprocess_mask list to change
1586 *
1587 * RETURN     : None
1588 *
1589 *==========================================================================*/
1590void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1591 uint32_t stream_idx)
1592{
1593 char feature_mask_value[PROPERTY_VALUE_MAX];
1594 cam_feature_mask_t feature_mask;
1595 int args_converted;
1596 int property_len;
1597
1598 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001599#ifdef _LE_CAMERA_
1600 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1601 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1602 property_len = property_get("persist.camera.hal3.feature",
1603 feature_mask_value, swtnr_feature_mask_value);
1604#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001605 property_len = property_get("persist.camera.hal3.feature",
1606 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001607#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001608 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1609 (feature_mask_value[1] == 'x')) {
1610 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1611 } else {
1612 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1613 }
1614 if (1 != args_converted) {
1615 feature_mask = 0;
1616 LOGE("Wrong feature mask %s", feature_mask_value);
1617 return;
1618 }
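    /* Parsing example: "adb shell setprop persist.camera.hal3.feature 0x10"
     * takes the hex branch above, while a plain decimal value such as "16" is
     * parsed by the %lld branch. The value 0x10 is only an illustration; the
     * meaning of individual bits is defined by the cam_feature_mask_t flags
     * (e.g. CAM_QTI_FEATURE_SW_TNR) checked in the switch below. */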
1619
1620 switch (stream_format) {
1621 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1622 /* Add LLVD to pp feature mask only if video hint is enabled */
1623 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1624 mStreamConfigInfo.postprocess_mask[stream_idx]
1625 |= CAM_QTI_FEATURE_SW_TNR;
1626 LOGH("Added SW TNR to pp feature mask");
1627 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1628 mStreamConfigInfo.postprocess_mask[stream_idx]
1629 |= CAM_QCOM_FEATURE_LLVD;
1630 LOGH("Added LLVD SeeMore to pp feature mask");
1631 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001632 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1633 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1634 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1635 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001636 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1637 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1638 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1639 CAM_QTI_FEATURE_BINNING_CORRECTION;
1640 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001641 break;
1642 }
1643 default:
1644 break;
1645 }
1646 LOGD("PP feature mask %llx",
1647 mStreamConfigInfo.postprocess_mask[stream_idx]);
1648}
1649
1650/*==============================================================================
1651 * FUNCTION : updateFpsInPreviewBuffer
1652 *
1653 * DESCRIPTION: update FPS information in preview buffer.
1654 *
1655 * PARAMETERS :
1656 * @metadata : pointer to metadata buffer
1657 * @frame_number: frame_number to look for in pending buffer list
1658 *
1659 * RETURN : None
1660 *
1661 *==========================================================================*/
1662void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1663 uint32_t frame_number)
1664{
1665 // Mark all pending buffers for this particular request
1666 // with corresponding framerate information
1667 for (List<PendingBuffersInRequest>::iterator req =
1668 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1669 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1670 for(List<PendingBufferInfo>::iterator j =
1671 req->mPendingBufferList.begin();
1672 j != req->mPendingBufferList.end(); j++) {
1673 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1674 if ((req->frame_number == frame_number) &&
1675 (channel->getStreamTypeMask() &
1676 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1677 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1678 CAM_INTF_PARM_FPS_RANGE, metadata) {
1679 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1680 struct private_handle_t *priv_handle =
1681 (struct private_handle_t *)(*(j->buffer));
1682 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1683 }
1684 }
1685 }
1686 }
1687}
1688
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001689/*==============================================================================
1690 * FUNCTION : updateTimeStampInPendingBuffers
1691 *
1692 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1693 * of a frame number
1694 *
1695 * PARAMETERS :
1696 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1697 * @timestamp : timestamp to be set
1698 *
1699 * RETURN : None
1700 *
1701 *==========================================================================*/
1702void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1703 uint32_t frameNumber, nsecs_t timestamp)
1704{
1705 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1706 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1707 if (req->frame_number != frameNumber)
1708 continue;
1709
1710 for (auto k = req->mPendingBufferList.begin();
1711 k != req->mPendingBufferList.end(); k++ ) {
1712 struct private_handle_t *priv_handle =
1713 (struct private_handle_t *) (*(k->buffer));
1714 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1715 }
1716 }
1717 return;
1718}
1719
Thierry Strudel3d639192016-09-09 11:52:26 -07001720/*===========================================================================
1721 * FUNCTION : configureStreams
1722 *
1723 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1724 * and output streams.
1725 *
1726 * PARAMETERS :
1727 * @stream_list : streams to be configured
1728 *
1729 * RETURN     : int32_t type of status
1730 *
1731 *==========================================================================*/
1732int QCamera3HardwareInterface::configureStreams(
1733 camera3_stream_configuration_t *streamList)
1734{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001735 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001736 int rc = 0;
1737
1738 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001739 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001740 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001741 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001742
1743 return rc;
1744}
1745
1746/*===========================================================================
1747 * FUNCTION : configureStreamsPerfLocked
1748 *
1749 * DESCRIPTION: configureStreams while perfLock is held.
1750 *
1751 * PARAMETERS :
1752 * @stream_list : streams to be configured
1753 *
1754 * RETURN : int32_t type of status
1755 * NO_ERROR -- success
1756 *              non-zero failure code
1757 *==========================================================================*/
1758int QCamera3HardwareInterface::configureStreamsPerfLocked(
1759 camera3_stream_configuration_t *streamList)
1760{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001761 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001762 int rc = 0;
1763
1764 // Sanity check stream_list
1765 if (streamList == NULL) {
1766 LOGE("NULL stream configuration");
1767 return BAD_VALUE;
1768 }
1769 if (streamList->streams == NULL) {
1770 LOGE("NULL stream list");
1771 return BAD_VALUE;
1772 }
1773
1774 if (streamList->num_streams < 1) {
1775 LOGE("Bad number of streams requested: %d",
1776 streamList->num_streams);
1777 return BAD_VALUE;
1778 }
1779
1780 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1781 LOGE("Maximum number of streams %d exceeded: %d",
1782 MAX_NUM_STREAMS, streamList->num_streams);
1783 return BAD_VALUE;
1784 }
1785
Jason Leec4cf5032017-05-24 18:31:41 -07001786 mOpMode = streamList->operation_mode;
1787 LOGD("mOpMode: %d", mOpMode);
1788
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001789 rc = validateUsageFlags(streamList);
1790 if (rc != NO_ERROR) {
1791 return rc;
1792 }
1793
Thierry Strudel3d639192016-09-09 11:52:26 -07001794    /* First invalidate all the streams in mStreamInfo
1795 * if they appear again, they will be validated */
1796 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1797 it != mStreamInfo.end(); it++) {
1798 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1799 if (channel) {
1800 channel->stop();
1801 }
1802 (*it)->status = INVALID;
1803 }
1804
1805 if (mRawDumpChannel) {
1806 mRawDumpChannel->stop();
1807 delete mRawDumpChannel;
1808 mRawDumpChannel = NULL;
1809 }
1810
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001811 if (mHdrPlusRawSrcChannel) {
1812 mHdrPlusRawSrcChannel->stop();
1813 delete mHdrPlusRawSrcChannel;
1814 mHdrPlusRawSrcChannel = NULL;
1815 }
1816
Thierry Strudel3d639192016-09-09 11:52:26 -07001817 if (mSupportChannel)
1818 mSupportChannel->stop();
1819
1820 if (mAnalysisChannel) {
1821 mAnalysisChannel->stop();
1822 }
1823 if (mMetadataChannel) {
1824            /* If mStreamInfo is not empty, there is a metadata stream */
1825 mMetadataChannel->stop();
1826 }
1827 if (mChannelHandle) {
1828 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07001829 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001830 LOGD("stopping channel %d", mChannelHandle);
1831 }
1832
1833 pthread_mutex_lock(&mMutex);
1834
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07001835 mPictureChannel = NULL;
1836
Thierry Strudel3d639192016-09-09 11:52:26 -07001837 // Check state
1838 switch (mState) {
1839 case INITIALIZED:
1840 case CONFIGURED:
1841 case STARTED:
1842 /* valid state */
1843 break;
1844 default:
1845 LOGE("Invalid state %d", mState);
1846 pthread_mutex_unlock(&mMutex);
1847 return -ENODEV;
1848 }
1849
1850 /* Check whether we have video stream */
1851 m_bIs4KVideo = false;
1852 m_bIsVideo = false;
1853 m_bEisSupportedSize = false;
1854 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001855 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001856 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001857 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001858 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001859 uint32_t videoWidth = 0U;
1860 uint32_t videoHeight = 0U;
1861 size_t rawStreamCnt = 0;
1862 size_t stallStreamCnt = 0;
1863 size_t processedStreamCnt = 0;
1864 // Number of streams on ISP encoder path
1865 size_t numStreamsOnEncoder = 0;
1866 size_t numYuv888OnEncoder = 0;
1867 bool bYuv888OverrideJpeg = false;
1868 cam_dimension_t largeYuv888Size = {0, 0};
1869 cam_dimension_t maxViewfinderSize = {0, 0};
1870 bool bJpegExceeds4K = false;
1871 bool bJpegOnEncoder = false;
1872 bool bUseCommonFeatureMask = false;
1873 cam_feature_mask_t commonFeatureMask = 0;
1874 bool bSmallJpegSize = false;
1875 uint32_t width_ratio;
1876 uint32_t height_ratio;
1877 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1878 camera3_stream_t *inputStream = NULL;
1879 bool isJpeg = false;
1880 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001881 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001882 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001883
1884 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1885
1886 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001887 uint8_t eis_prop_set;
1888 uint32_t maxEisWidth = 0;
1889 uint32_t maxEisHeight = 0;
1890
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001891 // Initialize all instant AEC related variables
1892 mInstantAEC = false;
1893 mResetInstantAEC = false;
1894 mInstantAECSettledFrameNumber = 0;
1895 mAecSkipDisplayFrameBound = 0;
1896 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001897 mCurrFeatureState = 0;
1898 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001899
Thierry Strudel3d639192016-09-09 11:52:26 -07001900 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1901
1902 size_t count = IS_TYPE_MAX;
1903 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1904 for (size_t i = 0; i < count; i++) {
1905 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001906 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1907 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001908 break;
1909 }
1910 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001911
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001912 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001913 maxEisWidth = MAX_EIS_WIDTH;
1914 maxEisHeight = MAX_EIS_HEIGHT;
1915 }
1916
1917 /* EIS setprop control */
1918 char eis_prop[PROPERTY_VALUE_MAX];
1919 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001920 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001921 eis_prop_set = (uint8_t)atoi(eis_prop);
1922
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001923 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001924 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1925
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001926 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1927 m_bEisEnable, eis_prop_set, m_bEisSupported);
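    /* For example, "adb shell setprop persist.camera.eis.enable 0" disables
     * EIS regardless of sensor capability, and constrained high speed
     * recording mode forces it off as well. m_bEisEnable may additionally be
     * cleared further below for front cameras or for configurations without
     * a video stream. */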
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001928
Thierry Strudel3d639192016-09-09 11:52:26 -07001929 /* stream configurations */
1930 for (size_t i = 0; i < streamList->num_streams; i++) {
1931 camera3_stream_t *newStream = streamList->streams[i];
1932 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1933 "height = %d, rotation = %d, usage = 0x%x",
1934 i, newStream->stream_type, newStream->format,
1935 newStream->width, newStream->height, newStream->rotation,
1936 newStream->usage);
1937 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1938 newStream->stream_type == CAMERA3_STREAM_INPUT){
1939 isZsl = true;
1940 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001941 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1942 IS_USAGE_PREVIEW(newStream->usage)) {
1943 isPreview = true;
1944 }
1945
Thierry Strudel3d639192016-09-09 11:52:26 -07001946 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1947 inputStream = newStream;
1948 }
1949
Emilian Peev7650c122017-01-19 08:24:33 -08001950 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1951 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001952 isJpeg = true;
1953 jpegSize.width = newStream->width;
1954 jpegSize.height = newStream->height;
1955 if (newStream->width > VIDEO_4K_WIDTH ||
1956 newStream->height > VIDEO_4K_HEIGHT)
1957 bJpegExceeds4K = true;
1958 }
1959
1960 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1961 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1962 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001963 // In HAL3 we can have multiple different video streams.
1964 // The variables video width and height are used below as
1965 // dimensions of the biggest of them
1966 if (videoWidth < newStream->width ||
1967 videoHeight < newStream->height) {
1968 videoWidth = newStream->width;
1969 videoHeight = newStream->height;
1970 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001971 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1972 (VIDEO_4K_HEIGHT <= newStream->height)) {
1973 m_bIs4KVideo = true;
1974 }
1975 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1976 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001977
Thierry Strudel3d639192016-09-09 11:52:26 -07001978 }
1979 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1980 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1981 switch (newStream->format) {
1982 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001983 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1984 depthPresent = true;
1985 break;
1986 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001987 stallStreamCnt++;
1988 if (isOnEncoder(maxViewfinderSize, newStream->width,
1989 newStream->height)) {
1990 numStreamsOnEncoder++;
1991 bJpegOnEncoder = true;
1992 }
1993 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1994 newStream->width);
1995 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1996                        newStream->height);
1997 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1998 "FATAL: max_downscale_factor cannot be zero and so assert");
1999 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2000 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2001 LOGH("Setting small jpeg size flag to true");
2002 bSmallJpegSize = true;
2003 }
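                /* Worked example (hypothetical values): with a 4032-wide
                 * active array and a 320-wide JPEG request,
                 * width_ratio = CEIL_DIVISION(4032, 320) = 13; if the reported
                 * max_downscale_factor were, say, 8, the JPEG is flagged as
                 * small and (for non-ZSL configs) later gets the PP superset
                 * feature mask. */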
2004 break;
2005 case HAL_PIXEL_FORMAT_RAW10:
2006 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2007 case HAL_PIXEL_FORMAT_RAW16:
2008 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002009 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2010 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2011 pdStatCount++;
2012 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002013 break;
2014 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2015 processedStreamCnt++;
2016 if (isOnEncoder(maxViewfinderSize, newStream->width,
2017 newStream->height)) {
2018 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2019 !IS_USAGE_ZSL(newStream->usage)) {
2020 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2021 }
2022 numStreamsOnEncoder++;
2023 }
2024 break;
2025 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2026 processedStreamCnt++;
2027 if (isOnEncoder(maxViewfinderSize, newStream->width,
2028 newStream->height)) {
2029 // If Yuv888 size is not greater than 4K, set feature mask
2030                    // to SUPERSET so that it supports concurrent requests on
2031 // YUV and JPEG.
2032 if (newStream->width <= VIDEO_4K_WIDTH &&
2033 newStream->height <= VIDEO_4K_HEIGHT) {
2034 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2035 }
2036 numStreamsOnEncoder++;
2037 numYuv888OnEncoder++;
2038 largeYuv888Size.width = newStream->width;
2039 largeYuv888Size.height = newStream->height;
2040 }
2041 break;
2042 default:
2043 processedStreamCnt++;
2044 if (isOnEncoder(maxViewfinderSize, newStream->width,
2045 newStream->height)) {
2046 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2047 numStreamsOnEncoder++;
2048 }
2049 break;
2050 }
2051
2052 }
2053 }
2054
2055 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2056 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2057 !m_bIsVideo) {
2058 m_bEisEnable = false;
2059 }
2060
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002061 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2062 pthread_mutex_unlock(&mMutex);
2063 return -EINVAL;
2064 }
2065
Thierry Strudel54dc9782017-02-15 12:12:10 -08002066 uint8_t forceEnableTnr = 0;
2067 char tnr_prop[PROPERTY_VALUE_MAX];
2068 memset(tnr_prop, 0, sizeof(tnr_prop));
2069 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2070 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2071
Thierry Strudel3d639192016-09-09 11:52:26 -07002072 /* Logic to enable/disable TNR based on specific config size/etc.*/
2073 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002074 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2075 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002076 else if (forceEnableTnr)
2077 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002078
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002079 char videoHdrProp[PROPERTY_VALUE_MAX];
2080 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2081 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2082 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2083
2084 if (hdr_mode_prop == 1 && m_bIsVideo &&
2085 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2086 m_bVideoHdrEnabled = true;
2087 else
2088 m_bVideoHdrEnabled = false;
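    /* For example, "adb shell setprop persist.camera.hdr.video 1" enables
     * video HDR for a regular video session; the property has no effect in
     * constrained high speed mode or when no video stream is configured. */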
2089
2090
Thierry Strudel3d639192016-09-09 11:52:26 -07002091 /* Check if num_streams is sane */
2092 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2093 rawStreamCnt > MAX_RAW_STREAMS ||
2094 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2095        LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2096 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2097 pthread_mutex_unlock(&mMutex);
2098 return -EINVAL;
2099 }
2100 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002101 if (isZsl && m_bIs4KVideo) {
2102 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002103 pthread_mutex_unlock(&mMutex);
2104 return -EINVAL;
2105 }
2106 /* Check if stream sizes are sane */
2107 if (numStreamsOnEncoder > 2) {
2108 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2109 pthread_mutex_unlock(&mMutex);
2110 return -EINVAL;
2111 } else if (1 < numStreamsOnEncoder){
2112 bUseCommonFeatureMask = true;
2113 LOGH("Multiple streams above max viewfinder size, common mask needed");
2114 }
2115
2116 /* Check if BLOB size is greater than 4k in 4k recording case */
2117 if (m_bIs4KVideo && bJpegExceeds4K) {
2118 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2119 pthread_mutex_unlock(&mMutex);
2120 return -EINVAL;
2121 }
2122
Emilian Peev7650c122017-01-19 08:24:33 -08002123 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2124 depthPresent) {
2125 LOGE("HAL doesn't support depth streams in HFR mode!");
2126 pthread_mutex_unlock(&mMutex);
2127 return -EINVAL;
2128 }
2129
Thierry Strudel3d639192016-09-09 11:52:26 -07002130 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2131 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2132 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2133 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2134 // configurations:
2135 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2136 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2137 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2138 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2139 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2140 __func__);
2141 pthread_mutex_unlock(&mMutex);
2142 return -EINVAL;
2143 }
2144
2145 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2146 // the YUV stream's size is greater or equal to the JPEG size, set common
2147 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2148 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2149 jpegSize.width, jpegSize.height) &&
2150 largeYuv888Size.width > jpegSize.width &&
2151 largeYuv888Size.height > jpegSize.height) {
2152 bYuv888OverrideJpeg = true;
2153 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2154 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2155 }
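    /* For example (hypothetical sizes): a 4032x3024 YCbCr_420_888 stream
     * together with a 1920x1080 JPEG, where both exceed the max viewfinder
     * size, sets bYuv888OverrideJpeg; the JPEG stream is then configured
     * further below with the larger YUV dimensions. */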
2156
2157 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2158 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2159 commonFeatureMask);
2160 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2161 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2162
2163 rc = validateStreamDimensions(streamList);
2164 if (rc == NO_ERROR) {
2165 rc = validateStreamRotations(streamList);
2166 }
2167 if (rc != NO_ERROR) {
2168 LOGE("Invalid stream configuration requested!");
2169 pthread_mutex_unlock(&mMutex);
2170 return rc;
2171 }
2172
Emilian Peev0f3c3162017-03-15 12:57:46 +00002173 if (1 < pdStatCount) {
2174 LOGE("HAL doesn't support multiple PD streams");
2175 pthread_mutex_unlock(&mMutex);
2176 return -EINVAL;
2177 }
2178
2179 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2180 (1 == pdStatCount)) {
2181 LOGE("HAL doesn't support PD streams in HFR mode!");
2182 pthread_mutex_unlock(&mMutex);
2183 return -EINVAL;
2184 }
2185
Thierry Strudel3d639192016-09-09 11:52:26 -07002186 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2187 for (size_t i = 0; i < streamList->num_streams; i++) {
2188 camera3_stream_t *newStream = streamList->streams[i];
2189 LOGH("newStream type = %d, stream format = %d "
2190 "stream size : %d x %d, stream rotation = %d",
2191 newStream->stream_type, newStream->format,
2192 newStream->width, newStream->height, newStream->rotation);
2193        // If the stream is already in mStreamInfo, validate it
2194 bool stream_exists = false;
2195 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2196 it != mStreamInfo.end(); it++) {
2197 if ((*it)->stream == newStream) {
2198 QCamera3ProcessingChannel *channel =
2199 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2200 stream_exists = true;
2201 if (channel)
2202 delete channel;
2203 (*it)->status = VALID;
2204 (*it)->stream->priv = NULL;
2205 (*it)->channel = NULL;
2206 }
2207 }
2208 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2209 //new stream
2210 stream_info_t* stream_info;
2211 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2212 if (!stream_info) {
2213 LOGE("Could not allocate stream info");
2214 rc = -ENOMEM;
2215 pthread_mutex_unlock(&mMutex);
2216 return rc;
2217 }
2218 stream_info->stream = newStream;
2219 stream_info->status = VALID;
2220 stream_info->channel = NULL;
2221 mStreamInfo.push_back(stream_info);
2222 }
2223 /* Covers Opaque ZSL and API1 F/W ZSL */
2224 if (IS_USAGE_ZSL(newStream->usage)
2225 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2226 if (zslStream != NULL) {
2227 LOGE("Multiple input/reprocess streams requested!");
2228 pthread_mutex_unlock(&mMutex);
2229 return BAD_VALUE;
2230 }
2231 zslStream = newStream;
2232 }
2233 /* Covers YUV reprocess */
2234 if (inputStream != NULL) {
2235 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2236 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2237 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2238 && inputStream->width == newStream->width
2239 && inputStream->height == newStream->height) {
2240 if (zslStream != NULL) {
2241                /* This scenario indicates that multiple YUV streams with the same
2242                 * size as the input stream have been requested. Since the zsl stream
2243                 * handle is used solely to override the size of streams which share
2244                 * h/w streams, we just make a guess here as to which of the streams
2245                 * is the ZSL stream. This will be refactored once we have generic
2246                 * logic for streams sharing encoder output.
2247 */
2248 LOGH("Warning, Multiple ip/reprocess streams requested!");
2249 }
2250 zslStream = newStream;
2251 }
2252 }
2253 }
2254
2255 /* If a zsl stream is set, we know that we have configured at least one input or
2256 bidirectional stream */
2257 if (NULL != zslStream) {
2258 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2259 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2260 mInputStreamInfo.format = zslStream->format;
2261 mInputStreamInfo.usage = zslStream->usage;
2262 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2263 mInputStreamInfo.dim.width,
2264 mInputStreamInfo.dim.height,
2265 mInputStreamInfo.format, mInputStreamInfo.usage);
2266 }
2267
2268 cleanAndSortStreamInfo();
2269 if (mMetadataChannel) {
2270 delete mMetadataChannel;
2271 mMetadataChannel = NULL;
2272 }
2273 if (mSupportChannel) {
2274 delete mSupportChannel;
2275 mSupportChannel = NULL;
2276 }
2277
2278 if (mAnalysisChannel) {
2279 delete mAnalysisChannel;
2280 mAnalysisChannel = NULL;
2281 }
2282
2283 if (mDummyBatchChannel) {
2284 delete mDummyBatchChannel;
2285 mDummyBatchChannel = NULL;
2286 }
2287
Emilian Peev7650c122017-01-19 08:24:33 -08002288 if (mDepthChannel) {
2289 mDepthChannel = NULL;
2290 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002291 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002292
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002293 mShutterDispatcher.clear();
2294 mOutputBufferDispatcher.clear();
2295
Thierry Strudel2896d122017-02-23 19:18:03 -08002296 char is_type_value[PROPERTY_VALUE_MAX];
2297 property_get("persist.camera.is_type", is_type_value, "4");
2298 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2299
Binhao Line406f062017-05-03 14:39:44 -07002300 char property_value[PROPERTY_VALUE_MAX];
2301 property_get("persist.camera.gzoom.at", property_value, "0");
2302 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002303 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2304 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2305 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2306 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002307
2308 property_get("persist.camera.gzoom.4k", property_value, "0");
2309 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
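    /* Property example: "adb shell setprop persist.camera.gzoom.at 3" sets
     * bit 0 (video) and bit 1 (preview), enabling the Google zoom feature
     * mask on both paths for the back camera; 4K video additionally requires
     * persist.camera.gzoom.4k to be non-zero. */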
2310
Thierry Strudel3d639192016-09-09 11:52:26 -07002311 //Create metadata channel and initialize it
2312 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2313 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2314 gCamCapability[mCameraId]->color_arrangement);
2315 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2316 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002317 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002318 if (mMetadataChannel == NULL) {
2319 LOGE("failed to allocate metadata channel");
2320 rc = -ENOMEM;
2321 pthread_mutex_unlock(&mMutex);
2322 return rc;
2323 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002324 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002325 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2326 if (rc < 0) {
2327 LOGE("metadata channel initialization failed");
2328 delete mMetadataChannel;
2329 mMetadataChannel = NULL;
2330 pthread_mutex_unlock(&mMutex);
2331 return rc;
2332 }
2333
Thierry Strudel2896d122017-02-23 19:18:03 -08002334 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002335 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002336 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002337 // Keep track of preview/video streams indices.
2338 // There could be more than one preview streams, but only one video stream.
2339 int32_t video_stream_idx = -1;
2340 int32_t preview_stream_idx[streamList->num_streams];
2341 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002342 bool previewTnr[streamList->num_streams];
2343 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2344 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2345 // Loop through once to determine preview TNR conditions before creating channels.
2346 for (size_t i = 0; i < streamList->num_streams; i++) {
2347 camera3_stream_t *newStream = streamList->streams[i];
2348 uint32_t stream_usage = newStream->usage;
2349 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2350 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2351 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2352 video_stream_idx = (int32_t)i;
2353 else
2354 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2355 }
2356 }
2357 // By default, preview stream TNR is disabled.
2358    // Enable TNR for the preview stream if all conditions below are satisfied:
2359 // 1. preview resolution == video resolution.
2360 // 2. video stream TNR is enabled.
2361 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2362 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2363 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2364 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2365 if (m_bTnrEnabled && m_bTnrVideo &&
2366 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2367 video_stream->width == preview_stream->width &&
2368 video_stream->height == preview_stream->height) {
2369 previewTnr[preview_stream_idx[i]] = true;
2370 }
2371 }
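    /* For example (hypothetical sizes): a 1920x1080 preview paired with a
     * 1920x1080 video stream, with video TNR enabled and either a front
     * camera or EIS 2.0 selected via persist.camera.is_type, marks that
     * preview stream in previewTnr[], so CPP TNR is also applied on the
     * preview path below. */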
2372
Thierry Strudel3d639192016-09-09 11:52:26 -07002373 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2374 /* Allocate channel objects for the requested streams */
2375 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002376
Thierry Strudel3d639192016-09-09 11:52:26 -07002377 camera3_stream_t *newStream = streamList->streams[i];
2378 uint32_t stream_usage = newStream->usage;
2379 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2380 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2381 struct camera_info *p_info = NULL;
2382 pthread_mutex_lock(&gCamLock);
2383 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2384 pthread_mutex_unlock(&gCamLock);
2385 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2386 || IS_USAGE_ZSL(newStream->usage)) &&
2387 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002388 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002389 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002390 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2391 if (bUseCommonFeatureMask)
2392 zsl_ppmask = commonFeatureMask;
2393 else
2394 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002395 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002396 if (numStreamsOnEncoder > 0)
2397 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2398 else
2399 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002400 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002401 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002402 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002403 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002404 LOGH("Input stream configured, reprocess config");
2405 } else {
2406 //for non zsl streams find out the format
2407 switch (newStream->format) {
2408 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2409 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002410 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002411 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2412 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2413 /* add additional features to pp feature mask */
2414 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2415 mStreamConfigInfo.num_streams);
2416
2417 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2418 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2419 CAM_STREAM_TYPE_VIDEO;
2420 if (m_bTnrEnabled && m_bTnrVideo) {
2421 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2422 CAM_QCOM_FEATURE_CPP_TNR;
2423 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2424 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2425 ~CAM_QCOM_FEATURE_CDS;
2426 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002427 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2428 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2429 CAM_QTI_FEATURE_PPEISCORE;
2430 }
Binhao Line406f062017-05-03 14:39:44 -07002431 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2432 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2433 CAM_QCOM_FEATURE_GOOG_ZOOM;
2434 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002435 } else {
2436 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2437 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002438 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002439 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2440 CAM_QCOM_FEATURE_CPP_TNR;
2441 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2442 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2443 ~CAM_QCOM_FEATURE_CDS;
2444 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002445 if(!m_bSwTnrPreview) {
2446 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2447 ~CAM_QTI_FEATURE_SW_TNR;
2448 }
Binhao Line406f062017-05-03 14:39:44 -07002449 if (is_goog_zoom_preview_enabled) {
2450 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2451 CAM_QCOM_FEATURE_GOOG_ZOOM;
2452 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002453 padding_info.width_padding = mSurfaceStridePadding;
2454 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002455 previewSize.width = (int32_t)newStream->width;
2456 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002457 }
2458 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2459 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2460 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2461 newStream->height;
2462 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2463 newStream->width;
2464 }
2465 }
2466 break;
2467 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002468 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002469 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2470 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2471 if (bUseCommonFeatureMask)
2472 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2473 commonFeatureMask;
2474 else
2475 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2476 CAM_QCOM_FEATURE_NONE;
2477 } else {
2478 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2479 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2480 }
2481 break;
2482 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002483 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002484 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2485 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2486 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2487 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2488 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002489 /* Remove rotation if it is not supported
2490 for 4K LiveVideo snapshot case (online processing) */
2491 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2492 CAM_QCOM_FEATURE_ROTATION)) {
2493 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2494 &= ~CAM_QCOM_FEATURE_ROTATION;
2495 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002496 } else {
2497 if (bUseCommonFeatureMask &&
2498 isOnEncoder(maxViewfinderSize, newStream->width,
2499 newStream->height)) {
2500 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2501 } else {
2502 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2503 }
2504 }
2505 if (isZsl) {
2506 if (zslStream) {
2507 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2508 (int32_t)zslStream->width;
2509 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2510 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002511 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2512 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002513 } else {
2514 LOGE("Error, No ZSL stream identified");
2515 pthread_mutex_unlock(&mMutex);
2516 return -EINVAL;
2517 }
2518 } else if (m_bIs4KVideo) {
2519 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2520 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2521 } else if (bYuv888OverrideJpeg) {
2522 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2523 (int32_t)largeYuv888Size.width;
2524 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2525 (int32_t)largeYuv888Size.height;
2526 }
2527 break;
2528 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2529 case HAL_PIXEL_FORMAT_RAW16:
2530 case HAL_PIXEL_FORMAT_RAW10:
2531 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2532 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2533 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002534 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2535 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2536 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2537 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2538 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2539 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2540 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2541 gCamCapability[mCameraId]->dt[mPDIndex];
2542 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2543 gCamCapability[mCameraId]->vc[mPDIndex];
2544 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002545 break;
2546 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002547 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002548 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2549 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2550 break;
2551 }
2552 }
2553
2554 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2555 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2556 gCamCapability[mCameraId]->color_arrangement);
2557
2558 if (newStream->priv == NULL) {
2559 //New stream, construct channel
2560 switch (newStream->stream_type) {
2561 case CAMERA3_STREAM_INPUT:
2562 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2563 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2564 break;
2565 case CAMERA3_STREAM_BIDIRECTIONAL:
2566 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2567 GRALLOC_USAGE_HW_CAMERA_WRITE;
2568 break;
2569 case CAMERA3_STREAM_OUTPUT:
2570 /* For video encoding stream, set read/write rarely
2571 * flag so that they may be set to un-cached */
2572 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2573 newStream->usage |=
2574 (GRALLOC_USAGE_SW_READ_RARELY |
2575 GRALLOC_USAGE_SW_WRITE_RARELY |
2576 GRALLOC_USAGE_HW_CAMERA_WRITE);
2577 else if (IS_USAGE_ZSL(newStream->usage))
2578 {
2579 LOGD("ZSL usage flag skipping");
2580 }
2581 else if (newStream == zslStream
2582 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2583 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2584 } else
2585 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2586 break;
2587 default:
2588 LOGE("Invalid stream_type %d", newStream->stream_type);
2589 break;
2590 }
2591
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002592 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002593 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2594 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2595 QCamera3ProcessingChannel *channel = NULL;
2596 switch (newStream->format) {
2597 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2598 if ((newStream->usage &
2599 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2600 (streamList->operation_mode ==
2601 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2602 ) {
2603 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2604 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002605 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002606 this,
2607 newStream,
2608 (cam_stream_type_t)
2609 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2610 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2611 mMetadataChannel,
2612 0); //heap buffers are not required for HFR video channel
2613 if (channel == NULL) {
2614 LOGE("allocation of channel failed");
2615 pthread_mutex_unlock(&mMutex);
2616 return -ENOMEM;
2617 }
2618 //channel->getNumBuffers() will return 0 here so use
2619 //MAX_INFLIGHT_HFR_REQUESTS
2620 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2621 newStream->priv = channel;
2622 LOGI("num video buffers in HFR mode: %d",
2623 MAX_INFLIGHT_HFR_REQUESTS);
2624 } else {
2625 /* Copy stream contents in HFR preview only case to create
2626 * dummy batch channel so that sensor streaming is in
2627 * HFR mode */
2628 if (!m_bIsVideo && (streamList->operation_mode ==
2629 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2630 mDummyBatchStream = *newStream;
2631 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002632 int bufferCount = MAX_INFLIGHT_REQUESTS;
2633 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2634 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002635 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2636 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2637 bufferCount = m_bIs4KVideo ?
2638 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2639 }
2640
Thierry Strudel2896d122017-02-23 19:18:03 -08002641 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002642 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2643 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002644 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002645 this,
2646 newStream,
2647 (cam_stream_type_t)
2648 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2649 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2650 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002651 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002652 if (channel == NULL) {
2653 LOGE("allocation of channel failed");
2654 pthread_mutex_unlock(&mMutex);
2655 return -ENOMEM;
2656 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002657 /* disable UBWC for preview, though supported,
2658 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002659 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002660 (previewSize.width == (int32_t)videoWidth)&&
2661 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002662 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002663 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002664 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002665 /* When goog_zoom is linked to the preview or video stream,
2666 * disable UBWC for the linked stream */
2667 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2668 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2669 channel->setUBWCEnabled(false);
2670 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002671 newStream->max_buffers = channel->getNumBuffers();
2672 newStream->priv = channel;
2673 }
2674 break;
2675 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2676 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2677 mChannelHandle,
2678 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002679 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002680 this,
2681 newStream,
2682 (cam_stream_type_t)
2683 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2684 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2685 mMetadataChannel);
2686 if (channel == NULL) {
2687 LOGE("allocation of YUV channel failed");
2688 pthread_mutex_unlock(&mMutex);
2689 return -ENOMEM;
2690 }
2691 newStream->max_buffers = channel->getNumBuffers();
2692 newStream->priv = channel;
2693 break;
2694 }
2695 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2696 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002697 case HAL_PIXEL_FORMAT_RAW10: {
2698 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2699 (HAL_DATASPACE_DEPTH != newStream->data_space))
2700 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002701 mRawChannel = new QCamera3RawChannel(
2702 mCameraHandle->camera_handle, mChannelHandle,
2703 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002704 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002705 this, newStream,
2706 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002707 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002708 if (mRawChannel == NULL) {
2709 LOGE("allocation of raw channel failed");
2710 pthread_mutex_unlock(&mMutex);
2711 return -ENOMEM;
2712 }
2713 newStream->max_buffers = mRawChannel->getNumBuffers();
2714 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2715 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002716 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002717 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002718 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2719 mDepthChannel = new QCamera3DepthChannel(
2720 mCameraHandle->camera_handle, mChannelHandle,
2721 mCameraHandle->ops, NULL, NULL, &padding_info,
2722 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2723 mMetadataChannel);
2724 if (NULL == mDepthChannel) {
2725 LOGE("Allocation of depth channel failed");
2726 pthread_mutex_unlock(&mMutex);
2727 return NO_MEMORY;
2728 }
2729 newStream->priv = mDepthChannel;
2730 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2731 } else {
2732 // Max live snapshot inflight buffer is 1. This is to mitigate
2733 // frame drop issues for video snapshot. The more buffers being
2734 // allocated, the more frame drops there are.
2735 mPictureChannel = new QCamera3PicChannel(
2736 mCameraHandle->camera_handle, mChannelHandle,
2737 mCameraHandle->ops, captureResultCb,
2738 setBufferErrorStatus, &padding_info, this, newStream,
2739 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2740 m_bIs4KVideo, isZsl, mMetadataChannel,
2741 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2742 if (mPictureChannel == NULL) {
2743 LOGE("allocation of channel failed");
2744 pthread_mutex_unlock(&mMutex);
2745 return -ENOMEM;
2746 }
2747 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2748 newStream->max_buffers = mPictureChannel->getNumBuffers();
2749 mPictureChannel->overrideYuvSize(
2750 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2751 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002752 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002753 break;
2754
2755 default:
2756 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002757 pthread_mutex_unlock(&mMutex);
2758 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002759 }
2760 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2761 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2762 } else {
2763 LOGE("Error, Unknown stream type");
2764 pthread_mutex_unlock(&mMutex);
2765 return -EINVAL;
2766 }
2767
2768 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002769 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002770 // Here we only care whether it's EIS3 or not
2771 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2772 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2773 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2774 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002775 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002776 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002777 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002778 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2779 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2780 }
2781 }
2782
2783 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2784 it != mStreamInfo.end(); it++) {
2785 if ((*it)->stream == newStream) {
2786 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2787 break;
2788 }
2789 }
2790 } else {
2791 // Channel already exists for this stream
2792 // Do nothing for now
2793 }
2794 padding_info = gCamCapability[mCameraId]->padding_info;
2795
Emilian Peev7650c122017-01-19 08:24:33 -08002796 /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002797 * since there is no real stream associated with it
2798 */
Emilian Peev7650c122017-01-19 08:24:33 -08002799 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002800 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2801 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002802 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002803 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002804 }
2805
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002806 // Let buffer dispatcher know the configured streams.
2807 mOutputBufferDispatcher.configureStreams(streamList);
2808
Thierry Strudel2896d122017-02-23 19:18:03 -08002809 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2810 onlyRaw = false;
2811 }
2812
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002813 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002814 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002815 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002816 cam_analysis_info_t analysisInfo;
2817 int32_t ret = NO_ERROR;
2818 ret = mCommon.getAnalysisInfo(
2819 FALSE,
2820 analysisFeatureMask,
2821 &analysisInfo);
2822 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002823 cam_color_filter_arrangement_t analysis_color_arrangement =
2824 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2825 CAM_FILTER_ARRANGEMENT_Y :
2826 gCamCapability[mCameraId]->color_arrangement);
2827 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2828 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002829 cam_dimension_t analysisDim;
2830 analysisDim = mCommon.getMatchingDimension(previewSize,
2831 analysisInfo.analysis_recommended_res);
2832
2833 mAnalysisChannel = new QCamera3SupportChannel(
2834 mCameraHandle->camera_handle,
2835 mChannelHandle,
2836 mCameraHandle->ops,
2837 &analysisInfo.analysis_padding_info,
2838 analysisFeatureMask,
2839 CAM_STREAM_TYPE_ANALYSIS,
2840 &analysisDim,
2841 (analysisInfo.analysis_format
2842 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2843 : CAM_FORMAT_YUV_420_NV21),
2844 analysisInfo.hw_analysis_supported,
2845 gCamCapability[mCameraId]->color_arrangement,
2846 this,
2847 0); // force buffer count to 0
2848 } else {
2849 LOGW("getAnalysisInfo failed, ret = %d", ret);
2850 }
2851 if (!mAnalysisChannel) {
2852 LOGW("Analysis channel cannot be created");
2853 }
2854 }
2855
Thierry Strudel3d639192016-09-09 11:52:26 -07002856 //RAW DUMP channel
2857 if (mEnableRawDump && isRawStreamRequested == false){
2858 cam_dimension_t rawDumpSize;
2859 rawDumpSize = getMaxRawSize(mCameraId);
2860 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2861 setPAAFSupport(rawDumpFeatureMask,
2862 CAM_STREAM_TYPE_RAW,
2863 gCamCapability[mCameraId]->color_arrangement);
2864 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2865 mChannelHandle,
2866 mCameraHandle->ops,
2867 rawDumpSize,
2868 &padding_info,
2869 this, rawDumpFeatureMask);
2870 if (!mRawDumpChannel) {
2871 LOGE("Raw Dump channel cannot be created");
2872 pthread_mutex_unlock(&mMutex);
2873 return -ENOMEM;
2874 }
2875 }
2876
Thierry Strudel3d639192016-09-09 11:52:26 -07002877 if (mAnalysisChannel) {
2878 cam_analysis_info_t analysisInfo;
2879 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2880 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2881 CAM_STREAM_TYPE_ANALYSIS;
2882 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2883 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002884 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002885 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2886 &analysisInfo);
2887 if (rc != NO_ERROR) {
2888 LOGE("getAnalysisInfo failed, ret = %d", rc);
2889 pthread_mutex_unlock(&mMutex);
2890 return rc;
2891 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002892 cam_color_filter_arrangement_t analysis_color_arrangement =
2893 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2894 CAM_FILTER_ARRANGEMENT_Y :
2895 gCamCapability[mCameraId]->color_arrangement);
2896 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2897 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2898 analysis_color_arrangement);
2899
Thierry Strudel3d639192016-09-09 11:52:26 -07002900 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002901 mCommon.getMatchingDimension(previewSize,
2902 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002903 mStreamConfigInfo.num_streams++;
2904 }
2905
Thierry Strudel2896d122017-02-23 19:18:03 -08002906 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002907 cam_analysis_info_t supportInfo;
2908 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2909 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2910 setPAAFSupport(callbackFeatureMask,
2911 CAM_STREAM_TYPE_CALLBACK,
2912 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002913 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002914 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002915 if (ret != NO_ERROR) {
2916 /* Ignore the error for Mono camera
2917 * because the PAAF bit mask is only set
2918 * for CAM_STREAM_TYPE_ANALYSIS stream type
2919 */
2920 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2921 LOGW("getAnalysisInfo failed, ret = %d", ret);
2922 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002923 }
2924 mSupportChannel = new QCamera3SupportChannel(
2925 mCameraHandle->camera_handle,
2926 mChannelHandle,
2927 mCameraHandle->ops,
2928 &gCamCapability[mCameraId]->padding_info,
2929 callbackFeatureMask,
2930 CAM_STREAM_TYPE_CALLBACK,
2931 &QCamera3SupportChannel::kDim,
2932 CAM_FORMAT_YUV_420_NV21,
2933 supportInfo.hw_analysis_supported,
2934 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002935 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002936 if (!mSupportChannel) {
2937 LOGE("dummy channel cannot be created");
2938 pthread_mutex_unlock(&mMutex);
2939 return -ENOMEM;
2940 }
2941 }
2942
2943 if (mSupportChannel) {
2944 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2945 QCamera3SupportChannel::kDim;
2946 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2947 CAM_STREAM_TYPE_CALLBACK;
2948 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2949 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2950 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2951 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2952 gCamCapability[mCameraId]->color_arrangement);
2953 mStreamConfigInfo.num_streams++;
2954 }
2955
2956 if (mRawDumpChannel) {
2957 cam_dimension_t rawSize;
2958 rawSize = getMaxRawSize(mCameraId);
2959 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2960 rawSize;
2961 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2962 CAM_STREAM_TYPE_RAW;
2963 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2964 CAM_QCOM_FEATURE_NONE;
2965 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2966 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2967 gCamCapability[mCameraId]->color_arrangement);
2968 mStreamConfigInfo.num_streams++;
2969 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002970
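// If an HDR+ RAW source channel exists, add a matching full-size RAW entry to the
// stream configuration info reported to the backend.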
2971 if (mHdrPlusRawSrcChannel) {
2972 cam_dimension_t rawSize;
2973 rawSize = getMaxRawSize(mCameraId);
2974 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2975 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2976 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2977 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2978 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2979 gCamCapability[mCameraId]->color_arrangement);
2980 mStreamConfigInfo.num_streams++;
2981 }
2982
Thierry Strudel3d639192016-09-09 11:52:26 -07002983 /* In HFR mode, if video stream is not added, create a dummy channel so that
2984 * ISP can create a batch mode even for preview only case. This channel is
2985 * never 'start'ed (no stream-on), it is only 'initialized' */
2986 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2987 !m_bIsVideo) {
2988 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2989 setPAAFSupport(dummyFeatureMask,
2990 CAM_STREAM_TYPE_VIDEO,
2991 gCamCapability[mCameraId]->color_arrangement);
2992 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2993 mChannelHandle,
2994 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002995 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002996 this,
2997 &mDummyBatchStream,
2998 CAM_STREAM_TYPE_VIDEO,
2999 dummyFeatureMask,
3000 mMetadataChannel);
3001 if (NULL == mDummyBatchChannel) {
3002 LOGE("creation of mDummyBatchChannel failed."
3003 "Preview will use non-hfr sensor mode ");
3004 }
3005 }
3006 if (mDummyBatchChannel) {
3007 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3008 mDummyBatchStream.width;
3009 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3010 mDummyBatchStream.height;
3011 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3012 CAM_STREAM_TYPE_VIDEO;
3013 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3014 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3015 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3016 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3017 gCamCapability[mCameraId]->color_arrangement);
3018 mStreamConfigInfo.num_streams++;
3019 }
3020
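// Inflight buffer limits reported to the backend: max_buffers is 0 for 4K video,
// MAX_VIDEO_BUFFERS for EIS3 video, and MAX_INFLIGHT_REQUESTS otherwise.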
3021 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3022 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003023 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003024 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003025
3026 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3027 for (pendingRequestIterator i = mPendingRequestsList.begin();
3028 i != mPendingRequestsList.end();) {
3029 i = erasePendingRequest(i);
3030 }
3031 mPendingFrameDropList.clear();
3032 // Initialize/Reset the pending buffers list
3033 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3034 req.mPendingBufferList.clear();
3035 }
3036 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003037 mExpectedInflightDuration = 0;
3038 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003039
Thierry Strudel3d639192016-09-09 11:52:26 -07003040 mCurJpegMeta.clear();
3041 //Get min frame duration for this streams configuration
3042 deriveMinFrameDuration();
3043
Chien-Yu Chenee335912017-02-09 17:53:20 -08003044 mFirstPreviewIntentSeen = false;
3045
3046 // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003047 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003048 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
3049 finishHdrPlusClientOpeningLocked(l);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003050 disableHdrPlusModeLocked();
3051 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003052
Thierry Strudel3d639192016-09-09 11:52:26 -07003053 // Update state
3054 mState = CONFIGURED;
3055
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003056 mFirstMetadataCallback = true;
3057
Thierry Strudel3d639192016-09-09 11:52:26 -07003058 pthread_mutex_unlock(&mMutex);
3059
3060 return rc;
3061}
3062
3063/*===========================================================================
3064 * FUNCTION : validateCaptureRequest
3065 *
3066 * DESCRIPTION: validate a capture request from camera service
3067 *
3068 * PARAMETERS :
3069 * @request : request from framework to process
3070 *
3071 * RETURN :
3072 *
3073 *==========================================================================*/
3074int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003075 camera3_capture_request_t *request,
3076 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003077{
3078 ssize_t idx = 0;
3079 const camera3_stream_buffer_t *b;
3080 CameraMetadata meta;
3081
3082 /* Sanity check the request */
3083 if (request == NULL) {
3084 LOGE("NULL capture request");
3085 return BAD_VALUE;
3086 }
3087
3088 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3089 /*settings cannot be null for the first request*/
3090 return BAD_VALUE;
3091 }
3092
3093 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003094 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3095 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003096 LOGE("Request %d: No output buffers provided!",
3097 frameNumber);
3098 return BAD_VALUE;
3099 }
3100 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3101 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3102 request->num_output_buffers, MAX_NUM_STREAMS);
3103 return BAD_VALUE;
3104 }
3105 if (request->input_buffer != NULL) {
3106 b = request->input_buffer;
3107 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3108 LOGE("Request %d: Buffer %ld: Status not OK!",
3109 frameNumber, (long)idx);
3110 return BAD_VALUE;
3111 }
3112 if (b->release_fence != -1) {
3113 LOGE("Request %d: Buffer %ld: Has a release fence!",
3114 frameNumber, (long)idx);
3115 return BAD_VALUE;
3116 }
3117 if (b->buffer == NULL) {
3118 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3119 frameNumber, (long)idx);
3120 return BAD_VALUE;
3121 }
3122 }
3123
3124 // Validate all buffers
3125 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003126 if (b == NULL) {
3127 return BAD_VALUE;
3128 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003129 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003130 QCamera3ProcessingChannel *channel =
3131 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3132 if (channel == NULL) {
3133 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3134 frameNumber, (long)idx);
3135 return BAD_VALUE;
3136 }
3137 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3138 LOGE("Request %d: Buffer %ld: Status not OK!",
3139 frameNumber, (long)idx);
3140 return BAD_VALUE;
3141 }
3142 if (b->release_fence != -1) {
3143 LOGE("Request %d: Buffer %ld: Has a release fence!",
3144 frameNumber, (long)idx);
3145 return BAD_VALUE;
3146 }
3147 if (b->buffer == NULL) {
3148 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3149 frameNumber, (long)idx);
3150 return BAD_VALUE;
3151 }
3152 if (*(b->buffer) == NULL) {
3153 LOGE("Request %d: Buffer %ld: NULL private handle!",
3154 frameNumber, (long)idx);
3155 return BAD_VALUE;
3156 }
3157 idx++;
3158 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003159 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003160 return NO_ERROR;
3161}
3162
3163/*===========================================================================
3164 * FUNCTION : deriveMinFrameDuration
3165 *
3166 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3167 * on currently configured streams.
3168 *
3169 * PARAMETERS : NONE
3170 *
3171 * RETURN : NONE
3172 *
3173 *==========================================================================*/
3174void QCamera3HardwareInterface::deriveMinFrameDuration()
3175{
3176 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003177 bool hasRaw = false;
3178
3179 mMinRawFrameDuration = 0;
3180 mMinJpegFrameDuration = 0;
3181 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003182
3183 maxJpegDim = 0;
3184 maxProcessedDim = 0;
3185 maxRawDim = 0;
3186
3187 // Figure out maximum jpeg, processed, and raw dimensions
3188 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3189 it != mStreamInfo.end(); it++) {
3190
3191 // Input stream doesn't have valid stream_type
3192 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3193 continue;
3194
3195 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3196 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3197 if (dimension > maxJpegDim)
3198 maxJpegDim = dimension;
3199 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3200 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3201 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003202 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003203 if (dimension > maxRawDim)
3204 maxRawDim = dimension;
3205 } else {
3206 if (dimension > maxProcessedDim)
3207 maxProcessedDim = dimension;
3208 }
3209 }
3210
3211 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3212 MAX_SIZES_CNT);
3213
3214 //Assume all jpeg dimensions are in processed dimensions.
3215 if (maxJpegDim > maxProcessedDim)
3216 maxProcessedDim = maxJpegDim;
3217 //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003218 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003219 maxRawDim = INT32_MAX;
3220
3221 for (size_t i = 0; i < count; i++) {
3222 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3223 gCamCapability[mCameraId]->raw_dim[i].height;
3224 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3225 maxRawDim = dimension;
3226 }
3227 }
3228
3229 //Find minimum durations for processed, jpeg, and raw
3230 for (size_t i = 0; i < count; i++) {
3231 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3232 gCamCapability[mCameraId]->raw_dim[i].height) {
3233 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3234 break;
3235 }
3236 }
3237 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3238 for (size_t i = 0; i < count; i++) {
3239 if (maxProcessedDim ==
3240 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3241 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3242 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3243 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3244 break;
3245 }
3246 }
3247}
3248
3249/*===========================================================================
3250 * FUNCTION : getMinFrameDuration
3251 *
3252 * DESCRIPTION: get minimum frame duration based on the currently configured streams'
3253 * minimum frame durations and the current request configuration.
3254 *
3255 * PARAMETERS : @request: request sent by the framework
3256 *
3257 * RETURN : min frame duration for a particular request
3258 *
3259 *==========================================================================*/
3260int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3261{
3262 bool hasJpegStream = false;
3263 bool hasRawStream = false;
3264 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3265 const camera3_stream_t *stream = request->output_buffers[i].stream;
3266 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3267 hasJpegStream = true;
3268 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3269 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3270 stream->format == HAL_PIXEL_FORMAT_RAW16)
3271 hasRawStream = true;
3272 }
3273
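// Without a JPEG stream the request is paced by the slower of the raw and processed
// minimums; a JPEG stream additionally bounds it by the JPEG minimum duration.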
3274 if (!hasJpegStream)
3275 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3276 else
3277 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3278}
3279
3280/*===========================================================================
3281 * FUNCTION : handleBuffersDuringFlushLock
3282 *
3283 * DESCRIPTION: Account for buffers returned from back-end during flush
3284 * This function is executed while mMutex is held by the caller.
3285 *
3286 * PARAMETERS :
3287 * @buffer: image buffer for the callback
3288 *
3289 * RETURN :
3290 *==========================================================================*/
3291void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3292{
3293 bool buffer_found = false;
3294 for (List<PendingBuffersInRequest>::iterator req =
3295 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3296 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3297 for (List<PendingBufferInfo>::iterator i =
3298 req->mPendingBufferList.begin();
3299 i != req->mPendingBufferList.end(); i++) {
3300 if (i->buffer == buffer->buffer) {
3301 mPendingBuffersMap.numPendingBufsAtFlush--;
3302 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3303 buffer->buffer, req->frame_number,
3304 mPendingBuffersMap.numPendingBufsAtFlush);
3305 buffer_found = true;
3306 break;
3307 }
3308 }
3309 if (buffer_found) {
3310 break;
3311 }
3312 }
3313 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3314 //signal the flush()
3315 LOGD("All buffers returned to HAL. Continue flush");
3316 pthread_cond_signal(&mBuffersCond);
3317 }
3318}
3319
Thierry Strudel3d639192016-09-09 11:52:26 -07003320/*===========================================================================
3321 * FUNCTION : handleBatchMetadata
3322 *
3323 * DESCRIPTION: Handles metadata buffer callback in batch mode
3324 *
3325 * PARAMETERS : @metadata_buf: metadata buffer
3326 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3327 * the meta buf in this method
3328 *
3329 * RETURN :
3330 *
3331 *==========================================================================*/
3332void QCamera3HardwareInterface::handleBatchMetadata(
3333 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3334{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003335 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003336
3337 if (NULL == metadata_buf) {
3338 LOGE("metadata_buf is NULL");
3339 return;
3340 }
3341 /* In batch mode, the metadata will contain the frame number and timestamp of
3342 * the last frame in the batch. Eg: a batch containing buffers from request
3343 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3344 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3345 * multiple process_capture_results */
3346 metadata_buffer_t *metadata =
3347 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3348 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3349 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3350 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3351 uint32_t frame_number = 0, urgent_frame_number = 0;
3352 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3353 bool invalid_metadata = false;
3354 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3355 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003356 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003357
3358 int32_t *p_frame_number_valid =
3359 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3360 uint32_t *p_frame_number =
3361 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3362 int64_t *p_capture_time =
3363 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3364 int32_t *p_urgent_frame_number_valid =
3365 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3366 uint32_t *p_urgent_frame_number =
3367 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3368
3369 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3370 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3371 (NULL == p_urgent_frame_number)) {
3372 LOGE("Invalid metadata");
3373 invalid_metadata = true;
3374 } else {
3375 frame_number_valid = *p_frame_number_valid;
3376 last_frame_number = *p_frame_number;
3377 last_frame_capture_time = *p_capture_time;
3378 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3379 last_urgent_frame_number = *p_urgent_frame_number;
3380 }
3381
3382 /* In batch mode, when no video buffers are requested, set_parms are sent
3383 * for every capture_request. The difference between consecutive urgent
3384 * frame numbers and frame numbers should be used to interpolate the
3385 * corresponding frame numbers and time stamps */
3386 pthread_mutex_lock(&mMutex);
3387 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003388 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3389 if(idx < 0) {
3390 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3391 last_urgent_frame_number);
3392 mState = ERROR;
3393 pthread_mutex_unlock(&mMutex);
3394 return;
3395 }
3396 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003397 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3398 first_urgent_frame_number;
3399
3400 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3401 urgent_frame_number_valid,
3402 first_urgent_frame_number, last_urgent_frame_number);
3403 }
3404
3405 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003406 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3407 if(idx < 0) {
3408 LOGE("Invalid frame number received: %d. Irrecoverable error",
3409 last_frame_number);
3410 mState = ERROR;
3411 pthread_mutex_unlock(&mMutex);
3412 return;
3413 }
3414 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003415 frameNumDiff = last_frame_number + 1 -
3416 first_frame_number;
3417 mPendingBatchMap.removeItem(last_frame_number);
3418
3419 LOGD("frm: valid: %d frm_num: %d - %d",
3420 frame_number_valid,
3421 first_frame_number, last_frame_number);
3422
3423 }
3424 pthread_mutex_unlock(&mMutex);
3425
3426 if (urgent_frame_number_valid || frame_number_valid) {
3427 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3428 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3429 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3430 urgentFrameNumDiff, last_urgent_frame_number);
3431 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3432 LOGE("frameNumDiff: %d frameNum: %d",
3433 frameNumDiff, last_frame_number);
3434 }
3435
3436 for (size_t i = 0; i < loopCount; i++) {
3437 /* handleMetadataWithLock is called even for invalid_metadata for
3438 * pipeline depth calculation */
3439 if (!invalid_metadata) {
3440 /* Infer frame number. Batch metadata contains frame number of the
3441 * last frame */
3442 if (urgent_frame_number_valid) {
3443 if (i < urgentFrameNumDiff) {
3444 urgent_frame_number =
3445 first_urgent_frame_number + i;
3446 LOGD("inferred urgent frame_number: %d",
3447 urgent_frame_number);
3448 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3449 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3450 } else {
3451 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3452 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3453 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3454 }
3455 }
3456
3457 /* Infer frame number. Batch metadata contains frame number of the
3458 * last frame */
3459 if (frame_number_valid) {
3460 if (i < frameNumDiff) {
3461 frame_number = first_frame_number + i;
3462 LOGD("inferred frame_number: %d", frame_number);
3463 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3464 CAM_INTF_META_FRAME_NUMBER, frame_number);
3465 } else {
3466 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3467 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3468 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3469 }
3470 }
3471
3472 if (last_frame_capture_time) {
3473 //Infer timestamp
3474 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003475 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003476 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003477 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003478 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3479 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3480 LOGD("batch capture_time: %lld, capture_time: %lld",
3481 last_frame_capture_time, capture_time);
3482 }
3483 }
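// Dispatch this (possibly interpolated) metadata entry, flagging whether it is the
// last urgent/regular metadata of the batch.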
3484 pthread_mutex_lock(&mMutex);
3485 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003486 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003487 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3488 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003489 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003490 pthread_mutex_unlock(&mMutex);
3491 }
3492
3493 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003494 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003495 mMetadataChannel->bufDone(metadata_buf);
3496 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003497 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003498 }
3499}
3500
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003501void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3502 camera3_error_msg_code_t errorCode)
3503{
3504 camera3_notify_msg_t notify_msg;
3505 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3506 notify_msg.type = CAMERA3_MSG_ERROR;
3507 notify_msg.message.error.error_code = errorCode;
3508 notify_msg.message.error.error_stream = NULL;
3509 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003510 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003511
3512 return;
3513}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003514
3515/*===========================================================================
3516 * FUNCTION : sendPartialMetadataWithLock
3517 *
3518 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3519 *
3520 * PARAMETERS : @metadata: metadata buffer
3521 * @requestIter: The iterator for the pending capture request for
3522 * which the partial result is being sent
3523 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3524 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003525 * @isJumpstartMetadata: Whether this is a partial metadata for
3526 * jumpstart, i.e. even though it doesn't map to a valid partial
3527 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003528 *
3529 * RETURN :
3530 *
3531 *==========================================================================*/
3532
3533void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3534 metadata_buffer_t *metadata,
3535 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003536 bool lastUrgentMetadataInBatch,
3537 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003538{
3539 camera3_capture_result_t result;
3540 memset(&result, 0, sizeof(camera3_capture_result_t));
3541
3542 requestIter->partial_result_cnt++;
3543
3544 // Extract 3A metadata
3545 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003546 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3547 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003548 // Populate metadata result
3549 result.frame_number = requestIter->frame_number;
3550 result.num_output_buffers = 0;
3551 result.output_buffers = NULL;
3552 result.partial_result = requestIter->partial_result_cnt;
3553
3554 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003555 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003556 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3557 // Notify HDR+ client about the partial metadata.
3558 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3559 result.partial_result == PARTIAL_RESULT_COUNT);
3560 }
3561 }
3562
3563 orchestrateResult(&result);
3564 LOGD("urgent frame_number = %u", result.frame_number);
3565 free_camera_metadata((camera_metadata_t *)result.result);
3566}
3567
Thierry Strudel3d639192016-09-09 11:52:26 -07003568/*===========================================================================
3569 * FUNCTION : handleMetadataWithLock
3570 *
3571 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3572 *
3573 * PARAMETERS : @metadata_buf: metadata buffer
3574 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3575 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003576 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3577 * last urgent metadata in a batch. Always true for non-batch mode
3578 * @lastMetadataInBatch: Boolean to indicate whether this is the
3579 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003580 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3581 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003582 *
3583 * RETURN :
3584 *
3585 *==========================================================================*/
3586void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003587 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003588 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3589 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003590{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003591 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003592 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3593 //during flush do not send metadata from this thread
3594 LOGD("not sending metadata during flush or when mState is error");
3595 if (free_and_bufdone_meta_buf) {
3596 mMetadataChannel->bufDone(metadata_buf);
3597 free(metadata_buf);
3598 }
3599 return;
3600 }
3601
3602 //not in flush
3603 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3604 int32_t frame_number_valid, urgent_frame_number_valid;
3605 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003606 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003607 nsecs_t currentSysTime;
3608
3609 int32_t *p_frame_number_valid =
3610 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3611 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3612 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003613 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003614 int32_t *p_urgent_frame_number_valid =
3615 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3616 uint32_t *p_urgent_frame_number =
3617 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3618 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3619 metadata) {
3620 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3621 *p_frame_number_valid, *p_frame_number);
3622 }
3623
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003624 camera_metadata_t *resultMetadata = nullptr;
3625
Thierry Strudel3d639192016-09-09 11:52:26 -07003626 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3627 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3628 LOGE("Invalid metadata");
3629 if (free_and_bufdone_meta_buf) {
3630 mMetadataChannel->bufDone(metadata_buf);
3631 free(metadata_buf);
3632 }
3633 goto done_metadata;
3634 }
3635 frame_number_valid = *p_frame_number_valid;
3636 frame_number = *p_frame_number;
3637 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003638 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003639 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3640 urgent_frame_number = *p_urgent_frame_number;
3641 currentSysTime = systemTime(CLOCK_MONOTONIC);
3642
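// When the sensor timestamp is not calibrated, measure the current offset between
// CLOCK_BOOTTIME and CLOCK_MONOTONIC (using the tightest of three bracketing samples)
// and subtract it from capture_time, effectively rebasing the timestamp onto the
// monotonic clock.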
Jason Lee603176d2017-05-31 11:43:27 -07003643 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3644 const int tries = 3;
3645 nsecs_t bestGap = 0, measured = 0;
3646 for (int i = 0; i < tries; ++i) {
3647 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3648 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3649 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3650 const nsecs_t gap = tmono2 - tmono;
3651 if (i == 0 || gap < bestGap) {
3652 bestGap = gap;
3653 measured = tbase - ((tmono + tmono2) >> 1);
3654 }
3655 }
3656 capture_time -= measured;
3657 }
3658
Thierry Strudel3d639192016-09-09 11:52:26 -07003659 // Detect if buffers from any requests are overdue
3660 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003661 int64_t timeout;
3662 {
3663 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3664 // If there is a pending HDR+ request, the following requests may be blocked until the
3665 // HDR+ request is done. So allow a longer timeout.
3666 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3667 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003668 if (timeout < mExpectedInflightDuration) {
3669 timeout = mExpectedInflightDuration;
3670 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003671 }
3672
3673 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003674 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003675 assert(missed.stream->priv);
3676 if (missed.stream->priv) {
3677 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3678 assert(ch->mStreams[0]);
3679 if (ch->mStreams[0]) {
3680 LOGE("Cancel missing frame = %d, buffer = %p,"
3681 "stream type = %d, stream format = %d",
3682 req.frame_number, missed.buffer,
3683 ch->mStreams[0]->getMyType(), missed.stream->format);
3684 ch->timeoutFrame(req.frame_number);
3685 }
3686 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003687 }
3688 }
3689 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003690 //For the very first metadata callback, regardless of whether it contains a valid
3691 //frame number, send the partial metadata for the jumpstarting requests.
3692 //Note that this has to be done even if the metadata doesn't contain valid
3693 //urgent frame number, because in the case only 1 request is ever submitted
3694 //to HAL, there won't be subsequent valid urgent frame number.
3695 if (mFirstMetadataCallback) {
3696 for (pendingRequestIterator i =
3697 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3698 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003699 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3700 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003701 }
3702 }
3703 mFirstMetadataCallback = false;
3704 }
3705
Thierry Strudel3d639192016-09-09 11:52:26 -07003706 //Partial result on process_capture_result for timestamp
3707 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003708 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003709
3710 //Received an urgent frame number, handle it
3711 //using partial results
3712 for (pendingRequestIterator i =
3713 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3714 LOGD("Iterator Frame = %d urgent frame = %d",
3715 i->frame_number, urgent_frame_number);
3716
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003717 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003718 (i->partial_result_cnt == 0)) {
3719 LOGE("Error: HAL missed urgent metadata for frame number %d",
3720 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003721 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003722 }
3723
3724 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003725 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003726 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3727 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003728 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3729 // Instant AEC settled for this frame.
3730 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3731 mInstantAECSettledFrameNumber = urgent_frame_number;
3732 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003733 break;
3734 }
3735 }
3736 }
3737
3738 if (!frame_number_valid) {
3739 LOGD("Not a valid normal frame number, used as SOF only");
3740 if (free_and_bufdone_meta_buf) {
3741 mMetadataChannel->bufDone(metadata_buf);
3742 free(metadata_buf);
3743 }
3744 goto done_metadata;
3745 }
3746 LOGH("valid frame_number = %u, capture_time = %lld",
3747 frame_number, capture_time);
3748
Emilian Peev4e0fe952017-06-30 12:40:09 -07003749 handleDepthDataLocked(metadata->depth_data, frame_number,
3750 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003751
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003752 // Check whether any stream buffer corresponding to this is dropped or not
3753 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3754 // OR check if instant AEC is enabled, then drop frames until AEC is settled.
3755 for (auto & pendingRequest : mPendingRequestsList) {
3756 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3757 mInstantAECSettledFrameNumber)) {
3758 camera3_notify_msg_t notify_msg = {};
3759 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003760 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003761 QCamera3ProcessingChannel *channel =
3762 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003763 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003764 if (p_cam_frame_drop) {
3765 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003766 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003767 // Got the stream ID for drop frame.
3768 dropFrame = true;
3769 break;
3770 }
3771 }
3772 } else {
3773 // This is the instant AEC case.
3774 // For instant AEC, drop the stream until AEC is settled.
3775 dropFrame = true;
3776 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003777
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003778 if (dropFrame) {
3779 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3780 if (p_cam_frame_drop) {
3781 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003782 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003783 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003784 } else {
3785 // For instant AEC, inform frame drop and frame number
3786 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3787 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003788 pendingRequest.frame_number, streamID,
3789 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003790 }
3791 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003792 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003793 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003794 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003795 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003796 if (p_cam_frame_drop) {
3797 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003798 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003799 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003800 } else {
3801 // For instant AEC, inform frame drop and frame number
3802 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3803 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003804 pendingRequest.frame_number, streamID,
3805 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003806 }
3807 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003808 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003809 PendingFrameDrop.stream_ID = streamID;
3810 // Add the Frame drop info to mPendingFrameDropList
3811 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003812 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003813 }
3814 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003815 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003816
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003817 for (auto & pendingRequest : mPendingRequestsList) {
3818 // Find the pending request with the frame number.
3819 if (pendingRequest.frame_number == frame_number) {
3820 // Update the sensor timestamp.
3821 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003822
Thierry Strudel3d639192016-09-09 11:52:26 -07003823
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003824 /* Set the timestamp in display metadata so that clients aware of
3825 private_handle such as VT can use these un-modified timestamps.
3826 Camera framework is unaware of this timestamp and cannot change this */
Jason Lee603176d2017-05-31 11:43:27 -07003827 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003828
Thierry Strudel3d639192016-09-09 11:52:26 -07003829 // Find channel requiring metadata, meaning internal offline postprocess
3830 // is needed.
 3831 //TODO: for now, we don't support two streams requiring metadata at the same time
 3832 // (because we are not making copies, and the metadata buffer is not reference counted).
3833 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003834 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3835 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003836 if (iter->need_metadata) {
3837 internalPproc = true;
3838 QCamera3ProcessingChannel *channel =
3839 (QCamera3ProcessingChannel *)iter->stream->priv;
3840 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003841 if(p_is_metabuf_queued != NULL) {
3842 *p_is_metabuf_queued = true;
3843 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003844 break;
3845 }
3846 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003847 for (auto itr = pendingRequest.internalRequestList.begin();
3848 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003849 if (itr->need_metadata) {
3850 internalPproc = true;
3851 QCamera3ProcessingChannel *channel =
3852 (QCamera3ProcessingChannel *)itr->stream->priv;
3853 channel->queueReprocMetadata(metadata_buf);
3854 break;
3855 }
3856 }
3857
Thierry Strudel54dc9782017-02-15 12:12:10 -08003858 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003859
3860 bool *enableZsl = nullptr;
3861 if (gExposeEnableZslKey) {
3862 enableZsl = &pendingRequest.enableZsl;
3863 }
3864
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003865 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003866 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003867 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003868
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003869 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003870
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003871 if (pendingRequest.blob_request) {
3872 //Dump tuning metadata if enabled and available
3873 char prop[PROPERTY_VALUE_MAX];
3874 memset(prop, 0, sizeof(prop));
3875 property_get("persist.camera.dumpmetadata", prop, "0");
3876 int32_t enabled = atoi(prop);
3877 if (enabled && metadata->is_tuning_params_valid) {
3878 dumpMetadataToFile(metadata->tuning_params,
3879 mMetaFrameCount,
3880 enabled,
3881 "Snapshot",
3882 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003883 }
3884 }
3885
3886 if (!internalPproc) {
3887 LOGD("couldn't find need_metadata for this metadata");
3888 // Return metadata buffer
3889 if (free_and_bufdone_meta_buf) {
3890 mMetadataChannel->bufDone(metadata_buf);
3891 free(metadata_buf);
3892 }
3893 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003894
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003895 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003896 }
3897 }
3898
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003899 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3900
3901 // Try to send out capture result metadata.
3902 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003903 return;
3904
Thierry Strudel3d639192016-09-09 11:52:26 -07003905done_metadata:
3906 for (pendingRequestIterator i = mPendingRequestsList.begin();
3907 i != mPendingRequestsList.end() ;i++) {
3908 i->pipeline_depth++;
3909 }
3910 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3911 unblockRequestIfNecessary();
3912}
3913
3914/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003915 * FUNCTION : handleDepthDataLocked
3916 *
3917 * DESCRIPTION: Handles incoming depth data
3918 *
3919 * PARAMETERS : @depthData : Depth data
3920 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003921 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003922 *
3923 * RETURN :
3924 *
3925 *==========================================================================*/
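// Note on the loop below (descriptive summary of existing behavior): depth
// buffers are consumed oldest-first and each one is compared against the
// incoming frame number:
//   current == incoming -> populate the depth data, or mark the buffer
//                          CAMERA3_BUFFER_STATUS_ERROR if the data is invalid
//                          or population fails
//   current >  incoming -> stop; the buffer belongs to a later frame
//   current <  incoming -> the buffer never received its data, so a
//                          CAMERA3_MSG_ERROR_BUFFER notify is sent and the
//                          buffer is returned with an error status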
3926void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003927 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003928 uint32_t currentFrameNumber;
3929 buffer_handle_t *depthBuffer;
3930
3931 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003932 return;
3933 }
3934
3935 camera3_stream_buffer_t resultBuffer =
3936 {.acquire_fence = -1,
3937 .release_fence = -1,
3938 .status = CAMERA3_BUFFER_STATUS_OK,
3939 .buffer = nullptr,
3940 .stream = mDepthChannel->getStream()};
Emilian Peev7650c122017-01-19 08:24:33 -08003941 do {
3942 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3943 if (nullptr == depthBuffer) {
3944 break;
3945 }
3946
Emilian Peev7650c122017-01-19 08:24:33 -08003947 resultBuffer.buffer = depthBuffer;
3948 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003949 if (valid) {
3950 int32_t rc = mDepthChannel->populateDepthData(depthData,
3951 frameNumber);
3952 if (NO_ERROR != rc) {
3953 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3954 } else {
3955 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3956 }
Emilian Peev7650c122017-01-19 08:24:33 -08003957 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003958 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003959 }
3960 } else if (currentFrameNumber > frameNumber) {
3961 break;
3962 } else {
3963 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3964 {{currentFrameNumber, mDepthChannel->getStream(),
3965 CAMERA3_MSG_ERROR_BUFFER}}};
3966 orchestrateNotify(&notify_msg);
3967
3968 LOGE("Depth buffer for frame number: %d is missing "
3969 "returning back!", currentFrameNumber);
3970 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3971 }
3972 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003973 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003974 } while (currentFrameNumber < frameNumber);
3975}
3976
3977/*===========================================================================
3978 * FUNCTION : notifyErrorFoPendingDepthData
3979 *
3980 * DESCRIPTION: Returns error for any pending depth buffers
3981 *
3982 * PARAMETERS : depthCh - depth channel that needs to get flushed
3983 *
3984 * RETURN :
3985 *
3986 *==========================================================================*/
3987void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3988 QCamera3DepthChannel *depthCh) {
3989 uint32_t currentFrameNumber;
3990 buffer_handle_t *depthBuffer;
3991
3992 if (nullptr == depthCh) {
3993 return;
3994 }
3995
3996 camera3_notify_msg_t notify_msg =
3997 {.type = CAMERA3_MSG_ERROR,
3998 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3999 camera3_stream_buffer_t resultBuffer =
4000 {.acquire_fence = -1,
4001 .release_fence = -1,
4002 .buffer = nullptr,
4003 .stream = depthCh->getStream(),
4004 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004005
4006 while (nullptr !=
4007 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4008 depthCh->unmapBuffer(currentFrameNumber);
4009
4010 notify_msg.message.error.frame_number = currentFrameNumber;
4011 orchestrateNotify(&notify_msg);
4012
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004013 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004014 };
4015}
4016
4017/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004018 * FUNCTION : hdrPlusPerfLock
4019 *
4020 * DESCRIPTION: perf lock for HDR+ using custom intent
4021 *
4022 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4023 *
4024 * RETURN : None
4025 *
4026 *==========================================================================*/
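// Note: the perf lock below is re-armed with a 2000 ms timeout whenever the
// metadata frame number matches mLastCustIntentFrmNum, keeping the boost
// active briefly after the last frame captured with the custom intent.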
4027void QCamera3HardwareInterface::hdrPlusPerfLock(
4028 mm_camera_super_buf_t *metadata_buf)
4029{
4030 if (NULL == metadata_buf) {
4031 LOGE("metadata_buf is NULL");
4032 return;
4033 }
4034 metadata_buffer_t *metadata =
4035 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4036 int32_t *p_frame_number_valid =
4037 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4038 uint32_t *p_frame_number =
4039 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4040
4041 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4042 LOGE("%s: Invalid metadata", __func__);
4043 return;
4044 }
4045
Wei Wang01385482017-08-03 10:49:34 -07004046 //acquire perf lock for 2 secs after the last HDR frame is captured
4047 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
Thierry Strudel3d639192016-09-09 11:52:26 -07004048 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4049 if ((p_frame_number != NULL) &&
4050 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004051 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004052 }
4053 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004054}
4055
4056/*===========================================================================
4057 * FUNCTION : handleInputBufferWithLock
4058 *
4059 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4060 *
4061 * PARAMETERS : @frame_number: frame number of the input buffer
4062 *
4063 * RETURN :
4064 *
4065 *==========================================================================*/
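// Note: for a reprocess request the shutter timestamp is taken from
// ANDROID_SENSOR_TIMESTAMP in the request settings; if the tag or the settings
// buffer is missing, the current CLOCK_MONOTONIC time is used and an error is
// logged.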
4066void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4067{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004068 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004069 pendingRequestIterator i = mPendingRequestsList.begin();
4070 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4071 i++;
4072 }
4073 if (i != mPendingRequestsList.end() && i->input_buffer) {
4074 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004075 CameraMetadata settings;
4076 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4077 if(i->settings) {
4078 settings = i->settings;
4079 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4080 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004081 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004082 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004083 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004084 } else {
4085 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004086 }
4087
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004088 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4089 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4090 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004091
4092 camera3_capture_result result;
4093 memset(&result, 0, sizeof(camera3_capture_result));
4094 result.frame_number = frame_number;
4095 result.result = i->settings;
4096 result.input_buffer = i->input_buffer;
4097 result.partial_result = PARTIAL_RESULT_COUNT;
4098
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004099 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004100 LOGD("Input request metadata and input buffer frame_number = %u",
4101 i->frame_number);
4102 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004103
4104 // Dispatch result metadata that may be just unblocked by this reprocess result.
4105 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004106 } else {
4107 LOGE("Could not find input request for frame number %d", frame_number);
4108 }
4109}
4110
4111/*===========================================================================
4112 * FUNCTION : handleBufferWithLock
4113 *
4114 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4115 *
4116 * PARAMETERS : @buffer: image buffer for the callback
4117 * @frame_number: frame number of the image buffer
4118 *
4119 * RETURN :
4120 *
4121 *==========================================================================*/
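// Note: buffers handed in here are not returned to the framework directly;
// they are queued into mOutputBufferDispatcher via markBufferReady(). Buffers
// whose frame number appears in mPendingFrameDropList are first marked
// CAMERA3_BUFFER_STATUS_ERROR.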
4122void QCamera3HardwareInterface::handleBufferWithLock(
4123 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4124{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004125 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004126
4127 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4128 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4129 }
4130
Thierry Strudel3d639192016-09-09 11:52:26 -07004131 /* Nothing to be done during error state */
4132 if ((ERROR == mState) || (DEINIT == mState)) {
4133 return;
4134 }
4135 if (mFlushPerf) {
4136 handleBuffersDuringFlushLock(buffer);
4137 return;
4138 }
4139 //not in flush
4140 // If the frame number doesn't exist in the pending request list,
4141 // directly send the buffer to the frameworks, and update pending buffers map
4142 // Otherwise, book-keep the buffer.
4143 pendingRequestIterator i = mPendingRequestsList.begin();
4144 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4145 i++;
4146 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004147
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004148 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004149 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004150 // For a reprocessing request, try to send out result metadata.
4151 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004152 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004153 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004154
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004155 // Check if this frame was dropped.
4156 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4157 m != mPendingFrameDropList.end(); m++) {
4158 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4159 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4160 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4161 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4162 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4163 frame_number, streamID);
4164 m = mPendingFrameDropList.erase(m);
4165 break;
4166 }
4167 }
4168
4169 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4170 LOGH("result frame_number = %d, buffer = %p",
4171 frame_number, buffer->buffer);
4172
4173 mPendingBuffersMap.removeBuf(buffer->buffer);
4174 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4175
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004176 if (mPreviewStarted == false) {
4177 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4178 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004179 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4180
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004181 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4182 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4183 mPreviewStarted = true;
4184
4185 // Set power hint for preview
4186 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4187 }
4188 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004189}
4190
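/*===========================================================================
 * FUNCTION : handlePendingResultMetadataWithLock
 *
 * DESCRIPTION: Attaches result metadata to the matching pending request and
 * tries to dispatch completed results. Called with mMutex held.
 *
 * PARAMETERS : @frameNumber : frame number of the result metadata
 * @resultMetadata: result metadata for the frame, or NULL for a reprocess
 * request (whose result is its settings); may be adjusted (e.g. lens
 * shading map removal) before dispatch
 *
 * RETURN :
 *
 *==========================================================================*/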
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004191void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004192 camera_metadata_t *resultMetadata)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004193{
4194 // Find the pending request for this result metadata.
4195 auto requestIter = mPendingRequestsList.begin();
4196 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4197 requestIter++;
4198 }
4199
4200 if (requestIter == mPendingRequestsList.end()) {
4201 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4202 return;
4203 }
4204
4205 // Update the result metadata
4206 requestIter->resultMetadata = resultMetadata;
4207
4208 // Check what type of request this is.
4209 bool liveRequest = false;
4210 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004211 // HDR+ request doesn't have partial results.
4212 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004213 } else if (requestIter->input_buffer != nullptr) {
4214 // Reprocessing request result is the same as settings.
4215 requestIter->resultMetadata = requestIter->settings;
4216 // Reprocessing request doesn't have partial results.
4217 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4218 } else {
4219 liveRequest = true;
4220 requestIter->partial_result_cnt++;
4221 mPendingLiveRequest--;
4222
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004223 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004224 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004225 // For a live request, send the metadata to HDR+ client.
4226 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4227 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4228 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4229 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004230 }
4231 }
4232
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004233 // Remove the lens shading map if it was not requested.
4234 if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4235 CameraMetadata metadata;
4236 metadata.acquire(resultMetadata);
4237 metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4238 metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4239 &requestIter->requestedLensShadingMapMode, 1);
4240
4241 requestIter->resultMetadata = metadata.release();
4242 }
4243
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004244 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4245}
4246
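/*===========================================================================
 * FUNCTION : dispatchResultMetadataWithLock
 *
 * DESCRIPTION: Walks mPendingRequestsList in frame-number order and sends out
 * every result whose metadata is ready, keeping shutter callbacks and result
 * metadata in order. For a live request, earlier live requests that still
 * have no metadata are completed with CAMERA3_MSG_ERROR_RESULT. Called with
 * mMutex held.
 *
 * PARAMETERS : @frameNumber : frame number whose result metadata became ready
 * @isLiveRequest: true if that result belongs to a live (non-reprocess,
 * non-HDR+) request
 *
 * RETURN :
 *
 *==========================================================================*/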
4247void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4248 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004249 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4250 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004251 bool readyToSend = true;
4252
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004253 // Iterate through the pending requests to send out result metadata that are ready. Also if
4254 // this result metadata belongs to a live request, notify errors for previous live requests
4255 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004256 auto iter = mPendingRequestsList.begin();
4257 while (iter != mPendingRequestsList.end()) {
4258 // Check if current pending request is ready. If it's not ready, the following pending
4259 // requests are also not ready.
4260 if (readyToSend && iter->resultMetadata == nullptr) {
4261 readyToSend = false;
4262 }
4263
4264 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4265
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004266 camera3_capture_result_t result = {};
4267 result.frame_number = iter->frame_number;
4268 result.result = iter->resultMetadata;
4269 result.partial_result = iter->partial_result_cnt;
4270
4271 // If this pending buffer has result metadata, we may be able to send out shutter callback
4272 // and result metadata.
4273 if (iter->resultMetadata != nullptr) {
4274 if (!readyToSend) {
4275 // If any of the previous pending request is not ready, this pending request is
4276 // also not ready to send in order to keep shutter callbacks and result metadata
4277 // in order.
4278 iter++;
4279 continue;
4280 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004281 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004282 // If the result metadata belongs to a live request, notify errors for previous pending
4283 // live requests.
4284 mPendingLiveRequest--;
4285
4286 CameraMetadata dummyMetadata;
4287 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4288 result.result = dummyMetadata.release();
4289
4290 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004291
4292 // partial_result should be PARTIAL_RESULT_CNT in case of
4293 // ERROR_RESULT.
4294 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4295 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004296 } else {
4297 iter++;
4298 continue;
4299 }
4300
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004301 result.output_buffers = nullptr;
4302 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004303 orchestrateResult(&result);
4304
4305 // For reprocessing, result metadata is the same as settings so do not free it here to
4306 // avoid double free.
4307 if (result.result != iter->settings) {
4308 free_camera_metadata((camera_metadata_t *)result.result);
4309 }
4310 iter->resultMetadata = nullptr;
4311 iter = erasePendingRequest(iter);
4312 }
4313
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004314 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004315 for (auto &iter : mPendingRequestsList) {
4316 // Increment pipeline depth for the following pending requests.
4317 if (iter.frame_number > frameNumber) {
4318 iter.pipeline_depth++;
4319 }
4320 }
4321 }
4322
4323 unblockRequestIfNecessary();
4324}
4325
Thierry Strudel3d639192016-09-09 11:52:26 -07004326/*===========================================================================
4327 * FUNCTION : unblockRequestIfNecessary
4328 *
4329 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4330 * that mMutex is held when this function is called.
4331 *
4332 * PARAMETERS :
4333 *
4334 * RETURN :
4335 *
4336 *==========================================================================*/
4337void QCamera3HardwareInterface::unblockRequestIfNecessary()
4338{
4339 // Unblock process_capture_request
4340 pthread_cond_signal(&mRequestCond);
4341}
4342
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004343/*===========================================================================
4344 * FUNCTION : isHdrSnapshotRequest
4345 *
 4346 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4347 *
4348 * PARAMETERS : camera3 request structure
4349 *
4350 * RETURN : boolean decision variable
4351 *
4352 *==========================================================================*/
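// Note: a request counts as an HDR snapshot when at least one output buffer
// targets a BLOB (JPEG) stream and, unless mForceHdrSnapshot is set, the
// settings select ANDROID_CONTROL_MODE_USE_SCENE_MODE together with
// ANDROID_CONTROL_SCENE_MODE_HDR.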
4353bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4354{
4355 if (request == NULL) {
4356 LOGE("Invalid request handle");
4357 assert(0);
4358 return false;
4359 }
4360
4361 if (!mForceHdrSnapshot) {
4362 CameraMetadata frame_settings;
4363 frame_settings = request->settings;
4364
4365 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4366 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4367 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4368 return false;
4369 }
4370 } else {
4371 return false;
4372 }
4373
4374 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4375 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4376 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4377 return false;
4378 }
4379 } else {
4380 return false;
4381 }
4382 }
4383
4384 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4385 if (request->output_buffers[i].stream->format
4386 == HAL_PIXEL_FORMAT_BLOB) {
4387 return true;
4388 }
4389 }
4390
4391 return false;
4392}
4393/*===========================================================================
4394 * FUNCTION : orchestrateRequest
4395 *
4396 * DESCRIPTION: Orchestrates a capture request from camera service
4397 *
4398 * PARAMETERS :
4399 * @request : request from framework to process
4400 *
4401 * RETURN : Error status codes
4402 *
4403 *==========================================================================*/
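// Note on the HDR snapshot path below: with AE locked, the single framework
// request is expanded into a sequence of internal requests (a metering/
// settling request plus the framework-visible capture at GB_HDR_HALF_STEP_EV,
// then metering + snapshot pairs at 0 EV and at GB_HDR_2X_STEP_EV) before the
// original settings pointer is restored. Non-HDR requests are simply assigned
// an internal frame number and passed straight to processCaptureRequest().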
4404int32_t QCamera3HardwareInterface::orchestrateRequest(
4405 camera3_capture_request_t *request)
4406{
4407
4408 uint32_t originalFrameNumber = request->frame_number;
4409 uint32_t originalOutputCount = request->num_output_buffers;
4410 const camera_metadata_t *original_settings = request->settings;
4411 List<InternalRequest> internallyRequestedStreams;
4412 List<InternalRequest> emptyInternalList;
4413
4414 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4415 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4416 uint32_t internalFrameNumber;
4417 CameraMetadata modified_meta;
4418
4419
4420 /* Add Blob channel to list of internally requested streams */
4421 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4422 if (request->output_buffers[i].stream->format
4423 == HAL_PIXEL_FORMAT_BLOB) {
4424 InternalRequest streamRequested;
4425 streamRequested.meteringOnly = 1;
4426 streamRequested.need_metadata = 0;
4427 streamRequested.stream = request->output_buffers[i].stream;
4428 internallyRequestedStreams.push_back(streamRequested);
4429 }
4430 }
4431 request->num_output_buffers = 0;
4432 auto itr = internallyRequestedStreams.begin();
4433
4434 /* Modify setting to set compensation */
4435 modified_meta = request->settings;
4436 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4437 uint8_t aeLock = 1;
4438 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4439 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4440 camera_metadata_t *modified_settings = modified_meta.release();
4441 request->settings = modified_settings;
4442
4443 /* Capture Settling & -2x frame */
4444 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4445 request->frame_number = internalFrameNumber;
4446 processCaptureRequest(request, internallyRequestedStreams);
4447
4448 request->num_output_buffers = originalOutputCount;
4449 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4450 request->frame_number = internalFrameNumber;
4451 processCaptureRequest(request, emptyInternalList);
4452 request->num_output_buffers = 0;
4453
4454 modified_meta = modified_settings;
4455 expCompensation = 0;
4456 aeLock = 1;
4457 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4458 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4459 modified_settings = modified_meta.release();
4460 request->settings = modified_settings;
4461
4462 /* Capture Settling & 0X frame */
4463
4464 itr = internallyRequestedStreams.begin();
4465 if (itr == internallyRequestedStreams.end()) {
4466 LOGE("Error Internally Requested Stream list is empty");
4467 assert(0);
4468 } else {
4469 itr->need_metadata = 0;
4470 itr->meteringOnly = 1;
4471 }
4472
4473 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4474 request->frame_number = internalFrameNumber;
4475 processCaptureRequest(request, internallyRequestedStreams);
4476
4477 itr = internallyRequestedStreams.begin();
4478 if (itr == internallyRequestedStreams.end()) {
4479 ALOGE("Error Internally Requested Stream list is empty");
4480 assert(0);
4481 } else {
4482 itr->need_metadata = 1;
4483 itr->meteringOnly = 0;
4484 }
4485
4486 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4487 request->frame_number = internalFrameNumber;
4488 processCaptureRequest(request, internallyRequestedStreams);
4489
4490 /* Capture 2X frame*/
4491 modified_meta = modified_settings;
4492 expCompensation = GB_HDR_2X_STEP_EV;
4493 aeLock = 1;
4494 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4495 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4496 modified_settings = modified_meta.release();
4497 request->settings = modified_settings;
4498
4499 itr = internallyRequestedStreams.begin();
4500 if (itr == internallyRequestedStreams.end()) {
4501 ALOGE("Error Internally Requested Stream list is empty");
4502 assert(0);
4503 } else {
4504 itr->need_metadata = 0;
4505 itr->meteringOnly = 1;
4506 }
4507 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4508 request->frame_number = internalFrameNumber;
4509 processCaptureRequest(request, internallyRequestedStreams);
4510
4511 itr = internallyRequestedStreams.begin();
4512 if (itr == internallyRequestedStreams.end()) {
4513 ALOGE("Error Internally Requested Stream list is empty");
4514 assert(0);
4515 } else {
4516 itr->need_metadata = 1;
4517 itr->meteringOnly = 0;
4518 }
4519
4520 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4521 request->frame_number = internalFrameNumber;
4522 processCaptureRequest(request, internallyRequestedStreams);
4523
4524
4525 /* Capture 2X on original streaming config*/
4526 internallyRequestedStreams.clear();
4527
4528 /* Restore original settings pointer */
4529 request->settings = original_settings;
4530 } else {
4531 uint32_t internalFrameNumber;
4532 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4533 request->frame_number = internalFrameNumber;
4534 return processCaptureRequest(request, internallyRequestedStreams);
4535 }
4536
4537 return NO_ERROR;
4538}
4539
4540/*===========================================================================
4541 * FUNCTION : orchestrateResult
4542 *
4543 * DESCRIPTION: Orchestrates a capture result to camera service
4544 *
4545 * PARAMETERS :
 4546 * @result : capture result to be sent to the camera service
4547 *
4548 * RETURN :
4549 *
4550 *==========================================================================*/
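// Note: internal frame numbers are translated back to framework frame numbers
// through _orchestrationDb. Results that belong to purely internal requests
// (EMPTY_FRAMEWORK_FRAME_NUMBER) are dropped, and when the result metadata
// carries ANDROID_SYNC_FRAME_NUMBER it is rewritten to the framework frame
// number before process_capture_result() is called.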
4551void QCamera3HardwareInterface::orchestrateResult(
4552 camera3_capture_result_t *result)
4553{
4554 uint32_t frameworkFrameNumber;
4555 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4556 frameworkFrameNumber);
4557 if (rc != NO_ERROR) {
4558 LOGE("Cannot find translated frameworkFrameNumber");
4559 assert(0);
4560 } else {
4561 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004562 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004563 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004564 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004565 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4566 camera_metadata_entry_t entry;
4567 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4568 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004569 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004570 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4571 if (ret != OK)
4572 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004573 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004574 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004575 result->frame_number = frameworkFrameNumber;
4576 mCallbackOps->process_capture_result(mCallbackOps, result);
4577 }
4578 }
4579}
4580
4581/*===========================================================================
4582 * FUNCTION : orchestrateNotify
4583 *
4584 * DESCRIPTION: Orchestrates a notify to camera service
4585 *
4586 * PARAMETERS :
 4587 * @notify_msg : notify message to be sent to the camera service
4588 *
4589 * RETURN :
4590 *
4591 *==========================================================================*/
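// Note: notifications go through the same frame-number translation. A failed
// lookup is tolerated only for CAMERA3_MSG_ERROR_DEVICE (forwarded with frame
// number 0); notifications that belong to purely internal requests are
// dropped.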
4592void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4593{
4594 uint32_t frameworkFrameNumber;
4595 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004596 int32_t rc = NO_ERROR;
4597
4598 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004599 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004600
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004601 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004602 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4603 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4604 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004605 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004606 LOGE("Cannot find translated frameworkFrameNumber");
4607 assert(0);
4608 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004609 }
4610 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004611
4612 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4613 LOGD("Internal Request drop the notifyCb");
4614 } else {
4615 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4616 mCallbackOps->notify(mCallbackOps, notify_msg);
4617 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004618}
4619
4620/*===========================================================================
4621 * FUNCTION : FrameNumberRegistry
4622 *
4623 * DESCRIPTION: Constructor
4624 *
4625 * PARAMETERS :
4626 *
4627 * RETURN :
4628 *
4629 *==========================================================================*/
4630FrameNumberRegistry::FrameNumberRegistry()
4631{
4632 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4633}
4634
4635/*===========================================================================
4636 * FUNCTION : ~FrameNumberRegistry
4637 *
4638 * DESCRIPTION: Destructor
4639 *
4640 * PARAMETERS :
4641 *
4642 * RETURN :
4643 *
4644 *==========================================================================*/
4645FrameNumberRegistry::~FrameNumberRegistry()
4646{
4647}
4648
4649/*===========================================================================
4650 * FUNCTION : PurgeOldEntriesLocked
4651 *
 4652 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4653 *
4654 * PARAMETERS :
4655 *
4656 * RETURN : NONE
4657 *
4658 *==========================================================================*/
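// Illustrative example with hypothetical numbers: if FRAME_REGISTER_LRU_SIZE
// were 100 and _nextFreeInternalNumber were 1000, every mapping whose internal
// frame number is below 900 would be erased below, so the registry holds at
// most roughly FRAME_REGISTER_LRU_SIZE live entries.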
4659void FrameNumberRegistry::purgeOldEntriesLocked()
4660{
4661 while (_register.begin() != _register.end()) {
4662 auto itr = _register.begin();
4663 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4664 _register.erase(itr);
4665 } else {
4666 return;
4667 }
4668 }
4669}
4670
4671/*===========================================================================
4672 * FUNCTION : allocStoreInternalFrameNumber
4673 *
 4674 * DESCRIPTION: Method to record a framework request and associate a new
 4675 * internal frame number with it
4676 *
4677 * PARAMETERS :
 4678 * @frameworkFrameNumber: Identifier given by the framework
 4679 * @internalFrameNumber : Output parameter that receives the newly generated
 4680 * internal frame number
4681 *
4682 * RETURN : Error code
4683 *
4684 *==========================================================================*/
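// Usage sketch (mirrors the pattern used in orchestrateRequest() and
// orchestrateResult(); shown here only for illustration):
//
//     uint32_t internalFrameNumber;
//     _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number,
//             internalFrameNumber);
//     request->frame_number = internalFrameNumber;   // submit internally
//     ...
//     uint32_t frameworkFrameNumber;
//     _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
//             frameworkFrameNumber);                 // translate back on the way out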
4685int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4686 uint32_t &internalFrameNumber)
4687{
4688 Mutex::Autolock lock(mRegistryLock);
4689 internalFrameNumber = _nextFreeInternalNumber++;
4690 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4691 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4692 purgeOldEntriesLocked();
4693 return NO_ERROR;
4694}
4695
4696/*===========================================================================
4697 * FUNCTION : generateStoreInternalFrameNumber
4698 *
 4699 * DESCRIPTION: Method to allocate a new internal frame number that is not
 4700 * associated with any framework request
4701 *
4702 * PARAMETERS :
 4703 * @internalFrameNumber: Output parameter that receives the newly generated
 4704 * internal frame number
4705 *
4706 * RETURN : Error code
4707 *
4708 *==========================================================================*/
4709int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4710{
4711 Mutex::Autolock lock(mRegistryLock);
4712 internalFrameNumber = _nextFreeInternalNumber++;
4713 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4714 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4715 purgeOldEntriesLocked();
4716 return NO_ERROR;
4717}
4718
4719/*===========================================================================
4720 * FUNCTION : getFrameworkFrameNumber
4721 *
 4722 * DESCRIPTION: Method to query the framework frame number given an internal one
4723 *
4724 * PARAMETERS :
 4725 * @internalFrameNumber : Internal frame number to look up
 4726 * @frameworkFrameNumber: Output parameter holding the framework frame number
4727 *
4728 * RETURN : Error code
4729 *
4730 *==========================================================================*/
4731int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4732 uint32_t &frameworkFrameNumber)
4733{
4734 Mutex::Autolock lock(mRegistryLock);
4735 auto itr = _register.find(internalFrameNumber);
4736 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004737 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004738 return -ENOENT;
4739 }
4740
4741 frameworkFrameNumber = itr->second;
4742 purgeOldEntriesLocked();
4743 return NO_ERROR;
4744}
Thierry Strudel3d639192016-09-09 11:52:26 -07004745
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004746status_t QCamera3HardwareInterface::fillPbStreamConfig(
4747 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4748 QCamera3Channel *channel, uint32_t streamIndex) {
4749 if (config == nullptr) {
4750 LOGE("%s: config is null", __FUNCTION__);
4751 return BAD_VALUE;
4752 }
4753
4754 if (channel == nullptr) {
4755 LOGE("%s: channel is null", __FUNCTION__);
4756 return BAD_VALUE;
4757 }
4758
4759 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4760 if (stream == nullptr) {
4761 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4762 return NAME_NOT_FOUND;
4763 }
4764
4765 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4766 if (streamInfo == nullptr) {
4767 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4768 return NAME_NOT_FOUND;
4769 }
4770
4771 config->id = pbStreamId;
4772 config->image.width = streamInfo->dim.width;
4773 config->image.height = streamInfo->dim.height;
4774 config->image.padding = 0;
4775 config->image.format = pbStreamFormat;
4776
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004777 uint32_t totalPlaneSize = 0;
4778
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004779 // Fill plane information.
4780 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4781 pbcamera::PlaneConfiguration plane;
4782 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4783 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4784 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004785
4786 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004787 }
4788
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004789 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004790 return OK;
4791}
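// Note on the padding computed above: config->image.padding is the difference
// between the backend's total frame length (buf_planes.plane_info.frame_len)
// and the sum of stride * scanline over all planes. For example (hypothetical
// numbers), planes of 4096 x 3024 and 4096 x 1512 bytes total 18579456, so a
// frame_len of 19267584 would leave 688128 bytes of padding.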
4792
Thierry Strudel3d639192016-09-09 11:52:26 -07004793/*===========================================================================
4794 * FUNCTION : processCaptureRequest
4795 *
4796 * DESCRIPTION: process a capture request from camera service
4797 *
4798 * PARAMETERS :
4799 * @request : request from framework to process
4800 *
4801 * RETURN :
4802 *
4803 *==========================================================================*/
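// Note: when mState is CONFIGURED, the first capture request performs one-time
// session setup before the request itself is queued: an optional backend
// unconfigure, session parameters (capture intent, instant AEC, EIS/DIS,
// tintless, CDS, AV timer, fps range), the META_STREAM_INFO set_parms call,
// the sensor mode query, channel initialization and bundle setup.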
4804int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004805 camera3_capture_request_t *request,
4806 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004807{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004808 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004809 int rc = NO_ERROR;
4810 int32_t request_id;
4811 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004812 bool isVidBufRequested = false;
4813 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004814 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004815
4816 pthread_mutex_lock(&mMutex);
4817
4818 // Validate current state
4819 switch (mState) {
4820 case CONFIGURED:
4821 case STARTED:
4822 /* valid state */
4823 break;
4824
4825 case ERROR:
4826 pthread_mutex_unlock(&mMutex);
4827 handleCameraDeviceError();
4828 return -ENODEV;
4829
4830 default:
4831 LOGE("Invalid state %d", mState);
4832 pthread_mutex_unlock(&mMutex);
4833 return -ENODEV;
4834 }
4835
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004836 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004837 if (rc != NO_ERROR) {
4838 LOGE("incoming request is not valid");
4839 pthread_mutex_unlock(&mMutex);
4840 return rc;
4841 }
4842
4843 meta = request->settings;
4844
4845 // For first capture request, send capture intent, and
4846 // stream on all streams
4847 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004848 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004849 // send an unconfigure to the backend so that the isp
4850 // resources are deallocated
4851 if (!mFirstConfiguration) {
4852 cam_stream_size_info_t stream_config_info;
4853 int32_t hal_version = CAM_HAL_V3;
4854 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4855 stream_config_info.buffer_info.min_buffers =
4856 MIN_INFLIGHT_REQUESTS;
4857 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004858 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004859 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004860 clear_metadata_buffer(mParameters);
4861 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4862 CAM_INTF_PARM_HAL_VERSION, hal_version);
4863 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4864 CAM_INTF_META_STREAM_INFO, stream_config_info);
4865 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4866 mParameters);
4867 if (rc < 0) {
4868 LOGE("set_parms for unconfigure failed");
4869 pthread_mutex_unlock(&mMutex);
4870 return rc;
4871 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004872
Thierry Strudel3d639192016-09-09 11:52:26 -07004873 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004874 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004875 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004876 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004877 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004878 property_get("persist.camera.is_type", is_type_value, "4");
4879 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4880 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4881 property_get("persist.camera.is_type_preview", is_type_value, "4");
4882 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4883 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004884
4885 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4886 int32_t hal_version = CAM_HAL_V3;
4887 uint8_t captureIntent =
4888 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4889 mCaptureIntent = captureIntent;
4890 clear_metadata_buffer(mParameters);
4891 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4892 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4893 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004894 if (mFirstConfiguration) {
4895 // configure instant AEC
4896 // Instant AEC is a session based parameter and it is needed only
4897 // once per complete session after open camera.
4898 // i.e. This is set only once for the first capture request, after open camera.
4899 setInstantAEC(meta);
4900 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004901 uint8_t fwkVideoStabMode=0;
4902 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4903 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4904 }
4905
Xue Tuecac74e2017-04-17 13:58:15 -07004906 // If EIS setprop is enabled then only turn it on for video/preview
4907 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004908 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004909 int32_t vsMode;
4910 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4911 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4912 rc = BAD_VALUE;
4913 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004914 LOGD("setEis %d", setEis);
4915 bool eis3Supported = false;
4916 size_t count = IS_TYPE_MAX;
4917 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4918 for (size_t i = 0; i < count; i++) {
4919 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4920 eis3Supported = true;
4921 break;
4922 }
4923 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004924
4925 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004926 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004927 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4928 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004929 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4930 is_type = isTypePreview;
4931 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4932 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4933 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004934 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004935 } else {
4936 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004937 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004938 } else {
4939 is_type = IS_TYPE_NONE;
4940 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004941 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004942 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004943 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4944 }
4945 }
4946
4947 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4948 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4949
Thierry Strudel54dc9782017-02-15 12:12:10 -08004950 //Disable tintless only if the property is set to 0
4951 memset(prop, 0, sizeof(prop));
4952 property_get("persist.camera.tintless.enable", prop, "1");
4953 int32_t tintless_value = atoi(prop);
4954
Thierry Strudel3d639192016-09-09 11:52:26 -07004955 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4956 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004957
Thierry Strudel3d639192016-09-09 11:52:26 -07004958 //Disable CDS for HFR mode or if DIS/EIS is on.
4959 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4960 //after every configure_stream
4961 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4962 (m_bIsVideo)) {
4963 int32_t cds = CAM_CDS_MODE_OFF;
4964 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4965 CAM_INTF_PARM_CDS_MODE, cds))
4966 LOGE("Failed to disable CDS for HFR mode");
4967
4968 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004969
4970 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4971 uint8_t* use_av_timer = NULL;
4972
4973 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004974 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004975 use_av_timer = &m_debug_avtimer;
4976 }
4977 else{
4978 use_av_timer =
4979 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004980 if (use_av_timer) {
4981 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4982 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004983 }
4984
4985 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4986 rc = BAD_VALUE;
4987 }
4988 }
4989
Thierry Strudel3d639192016-09-09 11:52:26 -07004990 setMobicat();
4991
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004992 uint8_t nrMode = 0;
4993 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4994 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4995 }
4996
Thierry Strudel3d639192016-09-09 11:52:26 -07004997 /* Set fps and hfr mode while sending meta stream info so that sensor
4998 * can configure appropriate streaming mode */
4999 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005000 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
5001 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07005002 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5003 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005004 if (rc == NO_ERROR) {
5005 int32_t max_fps =
5006 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07005007 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005008 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
5009 }
5010 /* For HFR, more buffers are dequeued upfront to improve the performance */
5011 if (mBatchSize) {
5012 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
5013 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5014 }
5015 }
5016 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005017 LOGE("setHalFpsRange failed");
5018 }
5019 }
5020 if (meta.exists(ANDROID_CONTROL_MODE)) {
5021 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5022 rc = extractSceneMode(meta, metaMode, mParameters);
5023 if (rc != NO_ERROR) {
5024 LOGE("extractSceneMode failed");
5025 }
5026 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005027 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005028
Thierry Strudel04e026f2016-10-10 11:27:36 -07005029 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5030 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5031 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5032 rc = setVideoHdrMode(mParameters, vhdr);
5033 if (rc != NO_ERROR) {
5034 LOGE("setVideoHDR is failed");
5035 }
5036 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005037
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005038 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005039 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005040 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005041 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5042 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5043 sensorModeFullFov)) {
5044 rc = BAD_VALUE;
5045 }
5046 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005047 //TODO: validate the arguments, HSV scenemode should have only the
5048 //advertised fps ranges
5049
 5050 /* Set the capture intent, HAL version, tintless, stream info,
 5051 * and DIS enable parameters to the backend */
5052 LOGD("set_parms META_STREAM_INFO " );
5053 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005054 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5055 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005056 mStreamConfigInfo.type[i],
5057 mStreamConfigInfo.stream_sizes[i].width,
5058 mStreamConfigInfo.stream_sizes[i].height,
5059 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005060 mStreamConfigInfo.format[i],
5061 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005062 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005063
Thierry Strudel3d639192016-09-09 11:52:26 -07005064 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5065 mParameters);
5066 if (rc < 0) {
5067 LOGE("set_parms failed for hal version, stream info");
5068 }
5069
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005070 cam_sensor_mode_info_t sensorModeInfo = {};
5071 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005072 if (rc != NO_ERROR) {
5073 LOGE("Failed to get sensor output size");
5074 pthread_mutex_unlock(&mMutex);
5075 goto error_exit;
5076 }
5077
5078 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5079 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005080 sensorModeInfo.active_array_size.width,
5081 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005082
5083 /* Set batchmode before initializing channel. Since registerBuffer
5084 * internally initializes some of the channels, better set batchmode
5085 * even before first register buffer */
5086 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5087 it != mStreamInfo.end(); it++) {
5088 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5089 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5090 && mBatchSize) {
5091 rc = channel->setBatchSize(mBatchSize);
5092 //Disable per frame map unmap for HFR/batchmode case
5093 rc |= channel->setPerFrameMapUnmap(false);
5094 if (NO_ERROR != rc) {
5095 LOGE("Channel init failed %d", rc);
5096 pthread_mutex_unlock(&mMutex);
5097 goto error_exit;
5098 }
5099 }
5100 }
5101
5102 //First initialize all streams
5103 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5104 it != mStreamInfo.end(); it++) {
5105 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005106
5107 /* Initial value of NR mode is needed before stream on */
5108 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005109 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5110 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005111 setEis) {
5112 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5113 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5114 is_type = mStreamConfigInfo.is_type[i];
5115 break;
5116 }
5117 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005118 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005119 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005120 rc = channel->initialize(IS_TYPE_NONE);
5121 }
5122 if (NO_ERROR != rc) {
5123 LOGE("Channel initialization failed %d", rc);
5124 pthread_mutex_unlock(&mMutex);
5125 goto error_exit;
5126 }
5127 }
5128
5129 if (mRawDumpChannel) {
5130 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5131 if (rc != NO_ERROR) {
5132 LOGE("Error: Raw Dump Channel init failed");
5133 pthread_mutex_unlock(&mMutex);
5134 goto error_exit;
5135 }
5136 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005137 if (mHdrPlusRawSrcChannel) {
5138 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5139 if (rc != NO_ERROR) {
5140 LOGE("Error: HDR+ RAW Source Channel init failed");
5141 pthread_mutex_unlock(&mMutex);
5142 goto error_exit;
5143 }
5144 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005145 if (mSupportChannel) {
5146 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5147 if (rc < 0) {
5148 LOGE("Support channel initialization failed");
5149 pthread_mutex_unlock(&mMutex);
5150 goto error_exit;
5151 }
5152 }
5153 if (mAnalysisChannel) {
5154 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5155 if (rc < 0) {
5156 LOGE("Analysis channel initialization failed");
5157 pthread_mutex_unlock(&mMutex);
5158 goto error_exit;
5159 }
5160 }
5161 if (mDummyBatchChannel) {
5162 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5163 if (rc < 0) {
5164 LOGE("mDummyBatchChannel setBatchSize failed");
5165 pthread_mutex_unlock(&mMutex);
5166 goto error_exit;
5167 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005168 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005169 if (rc < 0) {
5170 LOGE("mDummyBatchChannel initialization failed");
5171 pthread_mutex_unlock(&mMutex);
5172 goto error_exit;
5173 }
5174 }
5175
5176 // Set bundle info
5177 rc = setBundleInfo();
5178 if (rc < 0) {
5179 LOGE("setBundleInfo failed %d", rc);
5180 pthread_mutex_unlock(&mMutex);
5181 goto error_exit;
5182 }
5183
5184 //update settings from app here
5185 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5186 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5187 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5188 }
5189 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5190 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5191 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5192 }
5193 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5194 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5195 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5196
5197 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5198 (mLinkedCameraId != mCameraId) ) {
5199 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5200 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005201 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005202 goto error_exit;
5203 }
5204 }
5205
5206 // add bundle related cameras
5207 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5208 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005209 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5210 &m_pDualCamCmdPtr->bundle_info;
5211 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005212 if (mIsDeviceLinked)
5213 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5214 else
5215 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5216
5217 pthread_mutex_lock(&gCamLock);
5218
5219 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5220 LOGE("Dualcam: Invalid Session Id ");
5221 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005222 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005223 goto error_exit;
5224 }
5225
5226 if (mIsMainCamera == 1) {
5227 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5228 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005229 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005230 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005231 // related session id should be session id of linked session
5232 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5233 } else {
5234 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5235 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005236 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005237 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005238 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5239 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005240 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005241 pthread_mutex_unlock(&gCamLock);
5242
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005243 rc = mCameraHandle->ops->set_dual_cam_cmd(
5244 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005245 if (rc < 0) {
5246 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005247 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005248 goto error_exit;
5249 }
5250 }
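 // Illustrative summary of the bundle setup above (sketch, not additional
 // logic): the main camera is configured as {CAM_MODE_PRIMARY, CAM_TYPE_MAIN,
 // CAM_ROLE_BAYER} and the aux camera as {CAM_MODE_SECONDARY, CAM_TYPE_AUX,
 // CAM_ROLE_MONO}, both with CAM_3A_SYNC_FOLLOW and the peer's session id,
 // before set_dual_cam_cmd() pushes the bundle to the backend.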
Thierry Strudel3d639192016-09-09 11:52:26 -07005251 goto no_error;
5252error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005253 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005254 return rc;
5255no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005256 mWokenUpByDaemon = false;
5257 mPendingLiveRequest = 0;
5258 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005259 }
5260
5261 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005262 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005263
5264 if (mFlushPerf) {
5265 //we cannot accept any requests during flush
5266 LOGE("process_capture_request cannot proceed during flush");
5267 pthread_mutex_unlock(&mMutex);
5268 return NO_ERROR; //should return an error
5269 }
5270
5271 if (meta.exists(ANDROID_REQUEST_ID)) {
5272 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5273 mCurrentRequestId = request_id;
5274 LOGD("Received request with id: %d", request_id);
5275 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
 5276 LOGE("Unable to find request id field, "
 5277 "& no previous id available");
5278 pthread_mutex_unlock(&mMutex);
5279 return NAME_NOT_FOUND;
5280 } else {
5281 LOGD("Re-using old request id");
5282 request_id = mCurrentRequestId;
5283 }
5284
5285 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5286 request->num_output_buffers,
5287 request->input_buffer,
5288 frameNumber);
5289 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005290 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005291 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005292 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005293 uint32_t snapshotStreamId = 0;
5294 for (size_t i = 0; i < request->num_output_buffers; i++) {
5295 const camera3_stream_buffer_t& output = request->output_buffers[i];
5296 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5297
Emilian Peev7650c122017-01-19 08:24:33 -08005298 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5299 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005300 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005301 blob_request = 1;
5302 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5303 }
5304
5305 if (output.acquire_fence != -1) {
5306 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5307 close(output.acquire_fence);
5308 if (rc != OK) {
5309 LOGE("sync wait failed %d", rc);
5310 pthread_mutex_unlock(&mMutex);
5311 return rc;
5312 }
5313 }
5314
Emilian Peev0f3c3162017-03-15 12:57:46 +00005315 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5316 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005317 depthRequestPresent = true;
5318 continue;
5319 }
5320
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005321 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005322 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005323
5324 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5325 isVidBufRequested = true;
5326 }
5327 }
5328
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005329 //FIXME: Add checks in validateCaptureRequest to ensure there are no dups
5330 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5331 itr++) {
5332 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5333 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5334 channel->getStreamID(channel->getStreamTypeMask());
5335
5336 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5337 isVidBufRequested = true;
5338 }
5339 }
5340
Thierry Strudel3d639192016-09-09 11:52:26 -07005341 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005342 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005343 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005344 }
5345 if (blob_request && mRawDumpChannel) {
5346 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005347 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005348 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005349 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005350 }
5351
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005352 {
5353 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5354 // Request a RAW buffer if
5355 // 1. mHdrPlusRawSrcChannel is valid.
5356 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5357 // 3. There is no pending HDR+ request.
5358 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5359 mHdrPlusPendingRequests.size() == 0) {
5360 streamsArray.stream_request[streamsArray.num_streams].streamID =
5361 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5362 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5363 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005364 }
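 // Worked example of the RAW request gating above (illustrative only,
 // assuming kHdrPlusRawPeriod == 4): frames 0, 4, 8, ... may carry an HDR+
 // RAW request provided mHdrPlusPendingRequests is empty; all other frames
 // skip the HDR+ RAW source stream for this request.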
5365
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005366 //extract capture intent
5367 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5368 mCaptureIntent =
5369 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5370 }
5371
5372 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5373 mCacMode =
5374 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5375 }
5376
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005377 uint8_t requestedLensShadingMapMode;
5378 // Get the shading map mode.
5379 if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5380 mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5381 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5382 } else {
5383 requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5384 }
5385
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005386 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005387 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005388
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005389 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005390 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005391 // If this request has a still capture intent, try to submit an HDR+ request.
5392 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5393 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5394 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5395 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005396 }
5397
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005398 if (hdrPlusRequest) {
5399 // For a HDR+ request, just set the frame parameters.
5400 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5401 if (rc < 0) {
5402 LOGE("fail to set frame parameters");
5403 pthread_mutex_unlock(&mMutex);
5404 return rc;
5405 }
5406 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005407 /* Parse the settings:
5408 * - For every request in NORMAL MODE
5409 * - For every request in HFR mode during preview only case
5410 * - For first request of every batch in HFR mode during video
5411 * recording. In batchmode the same settings except frame number is
5412 * repeated in each request of the batch.
5413 */
5414 if (!mBatchSize ||
5415 (mBatchSize && !isVidBufRequested) ||
5416 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005417 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005418 if (rc < 0) {
5419 LOGE("fail to set frame parameters");
5420 pthread_mutex_unlock(&mMutex);
5421 return rc;
5422 }
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005423
5424 {
5425 // If HDR+ mode is enabled, override lens shading mode to ON so lens shading map
5426 // will be reported in result metadata.
5427 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5428 if (mHdrPlusModeEnabled) {
5429 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5430 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5431 }
5432 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005433 }
 5434 /* For batch mode HFR, setFrameParameters is not called for every
 5435 * request; only the frame number of the latest request is parsed.
 5436 * Keep track of the first and last frame numbers in a batch so that
 5437 * metadata for all frame numbers of the batch can be duplicated in
 5438 * handleBatchMetadata */
5439 if (mBatchSize) {
5440 if (!mToBeQueuedVidBufs) {
5441 //start of the batch
5442 mFirstFrameNumberInBatch = request->frame_number;
5443 }
5444 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5445 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5446 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005447 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005448 return BAD_VALUE;
5449 }
5450 }
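 // Worked example (illustrative only): with mBatchSize == 4 and a batch
 // whose first request is frame 100, only frame 100 is recorded as
 // mFirstFrameNumberInBatch here; handleBatchMetadata() later uses that
 // anchor to duplicate the batch metadata for frames 100..103.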
5451 if (mNeedSensorRestart) {
5452 /* Unlock the mutex as restartSensor waits on the channels to be
5453 * stopped, which in turn calls stream callback functions -
5454 * handleBufferWithLock and handleMetadataWithLock */
5455 pthread_mutex_unlock(&mMutex);
5456 rc = dynamicUpdateMetaStreamInfo();
5457 if (rc != NO_ERROR) {
5458 LOGE("Restarting the sensor failed");
5459 return BAD_VALUE;
5460 }
5461 mNeedSensorRestart = false;
5462 pthread_mutex_lock(&mMutex);
5463 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005464 if(mResetInstantAEC) {
5465 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5466 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5467 mResetInstantAEC = false;
5468 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005469 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005470 if (request->input_buffer->acquire_fence != -1) {
5471 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5472 close(request->input_buffer->acquire_fence);
5473 if (rc != OK) {
5474 LOGE("input buffer sync wait failed %d", rc);
5475 pthread_mutex_unlock(&mMutex);
5476 return rc;
5477 }
5478 }
5479 }
5480
5481 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5482 mLastCustIntentFrmNum = frameNumber;
5483 }
5484 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005485 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005486 pendingRequestIterator latestRequest;
5487 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005488 pendingRequest.num_buffers = depthRequestPresent ?
5489 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005490 pendingRequest.request_id = request_id;
5491 pendingRequest.blob_request = blob_request;
5492 pendingRequest.timestamp = 0;
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005493 pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07005494 if (request->input_buffer) {
5495 pendingRequest.input_buffer =
5496 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5497 *(pendingRequest.input_buffer) = *(request->input_buffer);
5498 pInputBuffer = pendingRequest.input_buffer;
5499 } else {
5500 pendingRequest.input_buffer = NULL;
5501 pInputBuffer = NULL;
5502 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005503 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005504
5505 pendingRequest.pipeline_depth = 0;
5506 pendingRequest.partial_result_cnt = 0;
5507 extractJpegMetadata(mCurJpegMeta, request);
5508 pendingRequest.jpegMetadata = mCurJpegMeta;
5509 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005510 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005511 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5512 mHybridAeEnable =
5513 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5514 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005515
5516 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5517 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005518 /* DevCamDebug metadata processCaptureRequest */
5519 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5520 mDevCamDebugMetaEnable =
5521 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5522 }
5523 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5524 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005525
5526 //extract CAC info
5527 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5528 mCacMode =
5529 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5530 }
5531 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005532 pendingRequest.hdrplus = hdrPlusRequest;
Emilian Peev30522a12017-08-03 14:36:33 +01005533 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5534 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005535
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005536 // extract enableZsl info
5537 if (gExposeEnableZslKey) {
5538 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5539 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5540 mZslEnabled = pendingRequest.enableZsl;
5541 } else {
5542 pendingRequest.enableZsl = mZslEnabled;
5543 }
5544 }
5545
Thierry Strudel3d639192016-09-09 11:52:26 -07005546 PendingBuffersInRequest bufsForCurRequest;
5547 bufsForCurRequest.frame_number = frameNumber;
5548 // Mark current timestamp for the new request
5549 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005550 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005551
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005552 if (hdrPlusRequest) {
5553 // Save settings for this request.
5554 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5555 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5556
5557 // Add to pending HDR+ request queue.
5558 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5559 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5560
5561 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5562 }
5563
Thierry Strudel3d639192016-09-09 11:52:26 -07005564 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005565 if ((request->output_buffers[i].stream->data_space ==
5566 HAL_DATASPACE_DEPTH) &&
5567 (HAL_PIXEL_FORMAT_BLOB ==
5568 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005569 continue;
5570 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005571 RequestedBufferInfo requestedBuf;
5572 memset(&requestedBuf, 0, sizeof(requestedBuf));
5573 requestedBuf.stream = request->output_buffers[i].stream;
5574 requestedBuf.buffer = NULL;
5575 pendingRequest.buffers.push_back(requestedBuf);
5576
5577 // Add to buffer handle the pending buffers list
5578 PendingBufferInfo bufferInfo;
5579 bufferInfo.buffer = request->output_buffers[i].buffer;
5580 bufferInfo.stream = request->output_buffers[i].stream;
5581 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5582 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5583 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5584 frameNumber, bufferInfo.buffer,
5585 channel->getStreamTypeMask(), bufferInfo.stream->format);
5586 }
5587 // Add this request packet into mPendingBuffersMap
5588 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5589 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5590 mPendingBuffersMap.get_num_overall_buffers());
5591
5592 latestRequest = mPendingRequestsList.insert(
5593 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005594
5595 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5596 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005597 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005598 for (size_t i = 0; i < request->num_output_buffers; i++) {
5599 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5600 }
5601
Thierry Strudel3d639192016-09-09 11:52:26 -07005602 if(mFlush) {
5603 LOGI("mFlush is true");
5604 pthread_mutex_unlock(&mMutex);
5605 return NO_ERROR;
5606 }
5607
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005608 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5609 // channel.
5610 if (!hdrPlusRequest) {
5611 int indexUsed;
5612 // Notify metadata channel we receive a request
5613 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005614
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005615 if(request->input_buffer != NULL){
5616 LOGD("Input request, frame_number %d", frameNumber);
5617 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5618 if (NO_ERROR != rc) {
5619 LOGE("fail to set reproc parameters");
5620 pthread_mutex_unlock(&mMutex);
5621 return rc;
5622 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005623 }
5624
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005625 // Call request on other streams
5626 uint32_t streams_need_metadata = 0;
5627 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5628 for (size_t i = 0; i < request->num_output_buffers; i++) {
5629 const camera3_stream_buffer_t& output = request->output_buffers[i];
5630 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5631
5632 if (channel == NULL) {
5633 LOGW("invalid channel pointer for stream");
5634 continue;
5635 }
5636
5637 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5638 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5639 output.buffer, request->input_buffer, frameNumber);
5640 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005641 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005642 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5643 if (rc < 0) {
5644 LOGE("Fail to request on picture channel");
5645 pthread_mutex_unlock(&mMutex);
5646 return rc;
5647 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005648 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005649 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5650 assert(NULL != mDepthChannel);
5651 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005652
Emilian Peev7650c122017-01-19 08:24:33 -08005653 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5654 if (rc < 0) {
5655 LOGE("Fail to map on depth buffer");
5656 pthread_mutex_unlock(&mMutex);
5657 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005658 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005659 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005660 } else {
5661 LOGD("snapshot request with buffer %p, frame_number %d",
5662 output.buffer, frameNumber);
5663 if (!request->settings) {
5664 rc = channel->request(output.buffer, frameNumber,
5665 NULL, mPrevParameters, indexUsed);
5666 } else {
5667 rc = channel->request(output.buffer, frameNumber,
5668 NULL, mParameters, indexUsed);
5669 }
5670 if (rc < 0) {
5671 LOGE("Fail to request on picture channel");
5672 pthread_mutex_unlock(&mMutex);
5673 return rc;
5674 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005675
Emilian Peev7650c122017-01-19 08:24:33 -08005676 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5677 uint32_t j = 0;
5678 for (j = 0; j < streamsArray.num_streams; j++) {
5679 if (streamsArray.stream_request[j].streamID == streamId) {
5680 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5681 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5682 else
5683 streamsArray.stream_request[j].buf_index = indexUsed;
5684 break;
5685 }
5686 }
5687 if (j == streamsArray.num_streams) {
5688 LOGE("Did not find matching stream to update index");
5689 assert(0);
5690 }
5691
5692 pendingBufferIter->need_metadata = true;
5693 streams_need_metadata++;
5694 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005695 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005696 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5697 bool needMetadata = false;
5698 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5699 rc = yuvChannel->request(output.buffer, frameNumber,
5700 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5701 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005702 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005703 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005704 pthread_mutex_unlock(&mMutex);
5705 return rc;
5706 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005707
5708 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5709 uint32_t j = 0;
5710 for (j = 0; j < streamsArray.num_streams; j++) {
5711 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005712 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5713 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5714 else
5715 streamsArray.stream_request[j].buf_index = indexUsed;
5716 break;
5717 }
5718 }
5719 if (j == streamsArray.num_streams) {
5720 LOGE("Did not find matching stream to update index");
5721 assert(0);
5722 }
5723
5724 pendingBufferIter->need_metadata = needMetadata;
5725 if (needMetadata)
5726 streams_need_metadata += 1;
5727 LOGD("calling YUV channel request, need_metadata is %d",
5728 needMetadata);
5729 } else {
5730 LOGD("request with buffer %p, frame_number %d",
5731 output.buffer, frameNumber);
5732
5733 rc = channel->request(output.buffer, frameNumber, indexUsed);
5734
5735 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5736 uint32_t j = 0;
5737 for (j = 0; j < streamsArray.num_streams; j++) {
5738 if (streamsArray.stream_request[j].streamID == streamId) {
5739 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5740 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5741 else
5742 streamsArray.stream_request[j].buf_index = indexUsed;
5743 break;
5744 }
5745 }
5746 if (j == streamsArray.num_streams) {
5747 LOGE("Did not find matching stream to update index");
5748 assert(0);
5749 }
5750
5751 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5752 && mBatchSize) {
5753 mToBeQueuedVidBufs++;
5754 if (mToBeQueuedVidBufs == mBatchSize) {
5755 channel->queueBatchBuf();
5756 }
5757 }
5758 if (rc < 0) {
5759 LOGE("request failed");
5760 pthread_mutex_unlock(&mMutex);
5761 return rc;
5762 }
5763 }
5764 pendingBufferIter++;
5765 }
5766
5767 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5768 itr++) {
5769 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5770
5771 if (channel == NULL) {
5772 LOGE("invalid channel pointer for stream");
5773 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005774 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005775 return BAD_VALUE;
5776 }
5777
5778 InternalRequest requestedStream;
5779 requestedStream = (*itr);
5780
5781
5782 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5783 LOGD("snapshot request internally input buffer %p, frame_number %d",
5784 request->input_buffer, frameNumber);
5785 if(request->input_buffer != NULL){
5786 rc = channel->request(NULL, frameNumber,
5787 pInputBuffer, &mReprocMeta, indexUsed, true,
5788 requestedStream.meteringOnly);
5789 if (rc < 0) {
5790 LOGE("Fail to request on picture channel");
5791 pthread_mutex_unlock(&mMutex);
5792 return rc;
5793 }
5794 } else {
5795 LOGD("snapshot request with frame_number %d", frameNumber);
5796 if (!request->settings) {
5797 rc = channel->request(NULL, frameNumber,
5798 NULL, mPrevParameters, indexUsed, true,
5799 requestedStream.meteringOnly);
5800 } else {
5801 rc = channel->request(NULL, frameNumber,
5802 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5803 }
5804 if (rc < 0) {
5805 LOGE("Fail to request on picture channel");
5806 pthread_mutex_unlock(&mMutex);
5807 return rc;
5808 }
5809
5810 if ((*itr).meteringOnly != 1) {
5811 requestedStream.need_metadata = 1;
5812 streams_need_metadata++;
5813 }
5814 }
5815
5816 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5817 uint32_t j = 0;
5818 for (j = 0; j < streamsArray.num_streams; j++) {
5819 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005820 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5821 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5822 else
5823 streamsArray.stream_request[j].buf_index = indexUsed;
5824 break;
5825 }
5826 }
5827 if (j == streamsArray.num_streams) {
5828 LOGE("Did not find matching stream to update index");
5829 assert(0);
5830 }
5831
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005832 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005833 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005834 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005835 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005836 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005837 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005838 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005839 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005840
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005841 //If 2 streams have need_metadata set to true, fail the request, unless
5842 //we copy/reference count the metadata buffer
5843 if (streams_need_metadata > 1) {
 5844 LOGE("not supporting request in which two streams require"
5845 " 2 HAL metadata for reprocessing");
5846 pthread_mutex_unlock(&mMutex);
5847 return -EINVAL;
5848 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005849
Emilian Peev656e4fa2017-06-02 16:47:04 +01005850 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5851 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5852 if (depthRequestPresent && mDepthChannel) {
5853 if (request->settings) {
5854 camera_metadata_ro_entry entry;
5855 if (find_camera_metadata_ro_entry(request->settings,
5856 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5857 if (entry.data.u8[0]) {
5858 pdafEnable = CAM_PD_DATA_ENABLED;
5859 } else {
5860 pdafEnable = CAM_PD_DATA_SKIP;
5861 }
5862 mDepthCloudMode = pdafEnable;
5863 } else {
5864 pdafEnable = mDepthCloudMode;
5865 }
5866 } else {
5867 pdafEnable = mDepthCloudMode;
5868 }
5869 }
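 // Summary of the PDAF selection above (sketch, not authoritative): with no
 // depth channel the mode stays CAM_PD_DATA_DISABLED; with a depth channel
 // but no depth buffer in this request it defaults to CAM_PD_DATA_SKIP; when
 // a depth buffer is requested, an explicit
 // NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE setting wins and is latched into
 // mDepthCloudMode, and requests without the setting reuse the latched value.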
5870
Emilian Peev7650c122017-01-19 08:24:33 -08005871 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5872 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5873 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5874 pthread_mutex_unlock(&mMutex);
5875 return BAD_VALUE;
5876 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005877
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005878 if (request->input_buffer == NULL) {
5879 /* Set the parameters to backend:
5880 * - For every request in NORMAL MODE
5881 * - For every request in HFR mode during preview only case
5882 * - Once every batch in HFR mode during video recording
5883 */
5884 if (!mBatchSize ||
5885 (mBatchSize && !isVidBufRequested) ||
5886 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5887 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5888 mBatchSize, isVidBufRequested,
5889 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005890
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005891 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5892 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5893 uint32_t m = 0;
5894 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5895 if (streamsArray.stream_request[k].streamID ==
5896 mBatchedStreamsArray.stream_request[m].streamID)
5897 break;
5898 }
5899 if (m == mBatchedStreamsArray.num_streams) {
5900 mBatchedStreamsArray.stream_request\
5901 [mBatchedStreamsArray.num_streams].streamID =
5902 streamsArray.stream_request[k].streamID;
5903 mBatchedStreamsArray.stream_request\
5904 [mBatchedStreamsArray.num_streams].buf_index =
5905 streamsArray.stream_request[k].buf_index;
5906 mBatchedStreamsArray.num_streams =
5907 mBatchedStreamsArray.num_streams + 1;
5908 }
5909 }
5910 streamsArray = mBatchedStreamsArray;
5911 }
5912 /* Update stream id of all the requested buffers */
5913 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5914 streamsArray)) {
5915 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005916 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005917 return BAD_VALUE;
5918 }
5919
5920 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5921 mParameters);
5922 if (rc < 0) {
5923 LOGE("set_parms failed");
5924 }
 5925 /* reset to zero because the batch is queued */
5926 mToBeQueuedVidBufs = 0;
5927 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5928 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5929 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005930 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5931 uint32_t m = 0;
5932 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5933 if (streamsArray.stream_request[k].streamID ==
5934 mBatchedStreamsArray.stream_request[m].streamID)
5935 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005936 }
5937 if (m == mBatchedStreamsArray.num_streams) {
5938 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5939 streamID = streamsArray.stream_request[k].streamID;
5940 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5941 buf_index = streamsArray.stream_request[k].buf_index;
5942 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5943 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005944 }
5945 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005946 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005947
5948 // Start all streams after the first setting is sent, so that the
5949 // setting can be applied sooner: (0 + apply_delay)th frame.
5950 if (mState == CONFIGURED && mChannelHandle) {
5951 //Then start them.
5952 LOGH("Start META Channel");
5953 rc = mMetadataChannel->start();
5954 if (rc < 0) {
5955 LOGE("META channel start failed");
5956 pthread_mutex_unlock(&mMutex);
5957 return rc;
5958 }
5959
5960 if (mAnalysisChannel) {
5961 rc = mAnalysisChannel->start();
5962 if (rc < 0) {
5963 LOGE("Analysis channel start failed");
5964 mMetadataChannel->stop();
5965 pthread_mutex_unlock(&mMutex);
5966 return rc;
5967 }
5968 }
5969
5970 if (mSupportChannel) {
5971 rc = mSupportChannel->start();
5972 if (rc < 0) {
5973 LOGE("Support channel start failed");
5974 mMetadataChannel->stop();
5975 /* Although support and analysis are mutually exclusive today
 5976 adding it in any case for future-proofing */
5977 if (mAnalysisChannel) {
5978 mAnalysisChannel->stop();
5979 }
5980 pthread_mutex_unlock(&mMutex);
5981 return rc;
5982 }
5983 }
5984 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5985 it != mStreamInfo.end(); it++) {
5986 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5987 LOGH("Start Processing Channel mask=%d",
5988 channel->getStreamTypeMask());
5989 rc = channel->start();
5990 if (rc < 0) {
5991 LOGE("channel start failed");
5992 pthread_mutex_unlock(&mMutex);
5993 return rc;
5994 }
5995 }
5996
5997 if (mRawDumpChannel) {
5998 LOGD("Starting raw dump stream");
5999 rc = mRawDumpChannel->start();
6000 if (rc != NO_ERROR) {
6001 LOGE("Error Starting Raw Dump Channel");
6002 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6003 it != mStreamInfo.end(); it++) {
6004 QCamera3Channel *channel =
6005 (QCamera3Channel *)(*it)->stream->priv;
6006 LOGH("Stopping Processing Channel mask=%d",
6007 channel->getStreamTypeMask());
6008 channel->stop();
6009 }
6010 if (mSupportChannel)
6011 mSupportChannel->stop();
6012 if (mAnalysisChannel) {
6013 mAnalysisChannel->stop();
6014 }
6015 mMetadataChannel->stop();
6016 pthread_mutex_unlock(&mMutex);
6017 return rc;
6018 }
6019 }
6020
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006021 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006022 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006023 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006024 if (rc != NO_ERROR) {
6025 LOGE("start_channel failed %d", rc);
6026 pthread_mutex_unlock(&mMutex);
6027 return rc;
6028 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006029
6030 {
6031 // Configure Easel for stream on.
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006032 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07006033
6034 // Now that sensor mode should have been selected, get the selected sensor mode
6035 // info.
6036 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6037 getCurrentSensorModeInfo(mSensorModeInfo);
6038
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006039 if (EaselManagerClientOpened) {
6040 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006041 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6042 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006043 if (rc != OK) {
6044 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6045 mCameraId, mSensorModeInfo.op_pixel_clk);
6046 pthread_mutex_unlock(&mMutex);
6047 return rc;
6048 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07006049 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006050 }
6051 }
6052
6053 // Start sensor streaming.
6054 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6055 mChannelHandle);
6056 if (rc != NO_ERROR) {
6057 LOGE("start_sensor_stream_on failed %d", rc);
6058 pthread_mutex_unlock(&mMutex);
6059 return rc;
6060 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006061 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006062 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006063 }
6064
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006065 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chenjie Luo4a761802017-06-13 17:35:54 +00006066 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006067 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006068 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006069 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6070 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6071 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6072 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07006073
6074 if (isSessionHdrPlusModeCompatible()) {
6075 rc = enableHdrPlusModeLocked();
6076 if (rc != OK) {
6077 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6078 pthread_mutex_unlock(&mMutex);
6079 return rc;
6080 }
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006081 }
6082
6083 mFirstPreviewIntentSeen = true;
6084 }
6085 }
6086
Thierry Strudel3d639192016-09-09 11:52:26 -07006087 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6088
6089 mState = STARTED;
6090 // Added a timed condition wait
6091 struct timespec ts;
6092 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006093 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006094 if (rc < 0) {
6095 isValidTimeout = 0;
 6096 LOGE("Error reading the monotonic clock!!");
6097 }
6098 else {
 6099 // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006100 int64_t timeout = 5;
6101 {
6102 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6103 // If there is a pending HDR+ request, the following requests may be blocked until the
6104 // HDR+ request is done. So allow a longer timeout.
6105 if (mHdrPlusPendingRequests.size() > 0) {
6106 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6107 }
6108 }
6109 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006110 }
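 // Sketch of the timed-wait pattern used below (illustrative; assumes
 // mRequestCond was initialized for CLOCK_MONOTONIC, e.g. via cam_cond.h):
 //
 //   pthread_condattr_t attr;
 //   pthread_condattr_init(&attr);
 //   pthread_condattr_setclock(&attr, CLOCK_MONOTONIC);
 //   pthread_cond_init(&cond, &attr);
 //   ...
 //   clock_gettime(CLOCK_MONOTONIC, &ts);
 //   ts.tv_sec += timeout;
 //   rc = pthread_cond_timedwait(&cond, &mutex, &ts); // ETIMEDOUT on expiry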
6111 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006112 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006113 (mState != ERROR) && (mState != DEINIT)) {
6114 if (!isValidTimeout) {
6115 LOGD("Blocking on conditional wait");
6116 pthread_cond_wait(&mRequestCond, &mMutex);
6117 }
6118 else {
6119 LOGD("Blocking on timed conditional wait");
6120 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6121 if (rc == ETIMEDOUT) {
6122 rc = -ENODEV;
6123 LOGE("Unblocked on timeout!!!!");
6124 break;
6125 }
6126 }
6127 LOGD("Unblocked");
6128 if (mWokenUpByDaemon) {
6129 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006130 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006131 break;
6132 }
6133 }
6134 pthread_mutex_unlock(&mMutex);
6135
6136 return rc;
6137}
6138
6139/*===========================================================================
6140 * FUNCTION : dump
6141 *
 6142 * DESCRIPTION: Dump pending requests, pending buffers and pending frame
 6143 * drops to the given file descriptor
 6144 *
 6145 * PARAMETERS :
 6146 * @fd : file descriptor to write the dump to
 6147 * RETURN : NONE
6148 *==========================================================================*/
6149void QCamera3HardwareInterface::dump(int fd)
6150{
6151 pthread_mutex_lock(&mMutex);
6152 dprintf(fd, "\n Camera HAL3 information Begin \n");
6153
6154 dprintf(fd, "\nNumber of pending requests: %zu \n",
6155 mPendingRequestsList.size());
6156 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6157 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6158 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6159 for(pendingRequestIterator i = mPendingRequestsList.begin();
6160 i != mPendingRequestsList.end(); i++) {
6161 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6162 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6163 i->input_buffer);
6164 }
6165 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6166 mPendingBuffersMap.get_num_overall_buffers());
6167 dprintf(fd, "-------+------------------\n");
6168 dprintf(fd, " Frame | Stream type mask \n");
6169 dprintf(fd, "-------+------------------\n");
6170 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6171 for(auto &j : req.mPendingBufferList) {
6172 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6173 dprintf(fd, " %5d | %11d \n",
6174 req.frame_number, channel->getStreamTypeMask());
6175 }
6176 }
6177 dprintf(fd, "-------+------------------\n");
6178
6179 dprintf(fd, "\nPending frame drop list: %zu\n",
6180 mPendingFrameDropList.size());
6181 dprintf(fd, "-------+-----------\n");
6182 dprintf(fd, " Frame | Stream ID \n");
6183 dprintf(fd, "-------+-----------\n");
6184 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6185 i != mPendingFrameDropList.end(); i++) {
6186 dprintf(fd, " %5d | %9d \n",
6187 i->frame_number, i->stream_ID);
6188 }
6189 dprintf(fd, "-------+-----------\n");
6190
6191 dprintf(fd, "\n Camera HAL3 information End \n");
6192
6193 /* use dumpsys media.camera as trigger to send update debug level event */
6194 mUpdateDebugLevel = true;
6195 pthread_mutex_unlock(&mMutex);
6196 return;
6197}
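// Illustrative usage (not part of the HAL interface): dump(fd) is typically
// reached through the camera service, e.g. `adb shell dumpsys media.camera`,
// which prints the tables above and also arms mUpdateDebugLevel so the next
// request refreshes the debug level.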
6198
6199/*===========================================================================
6200 * FUNCTION : flush
6201 *
6202 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6203 * conditionally restarts channels
6204 *
6205 * PARAMETERS :
6206 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006207 * @ stopChannelImmediately: stop the channel immediately. This should be used
 6208 * when the device has encountered an error and
 6209 * MIPI may have been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006210 *
6211 * RETURN :
6212 * 0 on success
6213 * Error code on failure
6214 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006215int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006216{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006217 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006218 int32_t rc = NO_ERROR;
6219
6220 LOGD("Unblocking Process Capture Request");
6221 pthread_mutex_lock(&mMutex);
6222 mFlush = true;
6223 pthread_mutex_unlock(&mMutex);
6224
6225 rc = stopAllChannels();
6226 // unlink of dualcam
6227 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006228 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6229 &m_pDualCamCmdPtr->bundle_info;
6230 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006231 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6232 pthread_mutex_lock(&gCamLock);
6233
6234 if (mIsMainCamera == 1) {
6235 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6236 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006237 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006238 // related session id should be session id of linked session
6239 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6240 } else {
6241 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6242 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006243 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006244 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6245 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006246 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006247 pthread_mutex_unlock(&gCamLock);
6248
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006249 rc = mCameraHandle->ops->set_dual_cam_cmd(
6250 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006251 if (rc < 0) {
6252 LOGE("Dualcam: Unlink failed, but still proceed to close");
6253 }
6254 }
6255
6256 if (rc < 0) {
6257 LOGE("stopAllChannels failed");
6258 return rc;
6259 }
6260 if (mChannelHandle) {
6261 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006262 mChannelHandle, stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006263 }
6264
6265 // Reset bundle info
6266 rc = setBundleInfo();
6267 if (rc < 0) {
6268 LOGE("setBundleInfo failed %d", rc);
6269 return rc;
6270 }
6271
6272 // Mutex Lock
6273 pthread_mutex_lock(&mMutex);
6274
6275 // Unblock process_capture_request
6276 mPendingLiveRequest = 0;
6277 pthread_cond_signal(&mRequestCond);
6278
6279 rc = notifyErrorForPendingRequests();
6280 if (rc < 0) {
6281 LOGE("notifyErrorForPendingRequests failed");
6282 pthread_mutex_unlock(&mMutex);
6283 return rc;
6284 }
6285
6286 mFlush = false;
6287
6288 // Start the Streams/Channels
6289 if (restartChannels) {
6290 rc = startAllChannels();
6291 if (rc < 0) {
6292 LOGE("startAllChannels failed");
6293 pthread_mutex_unlock(&mMutex);
6294 return rc;
6295 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006296 if (mChannelHandle) {
6297 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006298 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006299 if (rc < 0) {
6300 LOGE("start_channel failed");
6301 pthread_mutex_unlock(&mMutex);
6302 return rc;
6303 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006304 }
6305 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006306 pthread_mutex_unlock(&mMutex);
6307
6308 return 0;
6309}
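// Illustrative call shapes for flush() (sketch only, not additional API):
// internal error handling calls
//   flush(false /*restartChannels*/, stopChannelImmediately);
// (see handleCameraDeviceError), while a caller that wants streaming to
// resume passes restartChannels = true so startAllChannels() and
// start_channel() run again once the pending requests have been errored out.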
6310
6311/*===========================================================================
6312 * FUNCTION : flushPerf
6313 *
6314 * DESCRIPTION: This is the performance optimization version of flush that does
 6315 * not use stream off; instead it flushes the backend and waits for pending buffers
6316 *
6317 * PARAMETERS :
6318 *
6319 *
6320 * RETURN : 0 : success
6321 * -EINVAL: input is malformed (device is not valid)
6322 * -ENODEV: if the device has encountered a serious error
6323 *==========================================================================*/
6324int QCamera3HardwareInterface::flushPerf()
6325{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006326 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006327 int32_t rc = 0;
6328 struct timespec timeout;
6329 bool timed_wait = false;
6330
6331 pthread_mutex_lock(&mMutex);
6332 mFlushPerf = true;
6333 mPendingBuffersMap.numPendingBufsAtFlush =
6334 mPendingBuffersMap.get_num_overall_buffers();
6335 LOGD("Calling flush. Wait for %d buffers to return",
6336 mPendingBuffersMap.numPendingBufsAtFlush);
6337
6338 /* send the flush event to the backend */
6339 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6340 if (rc < 0) {
6341 LOGE("Error in flush: IOCTL failure");
6342 mFlushPerf = false;
6343 pthread_mutex_unlock(&mMutex);
6344 return -ENODEV;
6345 }
6346
6347 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6348 LOGD("No pending buffers in HAL, return flush");
6349 mFlushPerf = false;
6350 pthread_mutex_unlock(&mMutex);
6351 return rc;
6352 }
6353
6354 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006355 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006356 if (rc < 0) {
 6357 LOGE("Error reading the monotonic clock, cannot use timed wait");
6358 } else {
6359 timeout.tv_sec += FLUSH_TIMEOUT;
6360 timed_wait = true;
6361 }
6362
6363 //Block on conditional variable
6364 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6365 LOGD("Waiting on mBuffersCond");
6366 if (!timed_wait) {
6367 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6368 if (rc != 0) {
6369 LOGE("pthread_cond_wait failed due to rc = %s",
6370 strerror(rc));
6371 break;
6372 }
6373 } else {
6374 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6375 if (rc != 0) {
6376 LOGE("pthread_cond_timedwait failed due to rc = %s",
6377 strerror(rc));
6378 break;
6379 }
6380 }
6381 }
6382 if (rc != 0) {
6383 mFlushPerf = false;
6384 pthread_mutex_unlock(&mMutex);
6385 return -ENODEV;
6386 }
6387
6388 LOGD("Received buffers, now safe to return them");
6389
6390 //make sure the channels handle flush
6391 //currently only required for the picture channel to release snapshot resources
6392 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6393 it != mStreamInfo.end(); it++) {
6394 QCamera3Channel *channel = (*it)->channel;
6395 if (channel) {
6396 rc = channel->flush();
6397 if (rc) {
6398 LOGE("Flushing the channels failed with error %d", rc);
 6399 // Even though the channel flush failed, we need to continue and
 6400 // return the buffers we have to the framework; however, the return
 6401 // value will be an error.
6402 rc = -ENODEV;
6403 }
6404 }
6405 }
6406
6407 /* notify the frameworks and send errored results */
6408 rc = notifyErrorForPendingRequests();
6409 if (rc < 0) {
6410 LOGE("notifyErrorForPendingRequests failed");
6411 pthread_mutex_unlock(&mMutex);
6412 return rc;
6413 }
6414
6415 //unblock process_capture_request
6416 mPendingLiveRequest = 0;
6417 unblockRequestIfNecessary();
6418
6419 mFlushPerf = false;
6420 pthread_mutex_unlock(&mMutex);
6421 LOGD ("Flush Operation complete. rc = %d", rc);
6422 return rc;
6423}
6424
6425/*===========================================================================
6426 * FUNCTION : handleCameraDeviceError
6427 *
6428 * DESCRIPTION: This function calls internal flush and notifies the error to
6429 * framework and updates the state variable.
6430 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006431 * PARAMETERS :
6432 * @stopChannelImmediately : stop channels immediately without waiting for
6433 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006434 *
6435 * RETURN : NO_ERROR on Success
6436 * Error code on failure
6437 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006438int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006439{
6440 int32_t rc = NO_ERROR;
6441
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006442 {
6443 Mutex::Autolock lock(mFlushLock);
6444 pthread_mutex_lock(&mMutex);
6445 if (mState != ERROR) {
6446 //if mState != ERROR, nothing to be done
6447 pthread_mutex_unlock(&mMutex);
6448 return NO_ERROR;
6449 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006450 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006451
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006452 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006453 if (NO_ERROR != rc) {
6454 LOGE("internal flush to handle mState = ERROR failed");
6455 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006456
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006457 pthread_mutex_lock(&mMutex);
6458 mState = DEINIT;
6459 pthread_mutex_unlock(&mMutex);
6460 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006461
6462 camera3_notify_msg_t notify_msg;
6463 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6464 notify_msg.type = CAMERA3_MSG_ERROR;
6465 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6466 notify_msg.message.error.error_stream = NULL;
6467 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006468 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006469
6470 return rc;
6471}
6472
6473/*===========================================================================
6474 * FUNCTION : captureResultCb
6475 *
6476 * DESCRIPTION: Callback handler for all capture results
6477 * (streams, as well as metadata)
6478 *
6479 * PARAMETERS :
6480 * @metadata : metadata information
6481 * @buffer : actual gralloc buffer to be returned to frameworks.
6482 * NULL if metadata.
6483 *
6484 * RETURN : NONE
6485 *==========================================================================*/
6486void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6487 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6488{
6489 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006490 pthread_mutex_lock(&mMutex);
6491 uint8_t batchSize = mBatchSize;
6492 pthread_mutex_unlock(&mMutex);
6493 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006494 handleBatchMetadata(metadata_buf,
6495 true /* free_and_bufdone_meta_buf */);
6496 } else { /* mBatchSize = 0 */
6497 hdrPlusPerfLock(metadata_buf);
6498 pthread_mutex_lock(&mMutex);
6499 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006500 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006501 true /* last urgent frame of batch metadata */,
6502 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006503 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006504 pthread_mutex_unlock(&mMutex);
6505 }
6506 } else if (isInputBuffer) {
6507 pthread_mutex_lock(&mMutex);
6508 handleInputBufferWithLock(frame_number);
6509 pthread_mutex_unlock(&mMutex);
6510 } else {
6511 pthread_mutex_lock(&mMutex);
6512 handleBufferWithLock(buffer, frame_number);
6513 pthread_mutex_unlock(&mMutex);
6514 }
6515 return;
6516}
6517
6518/*===========================================================================
6519 * FUNCTION : getReprocessibleOutputStreamId
6520 *
6521 * DESCRIPTION: Get the source output stream id for the input reprocess stream,
6522 * based on size and format; this would be the largest
6523 * output stream if an input stream exists.
6524 *
6525 * PARAMETERS :
6526 * @id : return the stream id if found
6527 *
6528 * RETURN : int32_t type of status
6529 * NO_ERROR -- success
6530 * non-zero failure code
6531 *==========================================================================*/
6532int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6533{
6534 /* Check whether there is any output or bidirectional stream with the same size and
6535 format as the input stream, and return that stream */
6536 if ((mInputStreamInfo.dim.width > 0) &&
6537 (mInputStreamInfo.dim.height > 0)) {
6538 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6539 it != mStreamInfo.end(); it++) {
6540
6541 camera3_stream_t *stream = (*it)->stream;
6542 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6543 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6544 (stream->format == mInputStreamInfo.format)) {
6545 // Usage flag for an input stream and the source output stream
6546 // may be different.
6547 LOGD("Found reprocessible output stream! %p", *it);
6548 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6549 stream->usage, mInputStreamInfo.usage);
6550
6551 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6552 if (channel != NULL && channel->mStreams[0]) {
6553 id = channel->mStreams[0]->getMyServerID();
6554 return NO_ERROR;
6555 }
6556 }
6557 }
6558 } else {
6559 LOGD("No input stream, so no reprocessible output stream");
6560 }
6561 return NAME_NOT_FOUND;
6562}
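/* Illustrative usage sketch (not called from here): querying the reprocess
 * source stream and handling the no-input-stream case. Variable names are
 * examples only.
 *
 *   uint32_t reprocSourceStreamId = 0;
 *   if (NO_ERROR == getReprocessibleOutputStreamId(reprocSourceStreamId)) {
 *       // reprocSourceStreamId now holds the server id of the output stream
 *       // whose buffers can be fed back through the input (reprocess) stream.
 *   } else {
 *       // NAME_NOT_FOUND: no input stream configured, or no matching output.
 *   }
 */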
6563
6564/*===========================================================================
6565 * FUNCTION : lookupFwkName
6566 *
6567 * DESCRIPTION: In case the enum is not the same in the framework and the backend,
6568 * make sure the parameter is correctly propagated
6569 *
6570 * PARAMETERS :
6571 * @arr : map between the two enums
6572 * @len : len of the map
6573 * @hal_name : name of the hal_parm to map
6574 *
6575 * RETURN : int type of status
6576 * fwk_name -- success
6577 * non-zero failure code
6578 *==========================================================================*/
6579template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6580 size_t len, halType hal_name)
6581{
6582
6583 for (size_t i = 0; i < len; i++) {
6584 if (arr[i].hal_name == hal_name) {
6585 return arr[i].fwk_name;
6586 }
6587 }
6588
6589 /* Not being able to find a matching framework type is not necessarily
6590 * an error case. This happens when mm-camera supports more attributes
6591 * than the framework does */
6592 LOGH("Cannot find matching framework type");
6593 return NAME_NOT_FOUND;
6594}
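/* Illustrative usage sketch: map a HAL bestshot/scene mode to its framework
 * counterpart through SCENE_MODES_MAP, mirroring the translation done in
 * translateFromHalMetadata() below. CAM_SCENE_MODE_PORTRAIT is only an
 * example value assumed to be defined in cam_types.h.
 *
 *   int val = lookupFwkName(SCENE_MODES_MAP,
 *           METADATA_MAP_SIZE(SCENE_MODES_MAP), CAM_SCENE_MODE_PORTRAIT);
 *   if (NAME_NOT_FOUND != val) {
 *       uint8_t fwkSceneMode = (uint8_t)val;
 *       // e.g. publish fwkSceneMode as ANDROID_CONTROL_SCENE_MODE
 *   }
 */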
6595
6596/*===========================================================================
6597 * FUNCTION : lookupHalName
6598 *
6599 * DESCRIPTION: In case the enum is not the same in the framework and the backend,
6600 * make sure the parameter is correctly propagated
6601 *
6602 * PARAMETERS :
6603 * @arr : map between the two enums
6604 * @len : len of the map
6605 * @fwk_name : name of the framework parameter to map
6606 *
6607 * RETURN : int32_t type of status
6608 * hal_name -- success
6609 * none-zero failure code
6610 *==========================================================================*/
6611template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6612 size_t len, fwkType fwk_name)
6613{
6614 for (size_t i = 0; i < len; i++) {
6615 if (arr[i].fwk_name == fwk_name) {
6616 return arr[i].hal_name;
6617 }
6618 }
6619
6620 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6621 return NAME_NOT_FOUND;
6622}
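/* Illustrative usage sketch: the reverse mapping, framework enum to HAL enum,
 * here through FLASH_MODES_MAP (also used in translateFromHalMetadata()).
 * ANDROID_FLASH_MODE_TORCH is only an example framework value.
 *
 *   int32_t halFlashMode = lookupHalName(FLASH_MODES_MAP,
 *           METADATA_MAP_SIZE(FLASH_MODES_MAP), ANDROID_FLASH_MODE_TORCH);
 *   if (NAME_NOT_FOUND != halFlashMode) {
 *       // halFlashMode can now be written into the backend parameter buffer
 *   }
 */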
6623
6624/*===========================================================================
6625 * FUNCTION : lookupProp
6626 *
6627 * DESCRIPTION: lookup a value by its name
6628 *
6629 * PARAMETERS :
6630 * @arr : map between the two enums
6631 * @len : size of the map
6632 * @name : name to be looked up
6633 *
6634 * RETURN : Value if found
6635 * CAM_CDS_MODE_MAX if not found
6636 *==========================================================================*/
6637template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6638 size_t len, const char *name)
6639{
6640 if (name) {
6641 for (size_t i = 0; i < len; i++) {
6642 if (!strcmp(arr[i].desc, name)) {
6643 return arr[i].val;
6644 }
6645 }
6646 }
6647 return CAM_CDS_MODE_MAX;
6648}
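/* Illustrative usage sketch: translate a system property string into a CDS
 * mode. The property key and the CDS_MAP table are assumptions made for the
 * example; only the lookupProp() contract shown above is relied upon.
 *
 *   char prop[PROPERTY_VALUE_MAX];
 *   memset(prop, 0, sizeof(prop));
 *   property_get("persist.camera.CDS", prop, "Auto");
 *   cam_cds_mode_type_t cds_mode =
 *           lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
 *   if (CAM_CDS_MODE_MAX == cds_mode) {
 *       // Unrecognized property value; fall back to a sane default.
 *   }
 */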
6649
6650/*===========================================================================
6651 * FUNCTION   : translateFromHalMetadata
6652 * DESCRIPTION: Translate metadata from HAL format to framework (camera_metadata_t) format
6653 *
6654 * PARAMETERS :
6655 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006656 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006657 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006658 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6659 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006660 *
6661 * RETURN : camera_metadata_t*
6662 * metadata in a format specified by fwk
6663 *==========================================================================*/
6664camera_metadata_t*
6665QCamera3HardwareInterface::translateFromHalMetadata(
6666 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006667 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006668 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006669 bool lastMetadataInBatch,
6670 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006671{
6672 CameraMetadata camMetadata;
6673 camera_metadata_t *resultMetadata;
6674
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006675 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006676 /* In batch mode, only populate SENSOR_TIMESTAMP when this is not the last metadata in the batch.
6677 * The timestamp is needed because it is used for the shutter notify calculation.
6678 */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006679 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006680 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006681 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006682 }
6683
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006684 if (pendingRequest.jpegMetadata.entryCount())
6685 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006686
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006687 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6688 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6689 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6690 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6691 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006692 if (mBatchSize == 0) {
6693 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006694 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006695 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006696
Samuel Ha68ba5172016-12-15 18:41:12 -08006697 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6698 // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006699 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006700 // DevCamDebug metadata translateFromHalMetadata AF
6701 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6702 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6703 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6704 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6705 }
6706 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6707 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6708 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6709 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6710 }
6711 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6712 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6713 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6714 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6715 }
6716 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6717 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6718 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6719 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6720 }
6721 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6722 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6723 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6724 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6725 }
6726 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6727 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6728 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6729 *DevCamDebug_af_monitor_pdaf_target_pos;
6730 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6731 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6732 }
6733 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6734 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6735 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6736 *DevCamDebug_af_monitor_pdaf_confidence;
6737 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6738 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6739 }
6740 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6741 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6742 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6743 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6744 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6745 }
6746 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6747 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6748 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6749 *DevCamDebug_af_monitor_tof_target_pos;
6750 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6751 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6752 }
6753 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6754 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6755 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6756 *DevCamDebug_af_monitor_tof_confidence;
6757 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6758 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6759 }
6760 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6761 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6762 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6763 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6764 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6765 }
6766 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6767 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6768 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6769 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6770 &fwk_DevCamDebug_af_monitor_type_select, 1);
6771 }
6772 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6773 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6774 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6775 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6776 &fwk_DevCamDebug_af_monitor_refocus, 1);
6777 }
6778 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6779 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6780 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6781 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6782 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6783 }
6784 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6785 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6786 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6787 *DevCamDebug_af_search_pdaf_target_pos;
6788 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6789 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6790 }
6791 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6792 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6793 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6794 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6795 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6796 }
6797 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6798 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6799 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6800 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6801 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6802 }
6803 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6804 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6805 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6806 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6807 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6808 }
6809 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6810 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6811 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6812 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6813 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6814 }
6815 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6816 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6817 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6818 *DevCamDebug_af_search_tof_target_pos;
6819 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6820 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6821 }
6822 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6823 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6824 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6825 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6826 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6827 }
6828 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6829 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6830 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6831 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6832 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6833 }
6834 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6835 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6836 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6837 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6838 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6839 }
6840 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6841 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6842 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6843 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6844 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6845 }
6846 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6847 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6848 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6849 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6850 &fwk_DevCamDebug_af_search_type_select, 1);
6851 }
6852 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6853 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6854 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6855 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6856 &fwk_DevCamDebug_af_search_next_pos, 1);
6857 }
6858 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6859 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6860 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6861 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6862 &fwk_DevCamDebug_af_search_target_pos, 1);
6863 }
6864 // DevCamDebug metadata translateFromHalMetadata AEC
6865 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6866 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6867 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6868 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6869 }
6870 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6871 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6872 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6873 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6874 }
6875 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6876 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6877 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6878 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6879 }
6880 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6881 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6882 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6883 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6884 }
6885 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6886 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6887 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6888 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6889 }
6890 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6891 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6892 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6893 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6894 }
6895 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6896 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6897 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6898 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6899 }
6900 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6901 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6902 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6903 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6904 }
Samuel Ha34229982017-02-17 13:51:11 -08006905 // DevCamDebug metadata translateFromHalMetadata zzHDR
6906 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6907 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6908 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6909 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6910 }
6911 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6912 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006913 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006914 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6915 }
6916 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6917 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6918 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6919 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6920 }
6921 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6922 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006923 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006924 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6925 }
6926 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6927 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6928 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6929 *DevCamDebug_aec_hdr_sensitivity_ratio;
6930 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6931 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6932 }
6933 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6934 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6935 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6936 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6937 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6938 }
6939 // DevCamDebug metadata translateFromHalMetadata ADRC
6940 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6941 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6942 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6943 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6944 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6945 }
6946 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6947 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6948 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6949 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6950 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6951 }
6952 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6953 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6954 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6955 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6956 }
6957 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6958 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6959 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6960 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6961 }
6962 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6963 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6964 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6965 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6966 }
6967 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6968 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6969 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6970 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6971 }
Samuel Habdf4fac2017-07-28 17:21:18 -07006972 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
6973 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
6974 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
6975 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
6976 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
6977 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
6978 }
6979 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
6980 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
6981 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
6982 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
6983 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
6984 }
6985 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
6986 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
6987 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
6988 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
6989 &fwk_DevCamDebug_aec_subject_motion, 1);
6990 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006991 // DevCamDebug metadata translateFromHalMetadata AWB
6992 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6993 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6994 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6995 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6996 }
6997 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6998 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6999 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7000 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7001 }
7002 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7003 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7004 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7005 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7006 }
7007 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7008 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7009 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7010 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7011 }
7012 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7013 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7014 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7015 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7016 }
7017 }
7018 // atrace_end(ATRACE_TAG_ALWAYS);
7019
Thierry Strudel3d639192016-09-09 11:52:26 -07007020 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
7021 int64_t fwk_frame_number = *frame_number;
7022 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
7023 }
7024
7025 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
7026 int32_t fps_range[2];
7027 fps_range[0] = (int32_t)float_range->min_fps;
7028 fps_range[1] = (int32_t)float_range->max_fps;
7029 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7030 fps_range, 2);
7031 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7032 fps_range[0], fps_range[1]);
7033 }
7034
7035 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7036 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7037 }
7038
7039 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7040 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7041 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7042 *sceneMode);
7043 if (NAME_NOT_FOUND != val) {
7044 uint8_t fwkSceneMode = (uint8_t)val;
7045 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7046 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7047 fwkSceneMode);
7048 }
7049 }
7050
7051 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7052 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7053 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7054 }
7055
7056 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7057 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7058 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7059 }
7060
7061 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7062 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7063 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7064 }
7065
7066 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7067 CAM_INTF_META_EDGE_MODE, metadata) {
7068 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7069 }
7070
7071 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7072 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7073 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7074 }
7075
7076 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7077 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7078 }
7079
7080 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7081 if (0 <= *flashState) {
7082 uint8_t fwk_flashState = (uint8_t) *flashState;
7083 if (!gCamCapability[mCameraId]->flash_available) {
7084 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7085 }
7086 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7087 }
7088 }
7089
7090 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7091 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7092 if (NAME_NOT_FOUND != val) {
7093 uint8_t fwk_flashMode = (uint8_t)val;
7094 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7095 }
7096 }
7097
7098 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7099 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7100 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7101 }
7102
7103 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7104 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7105 }
7106
7107 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7108 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7109 }
7110
7111 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7112 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7113 }
7114
7115 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7116 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7117 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7118 }
7119
7120 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7121 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7122 LOGD("fwk_videoStab = %d", fwk_videoStab);
7123 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7124 } else {
7125 // Regardless of whether video stabilization is supported or not, CTS expects the EIS result
7126 // to be non-NULL, so hardcode the video stabilization result to OFF mode.
7127 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7128 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007129 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007130 }
7131
7132 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7133 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7134 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7135 }
7136
7137 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7138 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7139 }
7140
Thierry Strudel3d639192016-09-09 11:52:26 -07007141 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7142 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007143 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007144
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007145 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7146 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007147
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007148 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007149 blackLevelAppliedPattern->cam_black_level[0],
7150 blackLevelAppliedPattern->cam_black_level[1],
7151 blackLevelAppliedPattern->cam_black_level[2],
7152 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007153 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7154 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007155
7156#ifndef USE_HAL_3_3
7157 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307158 // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007159 // depth space.
Jason Lee4f3d96e2017-02-28 19:24:14 +05307160 fwk_blackLevelInd[0] /= 16.0;
7161 fwk_blackLevelInd[1] /= 16.0;
7162 fwk_blackLevelInd[2] /= 16.0;
7163 fwk_blackLevelInd[3] /= 16.0;
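// For example, an applied black level of 1024 in the 14-bit pipeline
// maps to 1024 / 16 = 64 in the 10-bit sensor raw domain.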
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007164 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7165 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007166#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007167 }
7168
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007169#ifndef USE_HAL_3_3
7170 // Fixed whitelevel is used by ISP/Sensor
7171 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7172 &gCamCapability[mCameraId]->white_level, 1);
7173#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007174
7175 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7176 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7177 int32_t scalerCropRegion[4];
7178 scalerCropRegion[0] = hScalerCropRegion->left;
7179 scalerCropRegion[1] = hScalerCropRegion->top;
7180 scalerCropRegion[2] = hScalerCropRegion->width;
7181 scalerCropRegion[3] = hScalerCropRegion->height;
7182
7183 // Adjust crop region from sensor output coordinate system to active
7184 // array coordinate system.
7185 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7186 scalerCropRegion[2], scalerCropRegion[3]);
7187
7188 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7189 }
7190
7191 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7192 LOGD("sensorExpTime = %lld", *sensorExpTime);
7193 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7194 }
7195
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007196 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7197 LOGD("expTimeBoost = %f", *expTimeBoost);
7198 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7199 }
7200
Thierry Strudel3d639192016-09-09 11:52:26 -07007201 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7202 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7203 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7204 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7205 }
7206
7207 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7208 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7209 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7210 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7211 sensorRollingShutterSkew, 1);
7212 }
7213
7214 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7215 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7216 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7217
7218 //calculate the noise profile based on sensitivity
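// Descriptive note: per the Android definition of ANDROID_SENSOR_NOISE_PROFILE,
// the per-channel noise variance is approximated as a linear function of the
// signal level x,
//     variance(x) ~= S * x + O,
// so one (S, O) pair is published below for every color channel.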
7219 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7220 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7221 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7222 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7223 noise_profile[i] = noise_profile_S;
7224 noise_profile[i+1] = noise_profile_O;
7225 }
7226 LOGD("noise model entry (S, O) is (%f, %f)",
7227 noise_profile_S, noise_profile_O);
7228 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7229 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7230 }
7231
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007232#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007233 int32_t fwk_ispSensitivity = 100;
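// Descriptive note (assumption based on the public metadata definition):
// ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST uses 100 to mean "no boost";
// the value reported below combines the ISP sensitivity with the post-stats
// sensitivity factor when the backend provides them, and otherwise stays at
// the default of 100.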
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007234 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007235 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007236 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007237 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7238 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7239 }
7240 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007241#endif
7242
Thierry Strudel3d639192016-09-09 11:52:26 -07007243 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7244 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7245 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7246 }
7247
7248 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7249 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7250 *faceDetectMode);
7251 if (NAME_NOT_FOUND != val) {
7252 uint8_t fwk_faceDetectMode = (uint8_t)val;
7253 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7254
7255 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7256 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7257 CAM_INTF_META_FACE_DETECTION, metadata) {
7258 uint8_t numFaces = MIN(
7259 faceDetectionInfo->num_faces_detected, MAX_ROI);
7260 int32_t faceIds[MAX_ROI];
7261 uint8_t faceScores[MAX_ROI];
7262 int32_t faceRectangles[MAX_ROI * 4];
7263 int32_t faceLandmarks[MAX_ROI * 6];
7264 size_t j = 0, k = 0;
7265
7266 for (size_t i = 0; i < numFaces; i++) {
7267 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7268 // Adjust crop region from sensor output coordinate system to active
7269 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007270 cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
Thierry Strudel3d639192016-09-09 11:52:26 -07007271 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7272 rect.width, rect.height);
7273
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007274 convertToRegions(rect, faceRectangles+j, -1);
Thierry Strudel3d639192016-09-09 11:52:26 -07007275
Jason Lee8ce36fa2017-04-19 19:40:37 -07007276 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7277 "bottom-right (%d, %d)",
7278 faceDetectionInfo->frame_id, i,
7279 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7280 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7281
Thierry Strudel3d639192016-09-09 11:52:26 -07007282 j+= 4;
7283 }
7284 if (numFaces <= 0) {
7285 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7286 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7287 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7288 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7289 }
7290
7291 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7292 numFaces);
7293 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7294 faceRectangles, numFaces * 4U);
7295 if (fwk_faceDetectMode ==
7296 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7297 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7298 CAM_INTF_META_FACE_LANDMARK, metadata) {
7299
7300 for (size_t i = 0; i < numFaces; i++) {
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007301 cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
Thierry Strudel3d639192016-09-09 11:52:26 -07007302 // Map the landmark coordinates from the sensor output coordinate system to the active
7303 // array coordinate system.
7304 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007305 face_landmarks.left_eye_center.x,
7306 face_landmarks.left_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007307 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007308 face_landmarks.right_eye_center.x,
7309 face_landmarks.right_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007310 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007311 face_landmarks.mouth_center.x,
7312 face_landmarks.mouth_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007313
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007314 convertLandmarks(face_landmarks, faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007315
7316 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7317 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7318 faceDetectionInfo->frame_id, i,
7319 faceLandmarks[k + LEFT_EYE_X],
7320 faceLandmarks[k + LEFT_EYE_Y],
7321 faceLandmarks[k + RIGHT_EYE_X],
7322 faceLandmarks[k + RIGHT_EYE_Y],
7323 faceLandmarks[k + MOUTH_X],
7324 faceLandmarks[k + MOUTH_Y]);
7325
Thierry Strudel04e026f2016-10-10 11:27:36 -07007326 k+= TOTAL_LANDMARK_INDICES;
7327 }
7328 } else {
7329 for (size_t i = 0; i < numFaces; i++) {
7330 setInvalidLandmarks(faceLandmarks+k);
7331 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007332 }
7333 }
7334
Jason Lee49619db2017-04-13 12:07:22 -07007335 for (size_t i = 0; i < numFaces; i++) {
7336 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7337
7338 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7339 faceDetectionInfo->frame_id, i, faceIds[i]);
7340 }
7341
Thierry Strudel3d639192016-09-09 11:52:26 -07007342 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7343 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7344 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007345 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007346 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7347 CAM_INTF_META_FACE_BLINK, metadata) {
7348 uint8_t detected[MAX_ROI];
7349 uint8_t degree[MAX_ROI * 2];
7350 for (size_t i = 0; i < numFaces; i++) {
7351 detected[i] = blinks->blink[i].blink_detected;
7352 degree[2 * i] = blinks->blink[i].left_blink;
7353 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007354
Jason Lee49619db2017-04-13 12:07:22 -07007355 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7356 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7357 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7358 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007359 }
7360 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7361 detected, numFaces);
7362 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7363 degree, numFaces * 2);
7364 }
7365 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7366 CAM_INTF_META_FACE_SMILE, metadata) {
7367 uint8_t degree[MAX_ROI];
7368 uint8_t confidence[MAX_ROI];
7369 for (size_t i = 0; i < numFaces; i++) {
7370 degree[i] = smiles->smile[i].smile_degree;
7371 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007372
Jason Lee49619db2017-04-13 12:07:22 -07007373 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7374 "smile_degree=%d, smile_score=%d",
7375 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007376 }
7377 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7378 degree, numFaces);
7379 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7380 confidence, numFaces);
7381 }
7382 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7383 CAM_INTF_META_FACE_GAZE, metadata) {
7384 int8_t angle[MAX_ROI];
7385 int32_t direction[MAX_ROI * 3];
7386 int8_t degree[MAX_ROI * 2];
7387 for (size_t i = 0; i < numFaces; i++) {
7388 angle[i] = gazes->gaze[i].gaze_angle;
7389 direction[3 * i] = gazes->gaze[i].updown_dir;
7390 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7391 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7392 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7393 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007394
7395 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7396 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7397 "left_right_gaze=%d, top_bottom_gaze=%d",
7398 faceDetectionInfo->frame_id, i, angle[i],
7399 direction[3 * i], direction[3 * i + 1],
7400 direction[3 * i + 2],
7401 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007402 }
7403 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7404 (uint8_t *)angle, numFaces);
7405 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7406 direction, numFaces * 3);
7407 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7408 (uint8_t *)degree, numFaces * 2);
7409 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007410 }
7411 }
7412 }
7413 }
7414
7415 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7416 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007417 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007418 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007419 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007420
Shuzhen Wang14415f52016-11-16 18:26:18 -08007421 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7422 histogramBins = *histBins;
7423 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7424 }
7425
7426 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007427 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7428 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007429 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007430
7431 switch (stats_data->type) {
7432 case CAM_HISTOGRAM_TYPE_BAYER:
7433 switch (stats_data->bayer_stats.data_type) {
7434 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007435 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7436 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007437 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007438 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7439 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007440 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007441 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7442 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007443 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007444 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007445 case CAM_STATS_CHANNEL_R:
7446 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007447 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7448 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007449 }
7450 break;
7451 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007452 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007453 break;
7454 }
7455
Shuzhen Wang14415f52016-11-16 18:26:18 -08007456 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007457 }
7458 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007459 }
7460
7461 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7462 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7463 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7464 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7465 }
7466
7467 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7468 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7469 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7470 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7471 }
7472
7473 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7474 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7475 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7476 CAM_MAX_SHADING_MAP_HEIGHT);
7477 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7478 CAM_MAX_SHADING_MAP_WIDTH);
7479 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7480 lensShadingMap->lens_shading, 4U * map_width * map_height);
7481 }
7482
7483 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7484 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7485 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7486 }
7487
7488 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7489 //Populate CAM_INTF_META_TONEMAP_CURVES
7490 /* ch0 = G, ch 1 = B, ch 2 = R*/
7491 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7492 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7493 tonemap->tonemap_points_cnt,
7494 CAM_MAX_TONEMAP_CURVE_SIZE);
7495 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7496 }
7497
7498 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7499 &tonemap->curves[0].tonemap_points[0][0],
7500 tonemap->tonemap_points_cnt * 2);
7501
7502 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7503 &tonemap->curves[1].tonemap_points[0][0],
7504 tonemap->tonemap_points_cnt * 2);
7505
7506 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7507 &tonemap->curves[2].tonemap_points[0][0],
7508 tonemap->tonemap_points_cnt * 2);
7509 }
7510
7511 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7512 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7513 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7514 CC_GAIN_MAX);
7515 }
7516
7517 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7518 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7519 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7520 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7521 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7522 }
7523
7524 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7525 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7526 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7527 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7528 toneCurve->tonemap_points_cnt,
7529 CAM_MAX_TONEMAP_CURVE_SIZE);
7530 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7531 }
7532 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7533 (float*)toneCurve->curve.tonemap_points,
7534 toneCurve->tonemap_points_cnt * 2);
7535 }
7536
7537 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7538 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7539 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7540 predColorCorrectionGains->gains, 4);
7541 }
7542
7543 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7544 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7545 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7546 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7547 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7548 }
7549
7550 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7551 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7552 }
7553
7554 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7555 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7556 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7557 }
7558
7559 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7560 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7561 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7562 }
7563
7564 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7565 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7566 *effectMode);
7567 if (NAME_NOT_FOUND != val) {
7568 uint8_t fwk_effectMode = (uint8_t)val;
7569 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7570 }
7571 }
7572
7573 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7574 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7575 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7576 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7577 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7578 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7579 }
7580 int32_t fwk_testPatternData[4];
7581 fwk_testPatternData[0] = testPatternData->r;
7582 fwk_testPatternData[3] = testPatternData->b;
7583 switch (gCamCapability[mCameraId]->color_arrangement) {
7584 case CAM_FILTER_ARRANGEMENT_RGGB:
7585 case CAM_FILTER_ARRANGEMENT_GRBG:
7586 fwk_testPatternData[1] = testPatternData->gr;
7587 fwk_testPatternData[2] = testPatternData->gb;
7588 break;
7589 case CAM_FILTER_ARRANGEMENT_GBRG:
7590 case CAM_FILTER_ARRANGEMENT_BGGR:
7591 fwk_testPatternData[2] = testPatternData->gr;
7592 fwk_testPatternData[1] = testPatternData->gb;
7593 break;
7594 default:
7595 LOGE("color arrangement %d is not supported",
7596 gCamCapability[mCameraId]->color_arrangement);
7597 break;
7598 }
7599 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7600 }
7601
7602 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7603 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7604 }
7605
7606 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7607 String8 str((const char *)gps_methods);
7608 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7609 }
7610
7611 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7612 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7613 }
7614
7615 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7616 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7617 }
7618
7619 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7620 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7621 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7622 }
7623
7624 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7625 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7626 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7627 }
7628
7629 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7630 int32_t fwk_thumb_size[2];
7631 fwk_thumb_size[0] = thumb_size->width;
7632 fwk_thumb_size[1] = thumb_size->height;
7633 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7634 }
7635
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007636 // Skip reprocess metadata if there is no input stream.
7637 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7638 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7639 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7640 privateData,
7641 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7642 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007643 }
7644
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007645 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007646 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007647 meteringMode, 1);
7648 }
7649
Thierry Strudel54dc9782017-02-15 12:12:10 -08007650 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7651 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7652 LOGD("hdr_scene_data: %d %f\n",
7653 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7654 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7655 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7656 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7657 &isHdr, 1);
7658 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7659 &isHdrConfidence, 1);
7660 }
7661
7662
7663
Thierry Strudel3d639192016-09-09 11:52:26 -07007664 if (metadata->is_tuning_params_valid) {
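        // Pack the tuning data into a flat blob: the data version and per-module sizes
        // first, followed by the sensor, VFE, CPP and CAC payloads, each clamped to its
        // maximum size.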
7665 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7666 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7667 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7668
7669
7670 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7671 sizeof(uint32_t));
7672 data += sizeof(uint32_t);
7673
7674 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7675 sizeof(uint32_t));
7676 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7677 data += sizeof(uint32_t);
7678
7679 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7680 sizeof(uint32_t));
7681 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7682 data += sizeof(uint32_t);
7683
7684 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7685 sizeof(uint32_t));
7686 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7687 data += sizeof(uint32_t);
7688
7689 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7690 sizeof(uint32_t));
7691 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7692 data += sizeof(uint32_t);
7693
7694 metadata->tuning_params.tuning_mod3_data_size = 0;
7695 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7696 sizeof(uint32_t));
7697 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7698 data += sizeof(uint32_t);
7699
7700 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7701 TUNING_SENSOR_DATA_MAX);
7702 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7703 count);
7704 data += count;
7705
7706 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7707 TUNING_VFE_DATA_MAX);
7708 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7709 count);
7710 data += count;
7711
7712 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7713 TUNING_CPP_DATA_MAX);
7714 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7715 count);
7716 data += count;
7717
7718 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7719 TUNING_CAC_DATA_MAX);
7720 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7721 count);
7722 data += count;
7723
7724 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7725 (int32_t *)(void *)tuning_meta_data_blob,
7726 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7727 }
7728
7729 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7730 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7731 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7732 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7733 NEUTRAL_COL_POINTS);
7734 }
7735
7736 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7737 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7738 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7739 }
7740
7741 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7742 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7743         // Adjust the AE region from the sensor output coordinate system to the
7744         // active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007745 cam_rect_t hAeRect = hAeRegions->rect;
7746 mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
7747 hAeRect.width, hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007748
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007749 convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
Thierry Strudel3d639192016-09-09 11:52:26 -07007750 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7751 REGIONS_TUPLE_COUNT);
7752 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7753 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007754 hAeRect.left, hAeRect.top, hAeRect.width,
7755 hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007756 }
7757
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007758 if (!pendingRequest.focusStateSent) {
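    // Skip the AF state here if it was already delivered as an early partial result;
    // otherwise prefer the state recorded for this request and fall back to the AF
    // state carried in this metadata buffer.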
7759 if (pendingRequest.focusStateValid) {
7760 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7761 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007762 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007763 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7764 uint8_t fwk_afState = (uint8_t) *afState;
7765 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7766 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7767 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007768 }
7769 }
7770
Thierry Strudel3d639192016-09-09 11:52:26 -07007771 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7772 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7773 }
7774
7775 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7776 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7777 }
7778
7779 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7780 uint8_t fwk_lensState = *lensState;
7781 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7782 }
7783
Thierry Strudel3d639192016-09-09 11:52:26 -07007784 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007785 uint32_t ab_mode = *hal_ab_mode;
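        // The framework has no AUTO_50HZ/AUTO_60HZ antibanding modes, so fold these
        // auto variants back into plain AUTO before the lookup.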
7786 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7787 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7788 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7789 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007790 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007791 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007792 if (NAME_NOT_FOUND != val) {
7793 uint8_t fwk_ab_mode = (uint8_t)val;
7794 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7795 }
7796 }
7797
7798 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7799 int val = lookupFwkName(SCENE_MODES_MAP,
7800 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7801 if (NAME_NOT_FOUND != val) {
7802 uint8_t fwkBestshotMode = (uint8_t)val;
7803 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7804 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7805 } else {
7806 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7807 }
7808 }
7809
7810 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7811 uint8_t fwk_mode = (uint8_t) *mode;
7812 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7813 }
7814
7815    /* Constant metadata values to be updated */
7816 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7817 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7818
7819 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7820 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7821
7822 int32_t hotPixelMap[2];
7823 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7824
7825 // CDS
7826 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7827 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7828 }
7829
Thierry Strudel04e026f2016-10-10 11:27:36 -07007830 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7831 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007832 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
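        // mCurrFeatureState tracks whether staggered video HDR is currently active so
        // that transitions can be logged below.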
Thierry Strudel04e026f2016-10-10 11:27:36 -07007833 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7834 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7835 } else {
7836 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7837 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007838
7839 if(fwk_hdr != curr_hdr_state) {
7840 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7841 if(fwk_hdr)
7842 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7843 else
7844 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7845 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007846 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7847 }
7848
Thierry Strudel54dc9782017-02-15 12:12:10 -08007849 //binning correction
7850 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7851 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7852 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7853 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7854 }
7855
Thierry Strudel04e026f2016-10-10 11:27:36 -07007856 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007857 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007858 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7859 int8_t is_ir_on = 0;
7860
7861 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7862 if(is_ir_on != curr_ir_state) {
7863 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7864 if(is_ir_on)
7865 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7866 else
7867 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7868 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007869 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007870 }
7871
Thierry Strudel269c81a2016-10-12 12:13:59 -07007872 // AEC SPEED
7873 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7874 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7875 }
7876
7877 // AWB SPEED
7878 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7879 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7880 }
7881
Thierry Strudel3d639192016-09-09 11:52:26 -07007882 // TNR
7883 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7884 uint8_t tnr_enable = tnr->denoise_enable;
7885 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007886 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7887 int8_t is_tnr_on = 0;
7888
7889 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7890 if(is_tnr_on != curr_tnr_state) {
7891 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7892 if(is_tnr_on)
7893 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7894 else
7895 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7896 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007897
7898 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7899 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7900 }
7901
7902 // Reprocess crop data
7903 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7904 uint8_t cnt = crop_data->num_of_streams;
7905 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7906 // mm-qcamera-daemon only posts crop_data for streams
7907                 // mm-qcamera-daemon only posts crop_data for streams
7908                 // not linked to pproc, so the absence of valid crop
7909                 // metadata is not necessarily an error case.
7910 } else {
7911 uint32_t reproc_stream_id;
7912 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7913 LOGD("No reprocessible stream found, ignore crop data");
7914 } else {
7915 int rc = NO_ERROR;
7916 Vector<int32_t> roi_map;
7917 int32_t *crop = new int32_t[cnt*4];
7918 if (NULL == crop) {
7919 rc = NO_MEMORY;
7920 }
7921 if (NO_ERROR == rc) {
7922 int32_t streams_found = 0;
7923 for (size_t i = 0; i < cnt; i++) {
7924 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7925 if (pprocDone) {
7926 // HAL already does internal reprocessing,
7927 // either via reprocessing before JPEG encoding,
7928 // or offline postprocessing for pproc bypass case.
7929 crop[0] = 0;
7930 crop[1] = 0;
7931 crop[2] = mInputStreamInfo.dim.width;
7932 crop[3] = mInputStreamInfo.dim.height;
7933 } else {
7934 crop[0] = crop_data->crop_info[i].crop.left;
7935 crop[1] = crop_data->crop_info[i].crop.top;
7936 crop[2] = crop_data->crop_info[i].crop.width;
7937 crop[3] = crop_data->crop_info[i].crop.height;
7938 }
7939 roi_map.add(crop_data->crop_info[i].roi_map.left);
7940 roi_map.add(crop_data->crop_info[i].roi_map.top);
7941 roi_map.add(crop_data->crop_info[i].roi_map.width);
7942 roi_map.add(crop_data->crop_info[i].roi_map.height);
7943 streams_found++;
7944 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7945 crop[0], crop[1], crop[2], crop[3]);
7946 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7947 crop_data->crop_info[i].roi_map.left,
7948 crop_data->crop_info[i].roi_map.top,
7949 crop_data->crop_info[i].roi_map.width,
7950 crop_data->crop_info[i].roi_map.height);
7951 break;
7952
7953 }
7954 }
7955 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7956 &streams_found, 1);
7957 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7958 crop, (size_t)(streams_found * 4));
7959 if (roi_map.array()) {
7960 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7961 roi_map.array(), roi_map.size());
7962 }
7963 }
7964 if (crop) {
7965 delete [] crop;
7966 }
7967 }
7968 }
7969 }
7970
7971 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7972         // Regardless of whether CAC is supported, CTS expects the CAC result to be
7973         // non-NULL, so hardcode the CAC result to OFF mode.
7974 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7975 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7976 } else {
7977 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7978 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7979 *cacMode);
7980 if (NAME_NOT_FOUND != val) {
7981 uint8_t resultCacMode = (uint8_t)val;
7982                 // Check whether the CAC result from the callback equals the framework-set CAC mode.
7983                 // If not, report the CAC mode that came in the corresponding request.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007984 if (pendingRequest.fwkCacMode != resultCacMode) {
7985 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07007986 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007987 //Check if CAC is disabled by property
7988 if (m_cacModeDisabled) {
7989 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7990 }
7991
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007992 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007993 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7994 } else {
7995 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7996 }
7997 }
7998 }
7999
8000 // Post blob of cam_cds_data through vendor tag.
8001 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8002 uint8_t cnt = cdsInfo->num_of_streams;
8003 cam_cds_data_t cdsDataOverride;
8004 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8005 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8006 cdsDataOverride.num_of_streams = 1;
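        // Only the CDS info for the reprocessible output stream is exposed through the
        // vendor tag, so the override blob carries a single stream entry.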
8007 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8008 uint32_t reproc_stream_id;
8009 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8010 LOGD("No reprocessible stream found, ignore cds data");
8011 } else {
8012 for (size_t i = 0; i < cnt; i++) {
8013 if (cdsInfo->cds_info[i].stream_id ==
8014 reproc_stream_id) {
8015 cdsDataOverride.cds_info[0].cds_enable =
8016 cdsInfo->cds_info[i].cds_enable;
8017 break;
8018 }
8019 }
8020 }
8021 } else {
8022 LOGD("Invalid stream count %d in CDS_DATA", cnt);
8023 }
8024 camMetadata.update(QCAMERA3_CDS_INFO,
8025 (uint8_t *)&cdsDataOverride,
8026 sizeof(cam_cds_data_t));
8027 }
8028
8029 // Ldaf calibration data
8030 if (!mLdafCalibExist) {
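    // Cache the LDAF calibration values the first time they appear in the metadata.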
8031 IF_META_AVAILABLE(uint32_t, ldafCalib,
8032 CAM_INTF_META_LDAF_EXIF, metadata) {
8033 mLdafCalibExist = true;
8034 mLdafCalib[0] = ldafCalib[0];
8035 mLdafCalib[1] = ldafCalib[1];
8036 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8037 ldafCalib[0], ldafCalib[1]);
8038 }
8039 }
8040
Thierry Strudel54dc9782017-02-15 12:12:10 -08008041 // EXIF debug data through vendor tag
8042 /*
8043 * Mobicat Mask can assume 3 values:
8044 * 1 refers to Mobicat data,
8045 * 2 refers to Stats Debug and Exif Debug Data
8046 * 3 refers to Mobicat and Stats Debug Data
8047 * We want to make sure that we are sending Exif debug data
8048 * only when Mobicat Mask is 2.
8049 */
8050 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8051 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8052 (uint8_t *)(void *)mExifParams.debug_params,
8053 sizeof(mm_jpeg_debug_exif_params_t));
8054 }
8055
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008056 // Reprocess and DDM debug data through vendor tag
8057 cam_reprocess_info_t repro_info;
8058 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008059 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8060 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008061 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008062 }
8063 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8064 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008065 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008066 }
8067 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8068 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008069 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008070 }
8071 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8072 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008073 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008074 }
8075 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8076 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008077 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008078 }
8079 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008080 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008081 }
8082 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8083 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008084 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008085 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008086 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8087 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8088 }
8089 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8090 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8091 }
8092 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8093 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008094
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008095 // INSTANT AEC MODE
8096 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8097 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8098 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8099 }
8100
Shuzhen Wange763e802016-03-31 10:24:29 -07008101 // AF scene change
8102 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8103 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8104 }
8105
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008106 // Enable ZSL
8107 if (enableZsl != nullptr) {
8108 uint8_t value = *enableZsl ?
8109 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8110 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8111 }
8112
Xu Han821ea9c2017-05-23 09:00:40 -07008113 // OIS Data
8114 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8115 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8116 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8117 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8118 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8119 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8120 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8121 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8122 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8123 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8124 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
8125 }
8126
Thierry Strudel3d639192016-09-09 11:52:26 -07008127 resultMetadata = camMetadata.release();
8128 return resultMetadata;
8129}
8130
8131/*===========================================================================
8132 * FUNCTION : saveExifParams
8133 *
 8134 * DESCRIPTION: Save the 3A and stats EXIF debug parameters from the metadata
 *              callback into mExifParams
8135 *
8136 * PARAMETERS :
8137 * @metadata : metadata information from callback
8138 *
8139 * RETURN : none
8140 *
8141 *==========================================================================*/
8142void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8143{
8144 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8145 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8146 if (mExifParams.debug_params) {
8147 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8148 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8149 }
8150 }
8151 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8152 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8153 if (mExifParams.debug_params) {
8154 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8155 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8156 }
8157 }
8158 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8159 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8160 if (mExifParams.debug_params) {
8161 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8162 mExifParams.debug_params->af_debug_params_valid = TRUE;
8163 }
8164 }
8165 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8166 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8167 if (mExifParams.debug_params) {
8168 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8169 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8170 }
8171 }
8172 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8173 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8174 if (mExifParams.debug_params) {
8175 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8176 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8177 }
8178 }
8179 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8180 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8181 if (mExifParams.debug_params) {
8182 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8183 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8184 }
8185 }
8186 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8187 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8188 if (mExifParams.debug_params) {
8189 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8190 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8191 }
8192 }
8193 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8194 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8195 if (mExifParams.debug_params) {
8196 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8197 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8198 }
8199 }
8200}
8201
8202/*===========================================================================
8203 * FUNCTION : get3AExifParams
8204 *
 8205 * DESCRIPTION: Return the cached EXIF parameters, including 3A debug data
8206 *
8207 * PARAMETERS : none
8208 *
8209 *
8210 * RETURN : mm_jpeg_exif_params_t
8211 *
8212 *==========================================================================*/
8213mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8214{
8215 return mExifParams;
8216}
8217
8218/*===========================================================================
8219 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8220 *
 8221 * DESCRIPTION: Translate urgent (partial result) metadata from the HAL callback
 *              into framework result metadata
8222 *
8223 * PARAMETERS :
8224 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008225 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8226 * urgent metadata in a batch. Always true for
8227 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008228 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008229 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8230 * i.e. even though it doesn't map to a valid partial
8231 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008232 * RETURN : camera_metadata_t*
8233 * metadata in a format specified by fwk
8234 *==========================================================================*/
8235camera_metadata_t*
8236QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008237 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008238 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008239{
8240 CameraMetadata camMetadata;
8241 camera_metadata_t *resultMetadata;
8242
Shuzhen Wang485e2442017-08-02 12:21:08 -07008243 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008244 /* In batch mode, use empty metadata if this is not the last in batch
8245 */
8246 resultMetadata = allocate_camera_metadata(0, 0);
8247 return resultMetadata;
8248 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008249
8250 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8251 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8252 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8253 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8254 }
8255
8256 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8257 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8258 &aecTrigger->trigger, 1);
8259 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8260 &aecTrigger->trigger_id, 1);
8261 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8262 aecTrigger->trigger);
8263 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8264 aecTrigger->trigger_id);
8265 }
8266
8267 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8268 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8269 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8270 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8271 }
8272
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008273 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8274 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8275 if (NAME_NOT_FOUND != val) {
8276 uint8_t fwkAfMode = (uint8_t)val;
8277 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8278 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8279 } else {
8280 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8281 val);
8282 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008283 }
8284
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008285 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8286 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8287 af_trigger->trigger);
8288 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8289 af_trigger->trigger_id);
8290
8291 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8292 mAfTrigger = *af_trigger;
8293 uint32_t fwk_AfState = (uint32_t) *afState;
8294
8295 // If this is the result for a new trigger, check if there is new early
8296 // af state. If there is, use the last af state for all results
8297 // preceding current partial frame number.
8298 for (auto & pendingRequest : mPendingRequestsList) {
8299 if (pendingRequest.frame_number < frame_number) {
8300 pendingRequest.focusStateValid = true;
8301 pendingRequest.focusState = fwk_AfState;
8302 } else if (pendingRequest.frame_number == frame_number) {
8303 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8304 // Check if early AF state for trigger exists. If yes, send AF state as
8305 // partial result for better latency.
8306 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8307 pendingRequest.focusStateSent = true;
8308 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8309 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8310 frame_number, fwkEarlyAfState);
8311 }
8312 }
8313 }
8314 }
8315 }
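    // Always report the most recently seen AF trigger (cached in mAfTrigger), even
    // when this metadata buffer carries no trigger entry.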
8316 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8317 &mAfTrigger.trigger, 1);
8318 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8319
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008320 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8321 /*af regions*/
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008322 cam_rect_t hAfRect = hAfRegions->rect;
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008323 int32_t afRegions[REGIONS_TUPLE_COUNT];
8324         // Adjust the AF region from the sensor output coordinate system to the
8325         // active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008326 mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
8327 hAfRect.width, hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008328
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008329 convertToRegions(hAfRect, afRegions, hAfRegions->weight);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008330 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8331 REGIONS_TUPLE_COUNT);
8332 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8333 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008334 hAfRect.left, hAfRect.top, hAfRect.width,
8335 hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008336 }
8337
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008338 // AF region confidence
8339 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8340 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8341 }
8342
Thierry Strudel3d639192016-09-09 11:52:26 -07008343 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8344 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8345 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8346 if (NAME_NOT_FOUND != val) {
8347 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8348 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8349 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8350 } else {
8351 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8352 }
8353 }
8354
8355 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8356 uint32_t aeMode = CAM_AE_MODE_MAX;
8357 int32_t flashMode = CAM_FLASH_MODE_MAX;
8358 int32_t redeye = -1;
8359 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8360 aeMode = *pAeMode;
8361 }
8362 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8363 flashMode = *pFlashMode;
8364 }
8365 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8366 redeye = *pRedeye;
8367 }
8368
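    // Derive the framework AE mode from the HAL red-eye, flash and AE settings;
    // red-eye reduction and flash modes take precedence over the plain AE on/off state.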
8369 if (1 == redeye) {
8370 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8371 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8372 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8373 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8374 flashMode);
8375 if (NAME_NOT_FOUND != val) {
8376 fwk_aeMode = (uint8_t)val;
8377 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8378 } else {
8379 LOGE("Unsupported flash mode %d", flashMode);
8380 }
8381 } else if (aeMode == CAM_AE_MODE_ON) {
8382 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8383 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8384 } else if (aeMode == CAM_AE_MODE_OFF) {
8385 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8386 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008387 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8388 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8389 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008390 } else {
8391 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8392 "flashMode:%d, aeMode:%u!!!",
8393 redeye, flashMode, aeMode);
8394 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008395 if (mInstantAEC) {
8396         // Increment the frame index count until a bound is reached for instant AEC.
8397 mInstantAecFrameIdxCount++;
8398 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8399 CAM_INTF_META_AEC_INFO, metadata) {
8400 LOGH("ae_params->settled = %d",ae_params->settled);
8401             // If AEC has settled, or the number of frames has reached the bound
8402             // value, reset instant AEC.
8403 if (ae_params->settled ||
8404 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8405 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8406 mInstantAEC = false;
8407 mResetInstantAEC = true;
8408 mInstantAecFrameIdxCount = 0;
8409 }
8410 }
8411 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008412 resultMetadata = camMetadata.release();
8413 return resultMetadata;
8414}
8415
8416/*===========================================================================
8417 * FUNCTION : dumpMetadataToFile
8418 *
8419 * DESCRIPTION: Dumps tuning metadata to file system
8420 *
8421 * PARAMETERS :
8422 * @meta : tuning metadata
8423 * @dumpFrameCount : current dump frame count
8424 * @enabled : Enable mask
8425 *
8426 *==========================================================================*/
8427void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8428 uint32_t &dumpFrameCount,
8429 bool enabled,
8430 const char *type,
8431 uint32_t frameNumber)
8432{
8433 //Some sanity checks
8434 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8435 LOGE("Tuning sensor data size bigger than expected %d: %d",
8436 meta.tuning_sensor_data_size,
8437 TUNING_SENSOR_DATA_MAX);
8438 return;
8439 }
8440
8441 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8442 LOGE("Tuning VFE data size bigger than expected %d: %d",
8443 meta.tuning_vfe_data_size,
8444 TUNING_VFE_DATA_MAX);
8445 return;
8446 }
8447
8448 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8449 LOGE("Tuning CPP data size bigger than expected %d: %d",
8450 meta.tuning_cpp_data_size,
8451 TUNING_CPP_DATA_MAX);
8452 return;
8453 }
8454
8455 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8456 LOGE("Tuning CAC data size bigger than expected %d: %d",
8457 meta.tuning_cac_data_size,
8458 TUNING_CAC_DATA_MAX);
8459 return;
8460 }
8461 //
8462
8463 if(enabled){
8464 char timeBuf[FILENAME_MAX];
8465 char buf[FILENAME_MAX];
8466 memset(buf, 0, sizeof(buf));
8467 memset(timeBuf, 0, sizeof(timeBuf));
8468 time_t current_time;
8469 struct tm * timeinfo;
8470 time (&current_time);
8471 timeinfo = localtime (&current_time);
8472 if (timeinfo != NULL) {
8473 strftime (timeBuf, sizeof(timeBuf),
8474 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8475 }
8476 String8 filePath(timeBuf);
8477 snprintf(buf,
8478 sizeof(buf),
8479 "%dm_%s_%d.bin",
8480 dumpFrameCount,
8481 type,
8482 frameNumber);
8483 filePath.append(buf);
8484 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8485 if (file_fd >= 0) {
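            // The dump file layout mirrors the tuning metadata blob: data version and
            // per-module sizes first, then the sensor, VFE, CPP and CAC payloads.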
8486 ssize_t written_len = 0;
8487 meta.tuning_data_version = TUNING_DATA_VERSION;
8488 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8489 written_len += write(file_fd, data, sizeof(uint32_t));
8490 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8491 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8492 written_len += write(file_fd, data, sizeof(uint32_t));
8493 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8494 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8495 written_len += write(file_fd, data, sizeof(uint32_t));
8496 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8497 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8498 written_len += write(file_fd, data, sizeof(uint32_t));
8499 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8500 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8501 written_len += write(file_fd, data, sizeof(uint32_t));
8502 meta.tuning_mod3_data_size = 0;
8503 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8504 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8505 written_len += write(file_fd, data, sizeof(uint32_t));
8506 size_t total_size = meta.tuning_sensor_data_size;
8507 data = (void *)((uint8_t *)&meta.data);
8508 written_len += write(file_fd, data, total_size);
8509 total_size = meta.tuning_vfe_data_size;
8510 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8511 written_len += write(file_fd, data, total_size);
8512 total_size = meta.tuning_cpp_data_size;
8513 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8514 written_len += write(file_fd, data, total_size);
8515 total_size = meta.tuning_cac_data_size;
8516 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8517 written_len += write(file_fd, data, total_size);
8518 close(file_fd);
8519 }else {
8520 LOGE("fail to open file for metadata dumping");
8521 }
8522 }
8523}
8524
8525/*===========================================================================
8526 * FUNCTION : cleanAndSortStreamInfo
8527 *
8528 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
 8529 * and sort them such that raw streams are at the end of the list.
 8530 * This is a workaround for a camera daemon constraint.
8531 *
8532 * PARAMETERS : None
8533 *
8534 *==========================================================================*/
8535void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8536{
8537 List<stream_info_t *> newStreamInfo;
8538
8539 /*clean up invalid streams*/
8540 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8541 it != mStreamInfo.end();) {
8542 if(((*it)->status) == INVALID){
8543 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8544 delete channel;
8545 free(*it);
8546 it = mStreamInfo.erase(it);
8547 } else {
8548 it++;
8549 }
8550 }
8551
8552 // Move preview/video/callback/snapshot streams into newList
8553 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8554 it != mStreamInfo.end();) {
8555 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8556 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8557 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8558 newStreamInfo.push_back(*it);
8559 it = mStreamInfo.erase(it);
8560 } else
8561 it++;
8562 }
8563 // Move raw streams into newList
8564 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8565 it != mStreamInfo.end();) {
8566 newStreamInfo.push_back(*it);
8567 it = mStreamInfo.erase(it);
8568 }
8569
8570 mStreamInfo = newStreamInfo;
8571}
8572
8573/*===========================================================================
8574 * FUNCTION : extractJpegMetadata
8575 *
8576 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
 8577 * JPEG metadata is cached in HAL, and returned as part of the capture
 8578 * result when metadata is returned from the camera daemon.
8579 *
8580 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8581 * @request: capture request
8582 *
8583 *==========================================================================*/
8584void QCamera3HardwareInterface::extractJpegMetadata(
8585 CameraMetadata& jpegMetadata,
8586 const camera3_capture_request_t *request)
8587{
8588 CameraMetadata frame_settings;
8589 frame_settings = request->settings;
8590
8591 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8592 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8593 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8594 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8595
8596 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8597 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8598 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8599 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8600
8601 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8602 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8603 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8604 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8605
8606 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8607 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8608 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8609 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8610
8611 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8612 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8613 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8614 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8615
8616 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8617 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8618 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8619 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8620
8621 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8622 int32_t thumbnail_size[2];
8623 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8624 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8625 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8626 int32_t orientation =
8627 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008628 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008629 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8630 int32_t temp;
8631 temp = thumbnail_size[0];
8632 thumbnail_size[0] = thumbnail_size[1];
8633 thumbnail_size[1] = temp;
8634 }
8635 }
8636 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8637 thumbnail_size,
8638 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8639 }
8640
8641}
8642
8643/*===========================================================================
8644 * FUNCTION : convertToRegions
8645 *
8646 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8647 *
8648 * PARAMETERS :
8649 * @rect : cam_rect_t struct to convert
8650 * @region : int32_t destination array
8651 * @weight : if we are converting from cam_area_t, weight is valid
8652 * else weight = -1
8653 *
8654 *==========================================================================*/
8655void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8656 int32_t *region, int weight)
8657{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008658 region[FACE_LEFT] = rect.left;
8659 region[FACE_TOP] = rect.top;
8660 region[FACE_RIGHT] = rect.left + rect.width;
8661 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008662 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008663 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008664 }
8665}
8666
8667/*===========================================================================
8668 * FUNCTION : convertFromRegions
8669 *
 8670 * DESCRIPTION: helper method to convert a region array from frame settings
 *              into cam_area_t
 8671 *
 8672 * PARAMETERS :
 8673 * @roi : cam_area_t struct to be filled
 8674 * @frame_settings : capture request settings containing the region tag
 8675 * @tag : metadata tag of the region to convert (xmin, ymin, xmax, ymax, weight)
8677 *
8678 *==========================================================================*/
8679void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008680 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008681{
Thierry Strudel3d639192016-09-09 11:52:26 -07008682 int32_t x_min = frame_settings.find(tag).data.i32[0];
8683 int32_t y_min = frame_settings.find(tag).data.i32[1];
8684 int32_t x_max = frame_settings.find(tag).data.i32[2];
8685 int32_t y_max = frame_settings.find(tag).data.i32[3];
8686 roi.weight = frame_settings.find(tag).data.i32[4];
8687 roi.rect.left = x_min;
8688 roi.rect.top = y_min;
8689 roi.rect.width = x_max - x_min;
8690 roi.rect.height = y_max - y_min;
8691}
8692
8693/*===========================================================================
8694 * FUNCTION : resetIfNeededROI
8695 *
8696 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8697 * crop region
8698 *
8699 * PARAMETERS :
8700 * @roi : cam_area_t struct to resize
8701 * @scalerCropRegion : cam_crop_region_t region to compare against
8702 *
8703 *
8704 *==========================================================================*/
8705bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8706 const cam_crop_region_t* scalerCropRegion)
8707{
8708 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8709 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8710 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8711 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8712
8713     /* According to the spec, weight = 0 indicates that the ROI should be disabled.
8714      * Without this check, the validation below (whether the ROI lies inside the
8715      * scaler crop region) would fail, the ROI would not be reset, and the
8716      * algorithm would continue to use a stale ROI window.
8717 */
8718 if (roi->weight == 0) {
8719 return true;
8720 }
8721
8722 if ((roi_x_max < scalerCropRegion->left) ||
8723         // right edge of roi window is left of scaler crop's left edge
8724         (roi_y_max < scalerCropRegion->top) ||
8725         // bottom edge of roi window is above scaler crop's top edge
8726         (roi->rect.left > crop_x_max) ||
8727         // left edge of roi window is beyond (right of) scaler crop's right edge
8728         (roi->rect.top > crop_y_max)){
8729         // top edge of roi window is below scaler crop's bottom edge
8730 return false;
8731 }
8732 if (roi->rect.left < scalerCropRegion->left) {
8733 roi->rect.left = scalerCropRegion->left;
8734 }
8735 if (roi->rect.top < scalerCropRegion->top) {
8736 roi->rect.top = scalerCropRegion->top;
8737 }
8738 if (roi_x_max > crop_x_max) {
8739 roi_x_max = crop_x_max;
8740 }
8741 if (roi_y_max > crop_y_max) {
8742 roi_y_max = crop_y_max;
8743 }
8744 roi->rect.width = roi_x_max - roi->rect.left;
8745 roi->rect.height = roi_y_max - roi->rect.top;
8746 return true;
8747}
8748
8749/*===========================================================================
8750 * FUNCTION : convertLandmarks
8751 *
8752 * DESCRIPTION: helper method to extract the landmarks from face detection info
8753 *
8754 * PARAMETERS :
8755 * @landmark_data : input landmark data to be converted
8756 * @landmarks : int32_t destination array
8757 *
8758 *
8759 *==========================================================================*/
8760void QCamera3HardwareInterface::convertLandmarks(
8761 cam_face_landmarks_info_t landmark_data,
8762 int32_t *landmarks)
8763{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008764 if (landmark_data.is_left_eye_valid) {
8765 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8766 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8767 } else {
8768 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8769 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8770 }
8771
8772 if (landmark_data.is_right_eye_valid) {
8773 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8774 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8775 } else {
8776 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8777 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8778 }
8779
8780 if (landmark_data.is_mouth_valid) {
8781 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8782 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8783 } else {
8784 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8785 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8786 }
8787}
8788
8789/*===========================================================================
8790 * FUNCTION : setInvalidLandmarks
8791 *
8792 * DESCRIPTION: helper method to set invalid landmarks
8793 *
8794 * PARAMETERS :
8795 * @landmarks : int32_t destination array
8796 *
8797 *
8798 *==========================================================================*/
8799void QCamera3HardwareInterface::setInvalidLandmarks(
8800 int32_t *landmarks)
8801{
8802 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8803 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8804 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8805 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8806 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8807 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008808}
8809
8810#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008811
8812/*===========================================================================
8813 * FUNCTION : getCapabilities
8814 *
8815 * DESCRIPTION: query camera capability from back-end
8816 *
8817 * PARAMETERS :
8818 * @ops : mm-interface ops structure
8819 * @cam_handle : camera handle for which we need capability
8820 *
8821 * RETURN : ptr type of capability structure
8822 * capability for success
8823 * NULL for failure
8824 *==========================================================================*/
8825cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8826 uint32_t cam_handle)
8827{
8828 int rc = NO_ERROR;
8829 QCamera3HeapMemory *capabilityHeap = NULL;
8830 cam_capability_t *cap_ptr = NULL;
8831
8832 if (ops == NULL) {
8833 LOGE("Invalid arguments");
8834 return NULL;
8835 }
8836
8837 capabilityHeap = new QCamera3HeapMemory(1);
8838 if (capabilityHeap == NULL) {
8839 LOGE("creation of capabilityHeap failed");
8840 return NULL;
8841 }
8842
8843 /* Allocate memory for capability buffer */
8844 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8845 if(rc != OK) {
8846         LOGE("No memory for capability");
8847 goto allocate_failed;
8848 }
8849
8850 /* Map memory for capability buffer */
8851 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8852
8853 rc = ops->map_buf(cam_handle,
8854 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8855 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8856 if(rc < 0) {
8857 LOGE("failed to map capability buffer");
8858 rc = FAILED_TRANSACTION;
8859 goto map_failed;
8860 }
8861
8862 /* Query Capability */
8863 rc = ops->query_capability(cam_handle);
8864 if(rc < 0) {
8865 LOGE("failed to query capability");
8866 rc = FAILED_TRANSACTION;
8867 goto query_failed;
8868 }
8869
8870 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8871 if (cap_ptr == NULL) {
8872 LOGE("out of memory");
8873 rc = NO_MEMORY;
8874 goto query_failed;
8875 }
8876
8877 memset(cap_ptr, 0, sizeof(cam_capability_t));
8878 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8879
8880 int index;
8881 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8882 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8883 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8884 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8885 }
8886
8887query_failed:
8888 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8889map_failed:
8890 capabilityHeap->deallocate();
8891allocate_failed:
8892 delete capabilityHeap;
8893
8894 if (rc != NO_ERROR) {
8895 return NULL;
8896 } else {
8897 return cap_ptr;
8898 }
8899}
8900
Thierry Strudel3d639192016-09-09 11:52:26 -07008901/*===========================================================================
8902 * FUNCTION : initCapabilities
8903 *
8904 * DESCRIPTION: initialize camera capabilities in static data struct
8905 *
8906 * PARAMETERS :
8907 * @cameraId : camera Id
8908 *
8909 * RETURN : int32_t type of status
8910 * NO_ERROR -- success
 8911 * non-zero failure code
8912 *==========================================================================*/
8913int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8914{
8915 int rc = 0;
8916 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008917 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008918
8919 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8920 if (rc) {
8921 LOGE("camera_open failed. rc = %d", rc);
8922 goto open_failed;
8923 }
8924 if (!cameraHandle) {
8925 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8926 goto open_failed;
8927 }
8928
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008929 handle = get_main_camera_handle(cameraHandle->camera_handle);
8930 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8931 if (gCamCapability[cameraId] == NULL) {
8932 rc = FAILED_TRANSACTION;
8933 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008934 }
8935
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008936 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008937 if (is_dual_camera_by_idx(cameraId)) {
8938 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8939 gCamCapability[cameraId]->aux_cam_cap =
8940 getCapabilities(cameraHandle->ops, handle);
8941 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8942 rc = FAILED_TRANSACTION;
8943 free(gCamCapability[cameraId]);
8944 goto failed_op;
8945 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008946
8947 // Copy the main camera capability to main_cam_cap struct
8948 gCamCapability[cameraId]->main_cam_cap =
8949 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8950 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8951 LOGE("out of memory");
8952 rc = NO_MEMORY;
8953 goto failed_op;
8954 }
8955 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8956 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008957 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008958failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008959 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8960 cameraHandle = NULL;
8961open_failed:
8962 return rc;
8963}
8964
8965/*==========================================================================
 8966  * FUNCTION   : get3AVersion
8967 *
8968 * DESCRIPTION: get the Q3A S/W version
8969 *
8970 * PARAMETERS :
8971 * @sw_version: Reference of Q3A structure which will hold version info upon
8972 * return
8973 *
8974 * RETURN : None
8975 *
8976 *==========================================================================*/
8977void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8978{
8979 if(gCamCapability[mCameraId])
8980 sw_version = gCamCapability[mCameraId]->q3a_version;
8981 else
8982 LOGE("Capability structure NULL!");
8983}
8984
8985
8986/*===========================================================================
8987 * FUNCTION : initParameters
8988 *
8989 * DESCRIPTION: initialize camera parameters
8990 *
8991 * PARAMETERS :
8992 *
8993 * RETURN : int32_t type of status
8994 * NO_ERROR -- success
 8995  *              non-zero failure code
8996 *==========================================================================*/
8997int QCamera3HardwareInterface::initParameters()
8998{
8999 int rc = 0;
9000
9001 //Allocate Set Param Buffer
9002 mParamHeap = new QCamera3HeapMemory(1);
9003 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9004 if(rc != OK) {
9005 rc = NO_MEMORY;
9006 LOGE("Failed to allocate SETPARM Heap memory");
9007 delete mParamHeap;
9008 mParamHeap = NULL;
9009 return rc;
9010 }
9011
9012 //Map memory for parameters buffer
9013 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9014 CAM_MAPPING_BUF_TYPE_PARM_BUF,
9015 mParamHeap->getFd(0),
9016 sizeof(metadata_buffer_t),
9017 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9018 if(rc < 0) {
9019 LOGE("failed to map SETPARM buffer");
9020 rc = FAILED_TRANSACTION;
9021 mParamHeap->deallocate();
9022 delete mParamHeap;
9023 mParamHeap = NULL;
9024 return rc;
9025 }
9026
9027 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9028
9029 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9030 return rc;
9031}
9032
9033/*===========================================================================
9034 * FUNCTION : deinitParameters
9035 *
9036 * DESCRIPTION: de-initialize camera parameters
9037 *
9038 * PARAMETERS :
9039 *
9040 * RETURN : NONE
9041 *==========================================================================*/
9042void QCamera3HardwareInterface::deinitParameters()
9043{
9044 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9045 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9046
9047 mParamHeap->deallocate();
9048 delete mParamHeap;
9049 mParamHeap = NULL;
9050
9051 mParameters = NULL;
9052
9053 free(mPrevParameters);
9054 mPrevParameters = NULL;
9055}
9056
9057/*===========================================================================
9058 * FUNCTION : calcMaxJpegSize
9059 *
9060 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9061 *
 9062  * PARAMETERS :
 *   @camera_id : camera Id
 9063  *
9064 * RETURN : max_jpeg_size
9065 *==========================================================================*/
9066size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9067{
9068 size_t max_jpeg_size = 0;
9069 size_t temp_width, temp_height;
9070 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9071 MAX_SIZES_CNT);
9072 for (size_t i = 0; i < count; i++) {
9073 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9074 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9075 if (temp_width * temp_height > max_jpeg_size ) {
9076 max_jpeg_size = temp_width * temp_height;
9077 }
9078 }
9079 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9080 return max_jpeg_size;
9081}
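/* Illustrative sizing sketch (example numbers, not tied to any particular sensor):
 * for a largest picture size of 4000x3000, the reserved worst-case JPEG buffer is
 * 4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t), i.e. roughly 18 MB plus the
 * transport blob header appended at the end of the buffer. */
#if 0 // example only; mirrors the formula used in calcMaxJpegSize() above
static size_t exampleJpegBufferSize(size_t width, size_t height)
{
    return width * height * 3 / 2 + sizeof(camera3_jpeg_blob_t);
}
#endif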
9082
9083/*===========================================================================
9084 * FUNCTION : getMaxRawSize
9085 *
9086 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9087 *
 9088  * PARAMETERS :
 *   @camera_id : camera Id
 9089  *
9090 * RETURN : Largest supported Raw Dimension
9091 *==========================================================================*/
9092cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9093{
9094 int max_width = 0;
9095 cam_dimension_t maxRawSize;
9096
9097 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9098 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9099 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9100 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9101 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9102 }
9103 }
9104 return maxRawSize;
9105}
9106
9107
9108/*===========================================================================
9109 * FUNCTION : calcMaxJpegDim
9110 *
9111 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9112 *
9113 * PARAMETERS :
9114 *
9115 * RETURN : max_jpeg_dim
9116 *==========================================================================*/
9117cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9118{
9119 cam_dimension_t max_jpeg_dim;
9120 cam_dimension_t curr_jpeg_dim;
9121 max_jpeg_dim.width = 0;
9122 max_jpeg_dim.height = 0;
9123 curr_jpeg_dim.width = 0;
9124 curr_jpeg_dim.height = 0;
9125 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9126 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9127 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9128 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9129 max_jpeg_dim.width * max_jpeg_dim.height ) {
9130 max_jpeg_dim.width = curr_jpeg_dim.width;
9131 max_jpeg_dim.height = curr_jpeg_dim.height;
9132 }
9133 }
9134 return max_jpeg_dim;
9135}
9136
9137/*===========================================================================
9138 * FUNCTION : addStreamConfig
9139 *
9140 * DESCRIPTION: adds the stream configuration to the array
9141 *
9142 * PARAMETERS :
9143 * @available_stream_configs : pointer to stream configuration array
9144 * @scalar_format : scalar format
9145 * @dim : configuration dimension
9146 * @config_type : input or output configuration type
9147 *
9148 * RETURN : NONE
9149 *==========================================================================*/
9150void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9151 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9152{
9153 available_stream_configs.add(scalar_format);
9154 available_stream_configs.add(dim.width);
9155 available_stream_configs.add(dim.height);
9156 available_stream_configs.add(config_type);
9157}
9158
9159/*===========================================================================
 9160  * FUNCTION   : supportBurstCapture
9161 *
9162 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9163 *
9164 * PARAMETERS :
9165 * @cameraId : camera Id
9166 *
9167 * RETURN : true if camera supports BURST_CAPTURE
9168 * false otherwise
9169 *==========================================================================*/
9170bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9171{
9172 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9173 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9174 const int32_t highResWidth = 3264;
9175 const int32_t highResHeight = 2448;
9176
9177 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9178 // Maximum resolution images cannot be captured at >= 10fps
9179 // -> not supporting BURST_CAPTURE
9180 return false;
9181 }
9182
9183 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9184 // Maximum resolution images can be captured at >= 20fps
9185 // --> supporting BURST_CAPTURE
9186 return true;
9187 }
9188
9189 // Find the smallest highRes resolution, or largest resolution if there is none
9190 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9191 MAX_SIZES_CNT);
9192 size_t highRes = 0;
9193 while ((highRes + 1 < totalCnt) &&
9194 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9195 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9196 highResWidth * highResHeight)) {
9197 highRes++;
9198 }
9199 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9200 return true;
9201 } else {
9202 return false;
9203 }
9204}
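/* Illustrative reading of the bounds above (example durations, not from any real
 * sensor table): a full-resolution picture_min_duration of 40000000 ns (25 fps)
 * satisfies the 50 ms bound, so BURST_CAPTURE is advertised directly; a value of
 * 66666666 ns (15 fps) is neither above the 100 ms cutoff nor within 50 ms, so the
 * decision falls back to the smallest >= 3264x2448 size and its min duration. */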
9205
9206/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009207 * FUNCTION : getPDStatIndex
9208 *
9209 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9210 *
9211 * PARAMETERS :
9212 * @caps : camera capabilities
9213 *
9214 * RETURN : int32_t type
9215 * non-negative - on success
9216 * -1 - on failure
9217 *==========================================================================*/
9218int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9219 if (nullptr == caps) {
9220 return -1;
9221 }
9222
9223 uint32_t metaRawCount = caps->meta_raw_channel_count;
9224 int32_t ret = -1;
9225 for (size_t i = 0; i < metaRawCount; i++) {
9226 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9227 ret = i;
9228 break;
9229 }
9230 }
9231
9232 return ret;
9233}
9234
9235/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009236 * FUNCTION : initStaticMetadata
9237 *
9238 * DESCRIPTION: initialize the static metadata
9239 *
9240 * PARAMETERS :
9241 * @cameraId : camera Id
9242 *
9243 * RETURN : int32_t type of status
9244 * 0 -- success
9245 * non-zero failure code
9246 *==========================================================================*/
9247int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9248{
9249 int rc = 0;
9250 CameraMetadata staticInfo;
9251 size_t count = 0;
9252 bool limitedDevice = false;
9253 char prop[PROPERTY_VALUE_MAX];
9254 bool supportBurst = false;
9255
9256 supportBurst = supportBurstCapture(cameraId);
9257
9258 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
 9259      * guaranteed or if min fps of max resolution is less than 20 fps, it is
 9260      * advertised as a limited device*/
9261 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9262 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9263 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9264 !supportBurst;
9265
9266 uint8_t supportedHwLvl = limitedDevice ?
9267 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009268#ifndef USE_HAL_3_3
9269 // LEVEL_3 - This device will support level 3.
9270 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9271#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009272 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009273#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009274
9275 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9276 &supportedHwLvl, 1);
9277
9278 bool facingBack = false;
9279 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9280 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9281 facingBack = true;
9282 }
9283 /*HAL 3 only*/
9284 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9285 &gCamCapability[cameraId]->min_focus_distance, 1);
9286
9287 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9288 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9289
9290 /*should be using focal lengths but sensor doesn't provide that info now*/
9291 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9292 &gCamCapability[cameraId]->focal_length,
9293 1);
9294
9295 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9296 gCamCapability[cameraId]->apertures,
9297 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9298
9299 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9300 gCamCapability[cameraId]->filter_densities,
9301 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9302
9303
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009304 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9305 size_t mode_count =
9306 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9307 for (size_t i = 0; i < mode_count; i++) {
9308 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9309 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009310 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009311 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009312
9313 int32_t lens_shading_map_size[] = {
9314 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9315 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9316 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9317 lens_shading_map_size,
9318 sizeof(lens_shading_map_size)/sizeof(int32_t));
9319
9320 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9321 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9322
9323 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9324 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9325
9326 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9327 &gCamCapability[cameraId]->max_frame_duration, 1);
9328
9329 camera_metadata_rational baseGainFactor = {
9330 gCamCapability[cameraId]->base_gain_factor.numerator,
9331 gCamCapability[cameraId]->base_gain_factor.denominator};
9332 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9333 &baseGainFactor, 1);
9334
9335 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9336 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9337
9338 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9339 gCamCapability[cameraId]->pixel_array_size.height};
9340 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9341 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9342
9343 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9344 gCamCapability[cameraId]->active_array_size.top,
9345 gCamCapability[cameraId]->active_array_size.width,
9346 gCamCapability[cameraId]->active_array_size.height};
9347 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9348 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9349
9350 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9351 &gCamCapability[cameraId]->white_level, 1);
9352
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009353 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9354 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9355 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009356 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009357 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009358
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009359#ifndef USE_HAL_3_3
9360 bool hasBlackRegions = false;
9361 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9362 LOGW("black_region_count: %d is bounded to %d",
9363 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9364 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9365 }
9366 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9367 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9368 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9369 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9370 }
9371 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9372 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9373 hasBlackRegions = true;
9374 }
9375#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009376 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9377 &gCamCapability[cameraId]->flash_charge_duration, 1);
9378
9379 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9380 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9381
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009382 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9383 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9384 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009385 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9386 &timestampSource, 1);
9387
Thierry Strudel54dc9782017-02-15 12:12:10 -08009388 //update histogram vendor data
9389 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009390 &gCamCapability[cameraId]->histogram_size, 1);
9391
Thierry Strudel54dc9782017-02-15 12:12:10 -08009392 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009393 &gCamCapability[cameraId]->max_histogram_count, 1);
9394
Shuzhen Wang14415f52016-11-16 18:26:18 -08009395 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
 9396     //so that the app can request fewer bins than the maximum supported.
9397 std::vector<int32_t> histBins;
9398 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9399 histBins.push_back(maxHistBins);
9400 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9401 (maxHistBins & 0x1) == 0) {
9402 histBins.push_back(maxHistBins >> 1);
9403 maxHistBins >>= 1;
9404 }
9405 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9406 histBins.data(), histBins.size());
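    /* Example with assumed values: if max_histogram_count is 256 and
     * MIN_CAM_HISTOGRAM_STATS_SIZE is 32, the advertised bin counts are
     * {256, 128, 64, 32}; halving continues only while the current count is even
     * and the halved count stays at or above the minimum. */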
9407
Thierry Strudel3d639192016-09-09 11:52:26 -07009408 int32_t sharpness_map_size[] = {
9409 gCamCapability[cameraId]->sharpness_map_size.width,
9410 gCamCapability[cameraId]->sharpness_map_size.height};
9411
9412 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9413 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9414
9415 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9416 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9417
Emilian Peev0f3c3162017-03-15 12:57:46 +00009418 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9419 if (0 <= indexPD) {
9420 // Advertise PD stats data as part of the Depth capabilities
9421 int32_t depthWidth =
9422 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9423 int32_t depthHeight =
9424 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009425 int32_t depthStride =
9426 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009427 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9428 assert(0 < depthSamplesCount);
9429 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9430 &depthSamplesCount, 1);
9431
9432 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9433 depthHeight,
9434 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9435 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9436 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9437 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9438 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9439
9440 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9441 depthHeight, 33333333,
9442 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9443 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9444 depthMinDuration,
9445 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9446
9447 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9448 depthHeight, 0,
9449 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9450 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9451 depthStallDuration,
9452 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9453
9454 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9455 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009456
9457 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9458 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9459 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009460 }
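    /* Worked example for the PD mapping above, using hypothetical dimensions: a
     * raw_meta_dim of 640x480 gives depthStride = 1280 bytes and
     * depthSamplesCount = (640 * 480 * 2) / 16 = 38400, which is what gets
     * published as ANDROID_DEPTH_MAX_DEPTH_SAMPLES and as the width of the
     * corresponding BLOB depth configuration. */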
9461
Thierry Strudel3d639192016-09-09 11:52:26 -07009462 int32_t scalar_formats[] = {
9463 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9464 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9465 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9466 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9467 HAL_PIXEL_FORMAT_RAW10,
9468 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009469 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9470 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9471 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009472
9473 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9474 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9475 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9476 count, MAX_SIZES_CNT, available_processed_sizes);
9477 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9478 available_processed_sizes, count * 2);
9479
9480 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9481 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9482 makeTable(gCamCapability[cameraId]->raw_dim,
9483 count, MAX_SIZES_CNT, available_raw_sizes);
9484 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9485 available_raw_sizes, count * 2);
9486
9487 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9488 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9489 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9490 count, MAX_SIZES_CNT, available_fps_ranges);
9491 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9492 available_fps_ranges, count * 2);
9493
9494 camera_metadata_rational exposureCompensationStep = {
9495 gCamCapability[cameraId]->exp_compensation_step.numerator,
9496 gCamCapability[cameraId]->exp_compensation_step.denominator};
9497 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9498 &exposureCompensationStep, 1);
9499
9500 Vector<uint8_t> availableVstabModes;
9501 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9502 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009503 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009504 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009505 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009506 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009507 count = IS_TYPE_MAX;
9508 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9509 for (size_t i = 0; i < count; i++) {
9510 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9511 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9512 eisSupported = true;
9513 break;
9514 }
9515 }
9516 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009517 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9518 }
9519 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9520 availableVstabModes.array(), availableVstabModes.size());
9521
9522 /*HAL 1 and HAL 3 common*/
9523 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9524 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9525 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009526 // Cap the max zoom to the max preferred value
9527 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009528 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9529 &maxZoom, 1);
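    /* Example with assumed table values: a zoom_ratio_tbl whose last entry is 600
     * yields 600 / 100 = 6, so maxZoom = MIN(6, MAX_PREFERRED_ZOOM_RATIO). Note the
     * division is integral, so a last entry of 650 also reports 6x. */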
9530
9531 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9532 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9533
9534 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9535 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9536 max3aRegions[2] = 0; /* AF not supported */
9537 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9538 max3aRegions, 3);
9539
9540 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9541 memset(prop, 0, sizeof(prop));
9542 property_get("persist.camera.facedetect", prop, "1");
9543 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9544 LOGD("Support face detection mode: %d",
9545 supportedFaceDetectMode);
9546
9547 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009548     /* supported mode should be OFF if the max number of faces is 0 */
9549 if (maxFaces <= 0) {
9550 supportedFaceDetectMode = 0;
9551 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009552 Vector<uint8_t> availableFaceDetectModes;
9553 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9554 if (supportedFaceDetectMode == 1) {
9555 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9556 } else if (supportedFaceDetectMode == 2) {
9557 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9558 } else if (supportedFaceDetectMode == 3) {
9559 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9560 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9561 } else {
9562 maxFaces = 0;
9563 }
9564 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9565 availableFaceDetectModes.array(),
9566 availableFaceDetectModes.size());
9567 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9568 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009569 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9570 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9571 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009572
9573 int32_t exposureCompensationRange[] = {
9574 gCamCapability[cameraId]->exposure_compensation_min,
9575 gCamCapability[cameraId]->exposure_compensation_max};
9576 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9577 exposureCompensationRange,
9578 sizeof(exposureCompensationRange)/sizeof(int32_t));
9579
9580 uint8_t lensFacing = (facingBack) ?
9581 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9582 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9583
9584 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9585 available_thumbnail_sizes,
9586 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9587
9588 /*all sizes will be clubbed into this tag*/
9589 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9590 /*android.scaler.availableStreamConfigurations*/
9591 Vector<int32_t> available_stream_configs;
9592 cam_dimension_t active_array_dim;
9593 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9594 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009595
9596 /*advertise list of input dimensions supported based on below property.
 9597       By default all sizes up to 5MP will be advertised.
9598 Note that the setprop resolution format should be WxH.
9599 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9600 To list all supported sizes, setprop needs to be set with "0x0" */
9601 cam_dimension_t minInputSize = {2592,1944}; //5MP
9602 memset(prop, 0, sizeof(prop));
9603 property_get("persist.camera.input.minsize", prop, "2592x1944");
9604 if (strlen(prop) > 0) {
9605 char *saveptr = NULL;
9606 char *token = strtok_r(prop, "x", &saveptr);
9607 if (token != NULL) {
9608 minInputSize.width = atoi(token);
9609 }
9610 token = strtok_r(NULL, "x", &saveptr);
9611 if (token != NULL) {
9612 minInputSize.height = atoi(token);
9613 }
9614 }
9615
Thierry Strudel3d639192016-09-09 11:52:26 -07009616 /* Add input/output stream configurations for each scalar formats*/
9617 for (size_t j = 0; j < scalar_formats_count; j++) {
9618 switch (scalar_formats[j]) {
9619 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9620 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9621 case HAL_PIXEL_FORMAT_RAW10:
9622 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9623 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9624 addStreamConfig(available_stream_configs, scalar_formats[j],
9625 gCamCapability[cameraId]->raw_dim[i],
9626 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9627 }
9628 break;
9629 case HAL_PIXEL_FORMAT_BLOB:
9630 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9631 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9632 addStreamConfig(available_stream_configs, scalar_formats[j],
9633 gCamCapability[cameraId]->picture_sizes_tbl[i],
9634 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9635 }
9636 break;
9637 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9638 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9639 default:
9640 cam_dimension_t largest_picture_size;
9641 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9642 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9643 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9644 addStreamConfig(available_stream_configs, scalar_formats[j],
9645 gCamCapability[cameraId]->picture_sizes_tbl[i],
9646 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009647             /* For the below 2 formats we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009648 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9649 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009650 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9651 >= minInputSize.width) || (gCamCapability[cameraId]->
9652 picture_sizes_tbl[i].height >= minInputSize.height)) {
9653 addStreamConfig(available_stream_configs, scalar_formats[j],
9654 gCamCapability[cameraId]->picture_sizes_tbl[i],
9655 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9656 }
9657 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009658 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009659
Thierry Strudel3d639192016-09-09 11:52:26 -07009660 break;
9661 }
9662 }
9663
9664 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9665 available_stream_configs.array(), available_stream_configs.size());
9666 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9667 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9668
9669 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9670 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9671
9672 /* android.scaler.availableMinFrameDurations */
9673 Vector<int64_t> available_min_durations;
9674 for (size_t j = 0; j < scalar_formats_count; j++) {
9675 switch (scalar_formats[j]) {
9676 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9677 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9678 case HAL_PIXEL_FORMAT_RAW10:
9679 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9680 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9681 available_min_durations.add(scalar_formats[j]);
9682 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9683 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9684 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9685 }
9686 break;
9687 default:
9688 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9689 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9690 available_min_durations.add(scalar_formats[j]);
9691 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9692 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9693 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9694 }
9695 break;
9696 }
9697 }
9698 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9699 available_min_durations.array(), available_min_durations.size());
9700
9701 Vector<int32_t> available_hfr_configs;
9702 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9703 int32_t fps = 0;
9704 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9705 case CAM_HFR_MODE_60FPS:
9706 fps = 60;
9707 break;
9708 case CAM_HFR_MODE_90FPS:
9709 fps = 90;
9710 break;
9711 case CAM_HFR_MODE_120FPS:
9712 fps = 120;
9713 break;
9714 case CAM_HFR_MODE_150FPS:
9715 fps = 150;
9716 break;
9717 case CAM_HFR_MODE_180FPS:
9718 fps = 180;
9719 break;
9720 case CAM_HFR_MODE_210FPS:
9721 fps = 210;
9722 break;
9723 case CAM_HFR_MODE_240FPS:
9724 fps = 240;
9725 break;
9726 case CAM_HFR_MODE_480FPS:
9727 fps = 480;
9728 break;
9729 case CAM_HFR_MODE_OFF:
9730 case CAM_HFR_MODE_MAX:
9731 default:
9732 break;
9733 }
9734
9735 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9736 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9737 /* For each HFR frame rate, need to advertise one variable fps range
9738 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9739 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9740 * set by the app. When video recording is started, [120, 120] is
9741 * set. This way sensor configuration does not change when recording
9742 * is started */
9743
9744 /* (width, height, fps_min, fps_max, batch_size_max) */
9745 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9746 j < MAX_SIZES_CNT; j++) {
9747 available_hfr_configs.add(
9748 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9749 available_hfr_configs.add(
9750 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9751 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9752 available_hfr_configs.add(fps);
9753 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9754
9755 /* (width, height, fps_min, fps_max, batch_size_max) */
9756 available_hfr_configs.add(
9757 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9758 available_hfr_configs.add(
9759 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9760 available_hfr_configs.add(fps);
9761 available_hfr_configs.add(fps);
9762 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9763 }
9764 }
9765 }
9766 //Advertise HFR capability only if the property is set
9767 memset(prop, 0, sizeof(prop));
9768 property_get("persist.camera.hal3hfr.enable", prop, "1");
9769 uint8_t hfrEnable = (uint8_t)atoi(prop);
9770
9771 if(hfrEnable && available_hfr_configs.array()) {
9772 staticInfo.update(
9773 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9774 available_hfr_configs.array(), available_hfr_configs.size());
9775 }
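    /* Example of what the HFR loop publishes for a hypothetical 1920x1080 @ 120 fps
     * entry, assuming PREVIEW_FPS_FOR_HFR is 30: two tuples are added,
     * (1920, 1080, 30, 120, 4) for camcorder preview and (1920, 1080, 120, 120, 4)
     * once recording starts, so the sensor mode stays the same across the switch. */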
9776
9777 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9778 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9779 &max_jpeg_size, 1);
9780
9781 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9782 size_t size = 0;
9783 count = CAM_EFFECT_MODE_MAX;
9784 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9785 for (size_t i = 0; i < count; i++) {
9786 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9787 gCamCapability[cameraId]->supported_effects[i]);
9788 if (NAME_NOT_FOUND != val) {
9789 avail_effects[size] = (uint8_t)val;
9790 size++;
9791 }
9792 }
9793 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9794 avail_effects,
9795 size);
9796
9797 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9798 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9799 size_t supported_scene_modes_cnt = 0;
9800 count = CAM_SCENE_MODE_MAX;
9801 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9802 for (size_t i = 0; i < count; i++) {
9803 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9804 CAM_SCENE_MODE_OFF) {
9805 int val = lookupFwkName(SCENE_MODES_MAP,
9806 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9807 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009808
Thierry Strudel3d639192016-09-09 11:52:26 -07009809 if (NAME_NOT_FOUND != val) {
9810 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9811 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9812 supported_scene_modes_cnt++;
9813 }
9814 }
9815 }
9816 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9817 avail_scene_modes,
9818 supported_scene_modes_cnt);
9819
9820 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9821 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9822 supported_scene_modes_cnt,
9823 CAM_SCENE_MODE_MAX,
9824 scene_mode_overrides,
9825 supported_indexes,
9826 cameraId);
9827
9828 if (supported_scene_modes_cnt == 0) {
9829 supported_scene_modes_cnt = 1;
9830 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9831 }
9832
9833 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9834 scene_mode_overrides, supported_scene_modes_cnt * 3);
9835
9836 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9837 ANDROID_CONTROL_MODE_AUTO,
9838 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9839 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9840 available_control_modes,
9841 3);
9842
9843 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9844 size = 0;
9845 count = CAM_ANTIBANDING_MODE_MAX;
9846 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9847 for (size_t i = 0; i < count; i++) {
9848 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9849 gCamCapability[cameraId]->supported_antibandings[i]);
9850 if (NAME_NOT_FOUND != val) {
9851 avail_antibanding_modes[size] = (uint8_t)val;
9852 size++;
9853 }
9854
9855 }
9856 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9857 avail_antibanding_modes,
9858 size);
9859
9860 uint8_t avail_abberation_modes[] = {
9861 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9862 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9863 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9864 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9865 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9866 if (0 == count) {
 9867         // If no aberration correction modes are available for a device, advertise only the OFF mode
9868 size = 1;
9869 } else {
 9870         // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
 9871         // so advertise all 3 modes if at least one mode is supported, as per the
 9872         // new M requirement
9873 size = 3;
9874 }
9875 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9876 avail_abberation_modes,
9877 size);
9878
9879 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9880 size = 0;
9881 count = CAM_FOCUS_MODE_MAX;
9882 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9883 for (size_t i = 0; i < count; i++) {
9884 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9885 gCamCapability[cameraId]->supported_focus_modes[i]);
9886 if (NAME_NOT_FOUND != val) {
9887 avail_af_modes[size] = (uint8_t)val;
9888 size++;
9889 }
9890 }
9891 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9892 avail_af_modes,
9893 size);
9894
9895 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9896 size = 0;
9897 count = CAM_WB_MODE_MAX;
9898 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9899 for (size_t i = 0; i < count; i++) {
9900 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9901 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9902 gCamCapability[cameraId]->supported_white_balances[i]);
9903 if (NAME_NOT_FOUND != val) {
9904 avail_awb_modes[size] = (uint8_t)val;
9905 size++;
9906 }
9907 }
9908 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9909 avail_awb_modes,
9910 size);
9911
9912 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9913 count = CAM_FLASH_FIRING_LEVEL_MAX;
9914 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9915 count);
9916 for (size_t i = 0; i < count; i++) {
9917 available_flash_levels[i] =
9918 gCamCapability[cameraId]->supported_firing_levels[i];
9919 }
9920 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9921 available_flash_levels, count);
9922
9923 uint8_t flashAvailable;
9924 if (gCamCapability[cameraId]->flash_available)
9925 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9926 else
9927 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9928 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9929 &flashAvailable, 1);
9930
9931 Vector<uint8_t> avail_ae_modes;
9932 count = CAM_AE_MODE_MAX;
9933 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9934 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009935 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9936 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9937 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9938 }
9939 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009940 }
9941 if (flashAvailable) {
9942 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9943 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9944 }
9945 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9946 avail_ae_modes.array(),
9947 avail_ae_modes.size());
9948
9949 int32_t sensitivity_range[2];
9950 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9951 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9952 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9953 sensitivity_range,
9954 sizeof(sensitivity_range) / sizeof(int32_t));
9955
9956 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9957 &gCamCapability[cameraId]->max_analog_sensitivity,
9958 1);
9959
9960 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9961 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9962 &sensor_orientation,
9963 1);
9964
9965 int32_t max_output_streams[] = {
9966 MAX_STALLING_STREAMS,
9967 MAX_PROCESSED_STREAMS,
9968 MAX_RAW_STREAMS};
9969 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9970 max_output_streams,
9971 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9972
9973 uint8_t avail_leds = 0;
9974 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9975 &avail_leds, 0);
9976
9977 uint8_t focus_dist_calibrated;
9978 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9979 gCamCapability[cameraId]->focus_dist_calibrated);
9980 if (NAME_NOT_FOUND != val) {
9981 focus_dist_calibrated = (uint8_t)val;
9982 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9983 &focus_dist_calibrated, 1);
9984 }
9985
9986 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9987 size = 0;
9988 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9989 MAX_TEST_PATTERN_CNT);
9990 for (size_t i = 0; i < count; i++) {
9991 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9992 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9993 if (NAME_NOT_FOUND != testpatternMode) {
9994 avail_testpattern_modes[size] = testpatternMode;
9995 size++;
9996 }
9997 }
9998 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9999 avail_testpattern_modes,
10000 size);
10001
10002 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10003 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10004 &max_pipeline_depth,
10005 1);
10006
10007 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10008 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10009 &partial_result_count,
10010 1);
10011
10012 int32_t max_stall_duration = MAX_REPROCESS_STALL;
10013 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10014
10015 Vector<uint8_t> available_capabilities;
10016 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
10017 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
10018 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
10019 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
10020 if (supportBurst) {
10021 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
10022 }
10023 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
10024 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
10025 if (hfrEnable && available_hfr_configs.array()) {
10026 available_capabilities.add(
10027 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
10028 }
10029
10030 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
10031 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
10032 }
10033 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10034 available_capabilities.array(),
10035 available_capabilities.size());
10036
 10037     //aeLockAvailable to be set to true if capabilities include MANUAL_SENSOR or BURST_CAPTURE
10038 //Assumption is that all bayer cameras support MANUAL_SENSOR.
10039 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10040 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10041
10042 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10043 &aeLockAvailable, 1);
10044
 10045     //awbLockAvailable to be set to true if capabilities include MANUAL_POST_PROCESSING or
10046 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
10047 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10048 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10049
10050 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10051 &awbLockAvailable, 1);
10052
10053 int32_t max_input_streams = 1;
10054 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10055 &max_input_streams,
10056 1);
10057
10058 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10059 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10060 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10061 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10062 HAL_PIXEL_FORMAT_YCbCr_420_888};
10063 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10064 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
10065
10066 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10067 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10068 &max_latency,
10069 1);
10070
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010071#ifndef USE_HAL_3_3
10072 int32_t isp_sensitivity_range[2];
10073 isp_sensitivity_range[0] =
10074 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10075 isp_sensitivity_range[1] =
10076 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10077 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10078 isp_sensitivity_range,
10079 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10080#endif
10081
Thierry Strudel3d639192016-09-09 11:52:26 -070010082 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10083 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10084 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10085 available_hot_pixel_modes,
10086 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10087
10088 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10089 ANDROID_SHADING_MODE_FAST,
10090 ANDROID_SHADING_MODE_HIGH_QUALITY};
10091 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10092 available_shading_modes,
10093 3);
10094
10095 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10096 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10097 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10098 available_lens_shading_map_modes,
10099 2);
10100
10101 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10102 ANDROID_EDGE_MODE_FAST,
10103 ANDROID_EDGE_MODE_HIGH_QUALITY,
10104 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10105 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10106 available_edge_modes,
10107 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10108
10109 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10110 ANDROID_NOISE_REDUCTION_MODE_FAST,
10111 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10112 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10113 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10114 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10115 available_noise_red_modes,
10116 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10117
10118 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10119 ANDROID_TONEMAP_MODE_FAST,
10120 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10121 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10122 available_tonemap_modes,
10123 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10124
10125 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10126 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10127 available_hot_pixel_map_modes,
10128 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10129
10130 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10131 gCamCapability[cameraId]->reference_illuminant1);
10132 if (NAME_NOT_FOUND != val) {
10133 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10134 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10135 }
10136
10137 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10138 gCamCapability[cameraId]->reference_illuminant2);
10139 if (NAME_NOT_FOUND != val) {
10140 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10141 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10142 }
10143
10144 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10145 (void *)gCamCapability[cameraId]->forward_matrix1,
10146 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10147
10148 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10149 (void *)gCamCapability[cameraId]->forward_matrix2,
10150 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10151
10152 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10153 (void *)gCamCapability[cameraId]->color_transform1,
10154 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10155
10156 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10157 (void *)gCamCapability[cameraId]->color_transform2,
10158 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10159
10160 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10161 (void *)gCamCapability[cameraId]->calibration_transform1,
10162 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10163
10164 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10165 (void *)gCamCapability[cameraId]->calibration_transform2,
10166 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10167
10168 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10169 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10170 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10171 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10172 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10173 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10174 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10175 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10176 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10177 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10178 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10179 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10180 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10181 ANDROID_JPEG_GPS_COORDINATES,
10182 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10183 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10184 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10185 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10186 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10187 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10188 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10189 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10190 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10191 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010192#ifndef USE_HAL_3_3
10193 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10194#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010195 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010196 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010197 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10198 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010199 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010200 /* DevCamDebug metadata request_keys_basic */
10201 DEVCAMDEBUG_META_ENABLE,
10202 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010203 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010204 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010205 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010206 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010207 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010208 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010209
10210 size_t request_keys_cnt =
10211 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10212 Vector<int32_t> available_request_keys;
10213 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10214 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10215 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10216 }
10217
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010218 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +000010219 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010220 }
10221
Thierry Strudel3d639192016-09-09 11:52:26 -070010222 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10223 available_request_keys.array(), available_request_keys.size());
10224
10225 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10226 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10227 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10228 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10229 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10230 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10231 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10232 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10233 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10234 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10235 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10236 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10237 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10238 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10239 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10240 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10241 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010242 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010243 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10244 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10245 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010246 ANDROID_STATISTICS_FACE_SCORES,
10247#ifndef USE_HAL_3_3
10248 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10249#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010250 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010251 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010252 // DevCamDebug metadata result_keys_basic
10253 DEVCAMDEBUG_META_ENABLE,
10254 // DevCamDebug metadata result_keys AF
10255 DEVCAMDEBUG_AF_LENS_POSITION,
10256 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10257 DEVCAMDEBUG_AF_TOF_DISTANCE,
10258 DEVCAMDEBUG_AF_LUMA,
10259 DEVCAMDEBUG_AF_HAF_STATE,
10260 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10261 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10262 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10263 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10264 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10265 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10266 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10267 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10268 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10269 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10270 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10271 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10272 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10273 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10274 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10275 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10276 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10277 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10278 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10279 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10280 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10281 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10282 // DevCamDebug metadata result_keys AEC
10283 DEVCAMDEBUG_AEC_TARGET_LUMA,
10284 DEVCAMDEBUG_AEC_COMP_LUMA,
10285 DEVCAMDEBUG_AEC_AVG_LUMA,
10286 DEVCAMDEBUG_AEC_CUR_LUMA,
10287 DEVCAMDEBUG_AEC_LINECOUNT,
10288 DEVCAMDEBUG_AEC_REAL_GAIN,
10289 DEVCAMDEBUG_AEC_EXP_INDEX,
10290 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010291 // DevCamDebug metadata result_keys zzHDR
10292 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10293 DEVCAMDEBUG_AEC_L_LINECOUNT,
10294 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10295 DEVCAMDEBUG_AEC_S_LINECOUNT,
10296 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10297 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10298 // DevCamDebug metadata result_keys ADRC
10299 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10300 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10301 DEVCAMDEBUG_AEC_GTM_RATIO,
10302 DEVCAMDEBUG_AEC_LTM_RATIO,
10303 DEVCAMDEBUG_AEC_LA_RATIO,
10304 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010305 // DevCamDebug metadata result_keys AEC MOTION
10306 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10307 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10308 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010309 // DevCamDebug metadata result_keys AWB
10310 DEVCAMDEBUG_AWB_R_GAIN,
10311 DEVCAMDEBUG_AWB_G_GAIN,
10312 DEVCAMDEBUG_AWB_B_GAIN,
10313 DEVCAMDEBUG_AWB_CCT,
10314 DEVCAMDEBUG_AWB_DECISION,
10315 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010316 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10317 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10318 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010319 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Shuzhen Wangc89c77e2017-08-07 15:50:12 -070010320 NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010321 };
10322
Thierry Strudel3d639192016-09-09 11:52:26 -070010323 size_t result_keys_cnt =
10324 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10325
10326 Vector<int32_t> available_result_keys;
10327 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10328 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10329 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10330 }
10331 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10332 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10333 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10334 }
10335 if (supportedFaceDetectMode == 1) {
10336 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10337 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10338 } else if ((supportedFaceDetectMode == 2) ||
10339 (supportedFaceDetectMode == 3)) {
10340 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10341 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10342 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010343#ifndef USE_HAL_3_3
10344 if (hasBlackRegions) {
10345 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10346 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10347 }
10348#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010349
10350 if (gExposeEnableZslKey) {
10351 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10352 }
10353
Thierry Strudel3d639192016-09-09 11:52:26 -070010354 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10355 available_result_keys.array(), available_result_keys.size());
10356
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010357 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010358 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10359 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10360 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10361 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10362 ANDROID_SCALER_CROPPING_TYPE,
10363 ANDROID_SYNC_MAX_LATENCY,
10364 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10365 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10366 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10367 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10368 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10369 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10370 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10371 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10372 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10373 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10374 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10375 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10376 ANDROID_LENS_FACING,
10377 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10378 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10379 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10380 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10381 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10382 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10383 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10384 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10385 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10386 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10387 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10388 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10389 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10390 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10391 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10392 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10393 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10394 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10395 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10396 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010397 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010398 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10399 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10400 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10401 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10402 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10403 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10404 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10405 ANDROID_CONTROL_AVAILABLE_MODES,
10406 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10407 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10408 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10409 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010410 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10411#ifndef USE_HAL_3_3
10412 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10413 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10414#endif
10415 };
10416
10417 Vector<int32_t> available_characteristics_keys;
10418 available_characteristics_keys.appendArray(characteristics_keys_basic,
10419 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10420#ifndef USE_HAL_3_3
10421 if (hasBlackRegions) {
10422 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10423 }
10424#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010425
10426 if (0 <= indexPD) {
10427 int32_t depthKeys[] = {
10428 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10429 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10430 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10431 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10432 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10433 };
10434 available_characteristics_keys.appendArray(depthKeys,
10435 sizeof(depthKeys) / sizeof(depthKeys[0]));
10436 }
10437
Thierry Strudel3d639192016-09-09 11:52:26 -070010438 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010439 available_characteristics_keys.array(),
10440 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010441
10442    /* Available stall durations depend on the HW + SW and will differ across devices */
10443    /* RAW stall durations still need to be added once they are implemented */
10444 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10445 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10446
10447 Vector<int64_t> available_stall_durations;
10448 for (uint32_t j = 0; j < stall_formats_count; j++) {
10449 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10450 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10451 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10452 available_stall_durations.add(stall_formats[j]);
10453 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10454 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10455 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10456 }
10457 } else {
10458 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10459 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10460 available_stall_durations.add(stall_formats[j]);
10461 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10462 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10463 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10464 }
10465 }
10466 }
10467 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10468 available_stall_durations.array(),
10469 available_stall_durations.size());
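    // Layout note: each entry added above is an int64_t quadruplet of
    // (format, width, height, stall_duration_ns), e.g. a hypothetical
    // { HAL_PIXEL_FORMAT_BLOB, 4032, 3024, 33333333 }, so the vector size is
    // always a multiple of 4.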
10470
10471 //QCAMERA3_OPAQUE_RAW
10472 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10473 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10474 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10475 case LEGACY_RAW:
10476 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10477 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10478 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10479 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10480 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10481 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10482 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10483 break;
10484 case MIPI_RAW:
10485 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10486 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10487 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10488 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10489 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10490 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10491 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10492 break;
10493 default:
10494 LOGE("unknown opaque_raw_format %d",
10495 gCamCapability[cameraId]->opaque_raw_fmt);
10496 break;
10497 }
10498 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
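    // Illustrative mapping (hypothetical white levels): a MIPI_RAW sensor reporting
    // white_level == 1023 (10-bit) selects CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG and
    // advertises QCAMERA3_OPAQUE_RAW_FORMAT_MIPI, while a LEGACY_RAW sensor with
    // white_level == 4095 (12-bit) selects CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG.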
10499
10500 Vector<int32_t> strides;
10501 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10502 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10503 cam_stream_buf_plane_info_t buf_planes;
10504 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10505 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10506 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10507 &gCamCapability[cameraId]->padding_info, &buf_planes);
10508 strides.add(buf_planes.plane_info.mp[0].stride);
10509 }
10510 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10511 strides.size());
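    // Layout note: QCAMERA3_OPAQUE_RAW_STRIDES is a flat list of
    // (width, height, stride) triplets, one per supported raw dimension; the stride
    // comes from the first plane reported by mm_stream_calc_offset_raw() above.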
10512
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010513 //TBD: remove the following line once backend advertises zzHDR in feature mask
10514 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010515 //Video HDR default
10516 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10517 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010518 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010519 int32_t vhdr_mode[] = {
10520 QCAMERA3_VIDEO_HDR_MODE_OFF,
10521 QCAMERA3_VIDEO_HDR_MODE_ON};
10522
10523 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10524 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10525 vhdr_mode, vhdr_mode_count);
10526 }
10527
Thierry Strudel3d639192016-09-09 11:52:26 -070010528 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10529 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10530 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10531
10532 uint8_t isMonoOnly =
10533 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10534 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10535 &isMonoOnly, 1);
10536
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010537#ifndef USE_HAL_3_3
10538 Vector<int32_t> opaque_size;
10539 for (size_t j = 0; j < scalar_formats_count; j++) {
10540 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10541 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10542 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10543 cam_stream_buf_plane_info_t buf_planes;
10544
10545 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10546 &gCamCapability[cameraId]->padding_info, &buf_planes);
10547
10548 if (rc == 0) {
10549 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10550 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10551 opaque_size.add(buf_planes.plane_info.frame_len);
10552                 } else {
10553 LOGE("raw frame calculation failed!");
10554 }
10555 }
10556 }
10557 }
10558
10559 if ((opaque_size.size() > 0) &&
10560 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10561 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10562 else
10563        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10564#endif
10565
Thierry Strudel04e026f2016-10-10 11:27:36 -070010566 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10567 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10568 size = 0;
10569 count = CAM_IR_MODE_MAX;
10570 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10571 for (size_t i = 0; i < count; i++) {
10572 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10573 gCamCapability[cameraId]->supported_ir_modes[i]);
10574 if (NAME_NOT_FOUND != val) {
10575 avail_ir_modes[size] = (int32_t)val;
10576 size++;
10577 }
10578 }
10579 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10580 avail_ir_modes, size);
10581 }
10582
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010583 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10584 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10585 size = 0;
10586 count = CAM_AEC_CONVERGENCE_MAX;
10587 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10588 for (size_t i = 0; i < count; i++) {
10589 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10590 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10591 if (NAME_NOT_FOUND != val) {
10592 available_instant_aec_modes[size] = (int32_t)val;
10593 size++;
10594 }
10595 }
10596 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10597 available_instant_aec_modes, size);
10598 }
10599
Thierry Strudel54dc9782017-02-15 12:12:10 -080010600 int32_t sharpness_range[] = {
10601 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10602 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10603 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10604
10605 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10606 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10607 size = 0;
10608 count = CAM_BINNING_CORRECTION_MODE_MAX;
10609 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10610 for (size_t i = 0; i < count; i++) {
10611 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10612 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10613 gCamCapability[cameraId]->supported_binning_modes[i]);
10614 if (NAME_NOT_FOUND != val) {
10615 avail_binning_modes[size] = (int32_t)val;
10616 size++;
10617 }
10618 }
10619 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10620 avail_binning_modes, size);
10621 }
10622
10623 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10624 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10625 size = 0;
10626 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10627 for (size_t i = 0; i < count; i++) {
10628 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10629 gCamCapability[cameraId]->supported_aec_modes[i]);
10630 if (NAME_NOT_FOUND != val)
10631 available_aec_modes[size++] = val;
10632 }
10633 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10634 available_aec_modes, size);
10635 }
10636
10637 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10638 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10639 size = 0;
10640 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10641 for (size_t i = 0; i < count; i++) {
10642 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10643 gCamCapability[cameraId]->supported_iso_modes[i]);
10644 if (NAME_NOT_FOUND != val)
10645 available_iso_modes[size++] = val;
10646 }
10647 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10648 available_iso_modes, size);
10649 }
10650
10651 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010652 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010653 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10654 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10655 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10656
10657 int32_t available_saturation_range[4];
10658 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10659 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10660 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10661 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10662 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10663 available_saturation_range, 4);
10664
10665 uint8_t is_hdr_values[2];
10666 is_hdr_values[0] = 0;
10667 is_hdr_values[1] = 1;
10668 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10669 is_hdr_values, 2);
10670
10671 float is_hdr_confidence_range[2];
10672 is_hdr_confidence_range[0] = 0.0;
10673 is_hdr_confidence_range[1] = 1.0;
10674 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10675 is_hdr_confidence_range, 2);
10676
Emilian Peev0a972ef2017-03-16 10:25:53 +000010677 size_t eepromLength = strnlen(
10678 reinterpret_cast<const char *>(
10679 gCamCapability[cameraId]->eeprom_version_info),
10680 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10681 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010682 char easelInfo[] = ",E:N";
10683 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10684 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10685 eepromLength += sizeof(easelInfo);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010686 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
10687 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E:Y" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010688 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010689 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010690 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10691 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10692 }
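    // Example of the resulting entry (hypothetical EEPROM contents): an
    // eeprom_version_info of "VER_1.2" is published as "VER_1.2,E:Y" when Easel is
    // present and "VER_1.2,E:N" otherwise, provided the suffix fits within
    // MAX_EEPROM_VERSION_INFO_LEN.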
10693
Thierry Strudel3d639192016-09-09 11:52:26 -070010694 gStaticMetadata[cameraId] = staticInfo.release();
10695 return rc;
10696}
10697
10698/*===========================================================================
10699 * FUNCTION : makeTable
10700 *
10701 * DESCRIPTION: make a table of sizes
10702 *
10703 * PARAMETERS :
10704 *
10705 *
10706 *==========================================================================*/
10707void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10708 size_t max_size, int32_t *sizeTable)
10709{
10710 size_t j = 0;
10711 if (size > max_size) {
10712 size = max_size;
10713 }
10714 for (size_t i = 0; i < size; i++) {
10715 sizeTable[j] = dimTable[i].width;
10716 sizeTable[j+1] = dimTable[i].height;
10717 j+=2;
10718 }
10719}
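// Example for makeTable() above (hypothetical dimensions): an input of
// {(1920,1080), (1280,720)} is flattened into sizeTable = {1920, 1080, 1280, 720},
// i.e. two int32_t entries per dimension (capped at max_size dimensions).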
10720
10721/*===========================================================================
10722 * FUNCTION : makeFPSTable
10723 *
10724 * DESCRIPTION: make a table of fps ranges
10725 *
10726 * PARAMETERS :
10727 *
10728 *==========================================================================*/
10729void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10730 size_t max_size, int32_t *fpsRangesTable)
10731{
10732 size_t j = 0;
10733 if (size > max_size) {
10734 size = max_size;
10735 }
10736 for (size_t i = 0; i < size; i++) {
10737 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10738 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10739 j+=2;
10740 }
10741}
10742
10743/*===========================================================================
10744 * FUNCTION : makeOverridesList
10745 *
10746 * DESCRIPTION: make a list of scene mode overrides
10747 *
10748 * PARAMETERS :
10749 *
10750 *
10751 *==========================================================================*/
10752void QCamera3HardwareInterface::makeOverridesList(
10753 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10754 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10755{
10756    /* The daemon provides a list of overrides for all scene modes.
10757       However, we should send the framework only the overrides for the
10758       scene modes it actually supports. */
10759 size_t j = 0;
10760 if (size > max_size) {
10761 size = max_size;
10762 }
10763 size_t focus_count = CAM_FOCUS_MODE_MAX;
10764 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10765 focus_count);
10766 for (size_t i = 0; i < size; i++) {
10767 bool supt = false;
10768 size_t index = supported_indexes[i];
10769 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10770 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10771 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10772 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10773 overridesTable[index].awb_mode);
10774 if (NAME_NOT_FOUND != val) {
10775 overridesList[j+1] = (uint8_t)val;
10776 }
10777 uint8_t focus_override = overridesTable[index].af_mode;
10778 for (size_t k = 0; k < focus_count; k++) {
10779 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10780 supt = true;
10781 break;
10782 }
10783 }
10784 if (supt) {
10785 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10786 focus_override);
10787 if (NAME_NOT_FOUND != val) {
10788 overridesList[j+2] = (uint8_t)val;
10789 }
10790 } else {
10791 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10792 }
10793 j+=3;
10794 }
10795}
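// Layout note for makeOverridesList() above: overridesList receives one
// (ae_mode, awb_mode, af_mode) triplet per supported scene mode, i.e. three uint8_t
// entries per mode; unsupported focus overrides fall back to
// ANDROID_CONTROL_AF_MODE_OFF.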
10796
10797/*===========================================================================
10798 * FUNCTION : filterJpegSizes
10799 *
10800 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
10801 * DESCRIPTION: Returns the supported JPEG sizes, limited to sizes that the
10802 *              active array can be downscaled to (per the maximum downscale factor)
10803 * PARAMETERS :
10804 *
10805 * RETURN : length of jpegSizes array
10806 *==========================================================================*/
10807
10808size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10809 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10810 uint8_t downscale_factor)
10811{
10812 if (0 == downscale_factor) {
10813 downscale_factor = 1;
10814 }
10815
10816 int32_t min_width = active_array_size.width / downscale_factor;
10817 int32_t min_height = active_array_size.height / downscale_factor;
10818 size_t jpegSizesCnt = 0;
10819 if (processedSizesCnt > maxCount) {
10820 processedSizesCnt = maxCount;
10821 }
10822 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10823 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10824 jpegSizes[jpegSizesCnt] = processedSizes[i];
10825 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10826 jpegSizesCnt += 2;
10827 }
10828 }
10829 return jpegSizesCnt;
10830}
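// Worked example for filterJpegSizes() above (hypothetical numbers): with an active
// array of 4000x3000 and downscale_factor == 4, min_width/min_height become 1000/750,
// so a processed size of 1920x1080 is kept while 640x480 is filtered out.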
10831
10832/*===========================================================================
10833 * FUNCTION : computeNoiseModelEntryS
10834 *
10835 * DESCRIPTION: function to map a given sensitivity to the S noise
10836 * model parameters in the DNG noise model.
10837 *
10838 * PARAMETERS : sens : the sensor sensitivity
10839 *
10840 * RETURN     : S (sensor amplification) noise
10841 *
10842 *==========================================================================*/
10843double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10844 double s = gCamCapability[mCameraId]->gradient_S * sens +
10845 gCamCapability[mCameraId]->offset_S;
10846 return ((s < 0.0) ? 0.0 : s);
10847}
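// Sketch of how the S entry is used (assuming the framework's DNG noise model
// convention N(x) = sqrt(S * x + O) for a normalized pixel value x): with
// hypothetical calibration values gradient_S = 3.0e-06 and offset_S = 4.0e-06,
// sens = 400 yields S = 3.0e-06 * 400 + 4.0e-06 = 1.204e-03.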
10848
10849/*===========================================================================
10850 * FUNCTION : computeNoiseModelEntryO
10851 *
10852 * DESCRIPTION: function to map a given sensitivity to the O noise
10853 * model parameters in the DNG noise model.
10854 *
10855 * PARAMETERS : sens : the sensor sensitivity
10856 *
10857 * RETURN     : O (sensor readout) noise
10858 *
10859 *==========================================================================*/
10860double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10861 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10862 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10863 1.0 : (1.0 * sens / max_analog_sens);
10864 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10865 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10866 return ((o < 0.0) ? 0.0 : o);
10867}
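// Worked example for computeNoiseModelEntryO() above (hypothetical values): with
// max_analog_sensitivity = 800 and sens = 1600, digital_gain = 2.0 and
// O = gradient_O * 1600^2 + offset_O * 4.0; at or below the analog limit the gain
// is clamped to 1.0, so O reduces to gradient_O * sens^2 + offset_O.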
10868
10869/*===========================================================================
10870 * FUNCTION : getSensorSensitivity
10871 *
10872 * DESCRIPTION: convert iso_mode to an integer value
10873 *
10874 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10875 *
10876 * RETURN     : sensitivity supported by sensor
10877 *
10878 *==========================================================================*/
10879int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10880{
10881 int32_t sensitivity;
10882
10883 switch (iso_mode) {
10884 case CAM_ISO_MODE_100:
10885 sensitivity = 100;
10886 break;
10887 case CAM_ISO_MODE_200:
10888 sensitivity = 200;
10889 break;
10890 case CAM_ISO_MODE_400:
10891 sensitivity = 400;
10892 break;
10893 case CAM_ISO_MODE_800:
10894 sensitivity = 800;
10895 break;
10896 case CAM_ISO_MODE_1600:
10897 sensitivity = 1600;
10898 break;
10899 default:
10900 sensitivity = -1;
10901 break;
10902 }
10903 return sensitivity;
10904}
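// Usage note: getSensorSensitivity(CAM_ISO_MODE_400) returns 400, while any iso_mode
// not handled in the switch above returns -1, so callers must treat a negative result
// as "no fixed sensitivity available".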
10905
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010906int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010907 if (gEaselManagerClient == nullptr) {
10908 gEaselManagerClient = EaselManagerClient::create();
10909 if (gEaselManagerClient == nullptr) {
10910 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10911 return -ENODEV;
10912 }
10913 }
10914
10915 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010916 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10917 // to connect to Easel.
10918 bool doNotpowerOnEasel =
10919 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10920
10921 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010922 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10923 return OK;
10924 }
10925
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010926 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010927 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010928 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010929 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010930 return res;
10931 }
10932
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010933 EaselManagerClientOpened = true;
10934
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010935 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010936 if (res != OK) {
10937 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10938 }
10939
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010940 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010941 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010942
10943 // Expose enableZsl key only when HDR+ mode is enabled.
10944 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010945 }
10946
10947 return OK;
10948}
10949
Thierry Strudel3d639192016-09-09 11:52:26 -070010950/*===========================================================================
10951 * FUNCTION : getCamInfo
10952 *
10953 * DESCRIPTION: query camera capabilities
10954 *
10955 * PARAMETERS :
10956 * @cameraId : camera Id
10957 * @info : camera info struct to be filled in with camera capabilities
10958 *
10959 * RETURN : int type of status
10960 * NO_ERROR -- success
10961 * none-zero failure code
10962 *              non-zero failure code
10963int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10964 struct camera_info *info)
10965{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010966 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010967 int rc = 0;
10968
10969 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010970
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010971 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070010972 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010973 rc = initHdrPlusClientLocked();
10974 if (rc != OK) {
10975 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10976 pthread_mutex_unlock(&gCamLock);
10977 return rc;
10978 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010979 }
10980
Thierry Strudel3d639192016-09-09 11:52:26 -070010981 if (NULL == gCamCapability[cameraId]) {
10982 rc = initCapabilities(cameraId);
10983 if (rc < 0) {
10984 pthread_mutex_unlock(&gCamLock);
10985 return rc;
10986 }
10987 }
10988
10989 if (NULL == gStaticMetadata[cameraId]) {
10990 rc = initStaticMetadata(cameraId);
10991 if (rc < 0) {
10992 pthread_mutex_unlock(&gCamLock);
10993 return rc;
10994 }
10995 }
10996
10997 switch(gCamCapability[cameraId]->position) {
10998 case CAM_POSITION_BACK:
10999 case CAM_POSITION_BACK_AUX:
11000 info->facing = CAMERA_FACING_BACK;
11001 break;
11002
11003 case CAM_POSITION_FRONT:
11004 case CAM_POSITION_FRONT_AUX:
11005 info->facing = CAMERA_FACING_FRONT;
11006 break;
11007
11008 default:
11009 LOGE("Unknown position type %d for camera id:%d",
11010 gCamCapability[cameraId]->position, cameraId);
11011 rc = -1;
11012 break;
11013 }
11014
11015
11016 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011017#ifndef USE_HAL_3_3
11018 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
11019#else
Thierry Strudel3d639192016-09-09 11:52:26 -070011020 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011021#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011022 info->static_camera_characteristics = gStaticMetadata[cameraId];
11023
11024 //For now assume both cameras can operate independently.
11025 info->conflicting_devices = NULL;
11026 info->conflicting_devices_length = 0;
11027
11028 //resource cost is 100 * MIN(1.0, m/M),
11029 //where m is throughput requirement with maximum stream configuration
11030 //and M is CPP maximum throughput.
11031 float max_fps = 0.0;
11032 for (uint32_t i = 0;
11033 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
11034 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
11035 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
11036 }
11037 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11038 gCamCapability[cameraId]->active_array_size.width *
11039 gCamCapability[cameraId]->active_array_size.height * max_fps /
11040 gCamCapability[cameraId]->max_pixel_bandwidth;
11041 info->resource_cost = 100 * MIN(1.0, ratio);
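    // Worked example (hypothetical capability values): with MAX_PROCESSED_STREAMS = 2,
    // a 4000x3000 active array, max_fps = 30 and max_pixel_bandwidth = 1.2e9,
    // ratio = 2 * 4000 * 3000 * 30 / 1.2e9 = 0.6, giving resource_cost = 60.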
11042 LOGI("camera %d resource cost is %d", cameraId,
11043 info->resource_cost);
11044
11045 pthread_mutex_unlock(&gCamLock);
11046 return rc;
11047}
11048
11049/*===========================================================================
11050 * FUNCTION : translateCapabilityToMetadata
11051 *
11052 * DESCRIPTION: translate the capability into camera_metadata_t
11053 *
11054 * PARAMETERS : type of the request
11055 *
11056 *
11057 * RETURN : success: camera_metadata_t*
11058 * failure: NULL
11059 *
11060 *==========================================================================*/
11061camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11062{
11063 if (mDefaultMetadata[type] != NULL) {
11064 return mDefaultMetadata[type];
11065 }
11066 //first time we are handling this request
11067 //fill up the metadata structure using the wrapper class
11068 CameraMetadata settings;
11069 //translate from cam_capability_t to camera_metadata_tag_t
11070 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11071 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11072 int32_t defaultRequestID = 0;
11073 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11074
11075 /* OIS disable */
11076 char ois_prop[PROPERTY_VALUE_MAX];
11077 memset(ois_prop, 0, sizeof(ois_prop));
11078 property_get("persist.camera.ois.disable", ois_prop, "0");
11079 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11080
11081 /* Force video to use OIS */
11082 char videoOisProp[PROPERTY_VALUE_MAX];
11083 memset(videoOisProp, 0, sizeof(videoOisProp));
11084 property_get("persist.camera.ois.video", videoOisProp, "1");
11085 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011086
11087 // Hybrid AE enable/disable
11088 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11089 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11090 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
11091 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
11092
Thierry Strudel3d639192016-09-09 11:52:26 -070011093 uint8_t controlIntent = 0;
11094 uint8_t focusMode;
11095 uint8_t vsMode;
11096 uint8_t optStabMode;
11097 uint8_t cacMode;
11098 uint8_t edge_mode;
11099 uint8_t noise_red_mode;
11100 uint8_t tonemap_mode;
11101 bool highQualityModeEntryAvailable = FALSE;
11102 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011103 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011104 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11105 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011106 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011107 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011108 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011109
Thierry Strudel3d639192016-09-09 11:52:26 -070011110 switch (type) {
11111 case CAMERA3_TEMPLATE_PREVIEW:
11112 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11113 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11114 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11115 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11116 edge_mode = ANDROID_EDGE_MODE_FAST;
11117 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11118 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11119 break;
11120 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11121 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11122 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11123 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11124 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11125 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11126 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11127 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11128 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11129 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11130 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11131 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11132 highQualityModeEntryAvailable = TRUE;
11133 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11134 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11135 fastModeEntryAvailable = TRUE;
11136 }
11137 }
11138 if (highQualityModeEntryAvailable) {
11139 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11140 } else if (fastModeEntryAvailable) {
11141 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11142 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011143 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11144 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11145 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011146 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011147 break;
11148 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11149 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11150 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11151 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011152 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11153 edge_mode = ANDROID_EDGE_MODE_FAST;
11154 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11155 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11156 if (forceVideoOis)
11157 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11158 break;
11159 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11160 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11161 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11162 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011163 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11164 edge_mode = ANDROID_EDGE_MODE_FAST;
11165 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11166 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11167 if (forceVideoOis)
11168 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11169 break;
11170 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11171 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11172 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11173 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11174 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11175 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11176 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11177 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11178 break;
11179 case CAMERA3_TEMPLATE_MANUAL:
11180 edge_mode = ANDROID_EDGE_MODE_FAST;
11181 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11182 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11183 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11184 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11185 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11186 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11187 break;
11188 default:
11189 edge_mode = ANDROID_EDGE_MODE_FAST;
11190 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11191 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11192 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11193 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11194 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11195 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11196 break;
11197 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011198 // Set CAC to OFF if underlying device doesn't support
11199 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11200 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11201 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011202 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11203 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11204 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11205 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11206 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11207 }
11208 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011209 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011210 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011211
11212 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11213 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11214 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11215 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11216 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11217 || ois_disable)
11218 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11219 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011220 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011221
11222 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11223 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11224
11225 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11226 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11227
11228 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11229 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11230
11231 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11232 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11233
11234 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11235 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11236
11237 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11238 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11239
11240 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11241 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11242
11243 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11244 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11245
11246 /*flash*/
11247 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11248 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11249
11250 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11251 settings.update(ANDROID_FLASH_FIRING_POWER,
11252 &flashFiringLevel, 1);
11253
11254 /* lens */
11255 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11256 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11257
11258 if (gCamCapability[mCameraId]->filter_densities_count) {
11259 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11260 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11261 gCamCapability[mCameraId]->filter_densities_count);
11262 }
11263
11264 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11265 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11266
Thierry Strudel3d639192016-09-09 11:52:26 -070011267 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11268 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11269
11270 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11271 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11272
11273 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11274 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11275
11276 /* face detection (default to OFF) */
11277 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11278 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11279
Thierry Strudel54dc9782017-02-15 12:12:10 -080011280 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11281 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011282
11283 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11284 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11285
11286 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11287 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11288
Thierry Strudel3d639192016-09-09 11:52:26 -070011289
11290 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11291 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11292
11293     /* Exposure time (default to the minimum supported exposure time) */
11294 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11295 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11296
11297 /* frame duration */
11298 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11299 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11300
11301 /* sensitivity */
11302 static const int32_t default_sensitivity = 100;
11303 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011304#ifndef USE_HAL_3_3
11305 static const int32_t default_isp_sensitivity =
11306 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11307 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11308#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011309
11310 /*edge mode*/
11311 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11312
11313 /*noise reduction mode*/
11314 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11315
11316 /*color correction mode*/
11317 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11318 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11319
11320 /*transform matrix mode*/
11321 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11322
11323 int32_t scaler_crop_region[4];
11324 scaler_crop_region[0] = 0;
11325 scaler_crop_region[1] = 0;
11326 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11327 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11328 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11329
11330 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11331 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11332
11333 /*focus distance*/
11334 float focus_distance = 0.0;
11335 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11336
11337 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011338 /* Restrict template max_fps to 30 */
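    /* Illustrative selection over a hypothetical fps table {[15,30], [30,30], [7,30], [60,60]}:
     * the [60,60] entry is skipped for exceeding TEMPLATE_MAX_PREVIEW_FPS; preview,
     * still-capture and ZSL templates pick the widest remaining range ([7,30]), while
     * the other templates pick the highest fixed range ([30,30]). */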
Thierry Strudel3d639192016-09-09 11:52:26 -070011339 float max_range = 0.0;
11340 float max_fixed_fps = 0.0;
11341 int32_t fps_range[2] = {0, 0};
11342 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11343 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011344 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11345 TEMPLATE_MAX_PREVIEW_FPS) {
11346 continue;
11347 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011348 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11349 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11350 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11351 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11352 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11353 if (range > max_range) {
11354 fps_range[0] =
11355 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11356 fps_range[1] =
11357 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11358 max_range = range;
11359 }
11360 } else {
11361 if (range < 0.01 && max_fixed_fps <
11362 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11363 fps_range[0] =
11364 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11365 fps_range[1] =
11366 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11367 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11368 }
11369 }
11370 }
11371 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11372
11373 /*precapture trigger*/
11374 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11375 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11376
11377 /*af trigger*/
11378 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11379 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11380
11381 /* ae & af regions */
11382 int32_t active_region[] = {
11383 gCamCapability[mCameraId]->active_array_size.left,
11384 gCamCapability[mCameraId]->active_array_size.top,
11385 gCamCapability[mCameraId]->active_array_size.left +
11386 gCamCapability[mCameraId]->active_array_size.width,
11387 gCamCapability[mCameraId]->active_array_size.top +
11388 gCamCapability[mCameraId]->active_array_size.height,
11389 0};
11390 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11391 sizeof(active_region) / sizeof(active_region[0]));
11392 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11393 sizeof(active_region) / sizeof(active_region[0]));
11394
11395 /* black level lock */
11396 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11397 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11398
Thierry Strudel3d639192016-09-09 11:52:26 -070011399 //special defaults for manual template
11400 if (type == CAMERA3_TEMPLATE_MANUAL) {
11401 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11402 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11403
11404 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11405 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11406
11407 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11408 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11409
11410 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11411 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11412
11413 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11414 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11415
11416 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11417 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11418 }
11419
11420
11421 /* TNR
11422      * This is where we decide for which templates TNR will be enabled.
11423      * TNR is enabled if either the preview or the video stream requires it.
11424      * This is not to be confused with per-stream linking; that decision is
11425      * still made per session and is handled as part of stream configuration.
11426 */
11427 uint8_t tnr_enable = 0;
11428
11429 if (m_bTnrPreview || m_bTnrVideo) {
11430
11431 switch (type) {
11432 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11433 tnr_enable = 1;
11434 break;
11435
11436 default:
11437 tnr_enable = 0;
11438 break;
11439 }
11440
11441 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11442 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11443 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11444
11445 LOGD("TNR:%d with process plate %d for template:%d",
11446 tnr_enable, tnr_process_type, type);
11447 }
11448
11449 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011450 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011451 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11452
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011453 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011454 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11455
Shuzhen Wang920ea402017-05-03 08:49:39 -070011456 uint8_t related_camera_id = mCameraId;
11457 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011458
11459 /* CDS default */
11460 char prop[PROPERTY_VALUE_MAX];
11461 memset(prop, 0, sizeof(prop));
11462 property_get("persist.camera.CDS", prop, "Auto");
11463 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11464 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11465 if (CAM_CDS_MODE_MAX == cds_mode) {
11466 cds_mode = CAM_CDS_MODE_AUTO;
11467 }
11468
11469 /* Disabling CDS in templates which have TNR enabled*/
11470 if (tnr_enable)
11471 cds_mode = CAM_CDS_MODE_OFF;
11472
11473 int32_t mode = cds_mode;
11474 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011475
Thierry Strudel269c81a2016-10-12 12:13:59 -070011476 /* Manual Convergence AEC Speed is disabled by default*/
11477 float default_aec_speed = 0;
11478 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11479
11480 /* Manual Convergence AWB Speed is disabled by default*/
11481 float default_awb_speed = 0;
11482 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11483
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011484 // Set instant AEC to normal convergence by default
11485 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11486 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11487
Shuzhen Wang19463d72016-03-08 11:09:52 -080011488 /* hybrid ae */
11489 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11490
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011491 if (gExposeEnableZslKey) {
11492 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11493 }
11494
Thierry Strudel3d639192016-09-09 11:52:26 -070011495 mDefaultMetadata[type] = settings.release();
11496
11497 return mDefaultMetadata[type];
11498}
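/* Illustrative sketch (not compiled): reading one entry back from the default
 * settings built above, using the same metadata accessors this file already uses.
 * The choice of tag and the helper name are arbitrary examples. */
#if 0
static void logDefaultAeFpsRange(const camera_metadata_t *defaults)
{
    camera_metadata_ro_entry_t entry = camera_metadata_ro_entry_t();
    find_camera_metadata_ro_entry(defaults, ANDROID_CONTROL_AE_TARGET_FPS_RANGE, &entry);
    if (entry.count > 1) {
        LOGD("default AE target fps range: [%d, %d]", entry.data.i32[0], entry.data.i32[1]);
    }
}
#endif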
11499
11500/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011501 * FUNCTION : getExpectedFrameDuration
11502 *
11503 * DESCRIPTION: Extract the expected frame duration as the maximum of the
11504 * requested sensor exposure time and sensor frame duration
11505 *
11506 * PARAMETERS :
11507 * @request : request settings
11508 * @frameDuration : The maximum frame duration in nanoseconds
11509 *
11510 * RETURN : None
11511 *==========================================================================*/
11512void QCamera3HardwareInterface::getExpectedFrameDuration(
11513 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11514 if (nullptr == frameDuration) {
11515 return;
11516 }
11517
11518 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11519 find_camera_metadata_ro_entry(request,
11520 ANDROID_SENSOR_EXPOSURE_TIME,
11521 &e);
11522 if (e.count > 0) {
11523 *frameDuration = e.data.i64[0];
11524 }
11525 find_camera_metadata_ro_entry(request,
11526 ANDROID_SENSOR_FRAME_DURATION,
11527 &e);
11528 if (e.count > 0) {
11529 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11530 }
11531}
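/* Worked example (sketch, not compiled): with a manual exposure of 50 ms and a
 * requested frame duration of 33 ms, the helper above reports 50 ms, since a frame
 * cannot be shorter than its exposure. The numbers are illustrative only. */
#if 0
    nsecs_t duration = kDefaultExpectedDuration;
    // request carries ANDROID_SENSOR_EXPOSURE_TIME = 50000000 ns and
    //                 ANDROID_SENSOR_FRAME_DURATION = 33333333 ns
    getExpectedFrameDuration(request->settings, &duration /*out*/);
    // duration == 50000000 ns after the call
#endif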
11532
11533/*===========================================================================
11534 * FUNCTION : calculateMaxExpectedDuration
11535 *
11536 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11537 * current camera settings.
11538 *
11539 * PARAMETERS :
11540 * @request : request settings
11541 *
11542 * RETURN : Expected frame duration in nanoseconds.
11543 *==========================================================================*/
11544nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11545 const camera_metadata_t *request) {
11546 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11547 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11548 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11549 if (e.count == 0) {
11550 return maxExpectedDuration;
11551 }
11552
11553 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11554 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11555 }
11556
11557 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11558 return maxExpectedDuration;
11559 }
11560
11561 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11562 if (e.count == 0) {
11563 return maxExpectedDuration;
11564 }
11565
11566 switch (e.data.u8[0]) {
11567 case ANDROID_CONTROL_AE_MODE_OFF:
11568 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11569 break;
11570 default:
11571 find_camera_metadata_ro_entry(request,
11572 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11573 &e);
11574 if (e.count > 1) {
11575 maxExpectedDuration = 1e9 / e.data.i32[0]; // fps range is int32; min fps bounds the duration
11576 }
11577 break;
11578 }
11579
11580 return maxExpectedDuration;
11581}
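/* Worked example (sketch, not compiled): with 3A enabled and an AE target fps
 * range of [15, 30], the expected upper bound per frame comes from the minimum
 * fps: 1e9 / 15 ≈ 66.7 ms. With ANDROID_CONTROL_MODE or AE mode OFF, the bound
 * comes from the manual exposure/frame duration instead, as shown above.
 * Numbers are illustrative only. */
#if 0
    // e.data.i32 = {15, 30} from ANDROID_CONTROL_AE_TARGET_FPS_RANGE
    nsecs_t expected = 1e9 / 15;   // ≈ 66666666 ns
#endif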
11582
11583/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011584 * FUNCTION : setFrameParameters
11585 *
11586 * DESCRIPTION: set parameters per frame as requested in the metadata from
11587 * framework
11588 *
11589 * PARAMETERS :
11590 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011591 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011592 * @blob_request: Whether this request is a blob request or not
11593 *
11594 * RETURN : success: NO_ERROR
11595 * failure:
11596 *==========================================================================*/
11597int QCamera3HardwareInterface::setFrameParameters(
11598 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011599 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011600 int blob_request,
11601 uint32_t snapshotStreamId)
11602{
11603 /*translate from camera_metadata_t type to parm_type_t*/
11604 int rc = 0;
11605 int32_t hal_version = CAM_HAL_V3;
11606
11607 clear_metadata_buffer(mParameters);
11608 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11609 LOGE("Failed to set hal version in the parameters");
11610 return BAD_VALUE;
11611 }
11612
11613 /*we need to update the frame number in the parameters*/
11614 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11615 request->frame_number)) {
11616 LOGE("Failed to set the frame number in the parameters");
11617 return BAD_VALUE;
11618 }
11619
11620 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011621 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011622 LOGE("Failed to set stream type mask in the parameters");
11623 return BAD_VALUE;
11624 }
11625
11626 if (mUpdateDebugLevel) {
11627 uint32_t dummyDebugLevel = 0;
11628 /* The value of dummyDebugLevel is irrelevant. On
11629 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is re-read */
11630 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11631 dummyDebugLevel)) {
11632 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11633 return BAD_VALUE;
11634 }
11635 mUpdateDebugLevel = false;
11636 }
11637
11638 if(request->settings != NULL){
Emilian Peev30522a12017-08-03 14:36:33 +010011639 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011640 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11641 if (blob_request)
11642 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11643 }
11644
11645 return rc;
11646}
11647
11648/*===========================================================================
11649 * FUNCTION : setReprocParameters
11650 *
11651 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11652 * return it.
11653 *
11654 * PARAMETERS :
11655 * @request : request that needs to be serviced
11656 *
11657 * RETURN : success: NO_ERROR
11658 * failure:
11659 *==========================================================================*/
11660int32_t QCamera3HardwareInterface::setReprocParameters(
11661 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11662 uint32_t snapshotStreamId)
11663{
11664 /*translate from camera_metadata_t type to parm_type_t*/
11665 int rc = 0;
11666
11667 if (NULL == request->settings){
11668 LOGE("Reprocess settings cannot be NULL");
11669 return BAD_VALUE;
11670 }
11671
11672 if (NULL == reprocParam) {
11673 LOGE("Invalid reprocessing metadata buffer");
11674 return BAD_VALUE;
11675 }
11676 clear_metadata_buffer(reprocParam);
11677
11678 /*we need to update the frame number in the parameters*/
11679 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11680 request->frame_number)) {
11681 LOGE("Failed to set the frame number in the parameters");
11682 return BAD_VALUE;
11683 }
11684
11685 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11686 if (rc < 0) {
11687 LOGE("Failed to translate reproc request");
11688 return rc;
11689 }
11690
11691 CameraMetadata frame_settings;
11692 frame_settings = request->settings;
11693 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11694 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11695 int32_t *crop_count =
11696 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11697 int32_t *crop_data =
11698 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11699 int32_t *roi_map =
11700 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11701 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11702 cam_crop_data_t crop_meta;
11703 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11704 crop_meta.num_of_streams = 1;
11705 crop_meta.crop_info[0].crop.left = crop_data[0];
11706 crop_meta.crop_info[0].crop.top = crop_data[1];
11707 crop_meta.crop_info[0].crop.width = crop_data[2];
11708 crop_meta.crop_info[0].crop.height = crop_data[3];
11709
11710 crop_meta.crop_info[0].roi_map.left =
11711 roi_map[0];
11712 crop_meta.crop_info[0].roi_map.top =
11713 roi_map[1];
11714 crop_meta.crop_info[0].roi_map.width =
11715 roi_map[2];
11716 crop_meta.crop_info[0].roi_map.height =
11717 roi_map[3];
11718
11719 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11720 rc = BAD_VALUE;
11721 }
11722 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11723 request->input_buffer->stream,
11724 crop_meta.crop_info[0].crop.left,
11725 crop_meta.crop_info[0].crop.top,
11726 crop_meta.crop_info[0].crop.width,
11727 crop_meta.crop_info[0].crop.height);
11728 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11729 request->input_buffer->stream,
11730 crop_meta.crop_info[0].roi_map.left,
11731 crop_meta.crop_info[0].roi_map.top,
11732 crop_meta.crop_info[0].roi_map.width,
11733 crop_meta.crop_info[0].roi_map.height);
11734 } else {
11735 LOGE("Invalid reprocess crop count %d!", *crop_count);
11736 }
11737 } else {
11738 LOGE("No crop data from matching output stream");
11739 }
11740
11741 /* These settings are not needed for regular requests so handle them specially for
11742 reprocess requests; information needed for EXIF tags */
11743 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11744 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11745 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11746 if (NAME_NOT_FOUND != val) {
11747 uint32_t flashMode = (uint32_t)val;
11748 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11749 rc = BAD_VALUE;
11750 }
11751 } else {
11752 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11753 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11754 }
11755 } else {
11756 LOGH("No flash mode in reprocess settings");
11757 }
11758
11759 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11760 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11761 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11762 rc = BAD_VALUE;
11763 }
11764 } else {
11765 LOGH("No flash state in reprocess settings");
11766 }
11767
11768 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11769 uint8_t *reprocessFlags =
11770 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11771 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11772 *reprocessFlags)) {
11773 rc = BAD_VALUE;
11774 }
11775 }
11776
Thierry Strudel54dc9782017-02-15 12:12:10 -080011777 // Add exif debug data to internal metadata
11778 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11779 mm_jpeg_debug_exif_params_t *debug_params =
11780 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11781 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11782 // AE
11783 if (debug_params->ae_debug_params_valid == TRUE) {
11784 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11785 debug_params->ae_debug_params);
11786 }
11787 // AWB
11788 if (debug_params->awb_debug_params_valid == TRUE) {
11789 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11790 debug_params->awb_debug_params);
11791 }
11792 // AF
11793 if (debug_params->af_debug_params_valid == TRUE) {
11794 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11795 debug_params->af_debug_params);
11796 }
11797 // ASD
11798 if (debug_params->asd_debug_params_valid == TRUE) {
11799 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11800 debug_params->asd_debug_params);
11801 }
11802 // Stats
11803 if (debug_params->stats_debug_params_valid == TRUE) {
11804 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11805 debug_params->stats_debug_params);
11806 }
11807 // BE Stats
11808 if (debug_params->bestats_debug_params_valid == TRUE) {
11809 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11810 debug_params->bestats_debug_params);
11811 }
11812 // BHIST
11813 if (debug_params->bhist_debug_params_valid == TRUE) {
11814 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11815 debug_params->bhist_debug_params);
11816 }
11817 // 3A Tuning
11818 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11819 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11820 debug_params->q3a_tuning_debug_params);
11821 }
11822 }
11823
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011824 // Add metadata which reprocess needs
11825 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11826 cam_reprocess_info_t *repro_info =
11827 (cam_reprocess_info_t *)frame_settings.find
11828 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011829 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011830 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011831 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011832 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011833 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011834 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011835 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011836 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011837 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011838 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011839 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011840 repro_info->pipeline_flip);
11841 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11842 repro_info->af_roi);
11843 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11844 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011845 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
11846 then CAM_INTF_PARM_ROTATION has already been added in
11847 translateToHalMetadata and the HAL must keep that new rotation
11848 metadata. Otherwise, the old rotation info saved in the vendor tag
11849 is used. */
11850 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11851 CAM_INTF_PARM_ROTATION, reprocParam) {
11852 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11853 } else {
11854 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011855 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011856 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011857 }
11858
11859 /* Add additional JPEG cropping information. The app sets QCAMERA3_JPEG_ENCODE_CROP_RECT
11860 to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11861 roi.width and roi.height give the final JPEG size.
11862 For now, the HAL only checks this for reprocess requests. (See the sketch after this block.) */
11863 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11864 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11865 uint8_t *enable =
11866 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11867 if (*enable == TRUE) {
11868 int32_t *crop_data =
11869 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11870 cam_stream_crop_info_t crop_meta;
11871 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11872 crop_meta.stream_id = 0;
11873 crop_meta.crop.left = crop_data[0];
11874 crop_meta.crop.top = crop_data[1];
11875 crop_meta.crop.width = crop_data[2];
11876 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011877 // The JPEG crop roi should match cpp output size
11878 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11879 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11880 crop_meta.roi_map.left = 0;
11881 crop_meta.roi_map.top = 0;
11882 crop_meta.roi_map.width = cpp_crop->crop.width;
11883 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011884 }
11885 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11886 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011887 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011888 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011889 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11890 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011891 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011892 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11893
11894 // Add JPEG scale information
11895 cam_dimension_t scale_dim;
11896 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11897 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11898 int32_t *roi =
11899 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11900 scale_dim.width = roi[2];
11901 scale_dim.height = roi[3];
11902 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11903 scale_dim);
11904 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11905 scale_dim.width, scale_dim.height, mCameraId);
11906 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011907 }
11908 }
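    // Illustrative sketch (not compiled): the vendor-tag payload this block consumes.
    // The rectangles below are assumptions chosen for illustration; rects are
    // [left, top, width, height] and roi[2]/roi[3] give the final JPEG size.
#if 0
    uint8_t exampleCropEnable = TRUE;
    int32_t exampleCropRect[4] = {0, 0, 3000, 2000};   // crop within the CPP output
    int32_t exampleScaleRoi[4] = {0, 0, 1920, 1280};   // downscale target for HW JPEG
    frame_settings.update(QCAMERA3_JPEG_ENCODE_CROP_ENABLE, &exampleCropEnable, 1);
    frame_settings.update(QCAMERA3_JPEG_ENCODE_CROP_RECT, exampleCropRect, 4);
    frame_settings.update(QCAMERA3_JPEG_ENCODE_CROP_ROI, exampleScaleRoi, 4);
#endif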
11909
11910 return rc;
11911}
11912
11913/*===========================================================================
11914 * FUNCTION : saveRequestSettings
11915 *
11916 * DESCRIPTION: Add any settings that might have changed to the request settings
11917 * and save the settings to be applied on the frame
11918 *
11919 * PARAMETERS :
11920 * @jpegMetadata : the extracted and/or modified jpeg metadata
11921 * @request : request with initial settings
11922 *
11923 * RETURN :
11924 * camera_metadata_t* : pointer to the saved request settings
11925 *==========================================================================*/
11926camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11927 const CameraMetadata &jpegMetadata,
11928 camera3_capture_request_t *request)
11929{
11930 camera_metadata_t *resultMetadata;
11931 CameraMetadata camMetadata;
11932 camMetadata = request->settings;
11933
11934 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11935 int32_t thumbnail_size[2];
11936 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11937 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11938 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11939 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11940 }
11941
11942 if (request->input_buffer != NULL) {
11943 uint8_t reprocessFlags = 1;
11944 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11945 (uint8_t*)&reprocessFlags,
11946 sizeof(reprocessFlags));
11947 }
11948
11949 resultMetadata = camMetadata.release();
11950 return resultMetadata;
11951}
11952
11953/*===========================================================================
11954 * FUNCTION : setHalFpsRange
11955 *
11956 * DESCRIPTION: set FPS range parameter
11957 *
11958 *
11959 * PARAMETERS :
11960 * @settings : Metadata from framework
11961 * @hal_metadata: Metadata buffer
11962 *
11963 *
11964 * RETURN : success: NO_ERROR
11965 * failure:
11966 *==========================================================================*/
11967int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11968 metadata_buffer_t *hal_metadata)
11969{
11970 int32_t rc = NO_ERROR;
11971 cam_fps_range_t fps_range;
11972 fps_range.min_fps = (float)
11973 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11974 fps_range.max_fps = (float)
11975 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11976 fps_range.video_min_fps = fps_range.min_fps;
11977 fps_range.video_max_fps = fps_range.max_fps;
11978
11979 LOGD("aeTargetFpsRange fps: [%f %f]",
11980 fps_range.min_fps, fps_range.max_fps);
11981 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11982 * follows:
11983 * ---------------------------------------------------------------|
11984 * Video stream is absent in configure_streams |
11985 * (Camcorder preview before the first video record) |
11986 * ---------------------------------------------------------------|
11987 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11988 * | | | vid_min/max_fps|
11989 * ---------------------------------------------------------------|
11990 * NO | [ 30, 240] | 240 | [240, 240] |
11991 * |-------------|-------------|----------------|
11992 * | [240, 240] | 240 | [240, 240] |
11993 * ---------------------------------------------------------------|
11994 * Video stream is present in configure_streams |
11995 * ---------------------------------------------------------------|
11996 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11997 * | | | vid_min/max_fps|
11998 * ---------------------------------------------------------------|
11999 * NO | [ 30, 240] | 240 | [240, 240] |
12000 * (camcorder prev |-------------|-------------|----------------|
12001 * after video rec | [240, 240] | 240 | [240, 240] |
12002 * is stopped) | | | |
12003 * ---------------------------------------------------------------|
12004 * YES | [ 30, 240] | 240 | [240, 240] |
12005 * |-------------|-------------|----------------|
12006 * | [240, 240] | 240 | [240, 240] |
12007 * ---------------------------------------------------------------|
12008 * When Video stream is absent in configure_streams,
12009 * preview fps = sensor_fps / batchsize
12010 * Eg: for 240fps at batchSize 4, preview = 60fps
12011 * for 120fps at batchSize 4, preview = 30fps
12012 *
12013 * When video stream is present in configure_streams, preview fps is as per
12014 * the ratio of preview buffers to video buffers requested in process
12015 * capture request
12016 */
12017 mBatchSize = 0;
12018 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
12019 fps_range.min_fps = fps_range.video_max_fps;
12020 fps_range.video_min_fps = fps_range.video_max_fps;
12021 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12022 fps_range.max_fps);
12023 if (NAME_NOT_FOUND != val) {
12024 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12025 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12026 return BAD_VALUE;
12027 }
12028
12029 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12030 /* If batchmode is currently in progress and the fps changes,
12031 * set the flag to restart the sensor */
12032 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12033 (mHFRVideoFps != fps_range.max_fps)) {
12034 mNeedSensorRestart = true;
12035 }
12036 mHFRVideoFps = fps_range.max_fps;
12037 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12038 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12039 mBatchSize = MAX_HFR_BATCH_SIZE;
12040 }
12041 }
12042 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12043
12044 }
12045 } else {
12046 /* HFR mode is session param in backend/ISP. This should be reset when
12047 * in non-HFR mode */
12048 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12049 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12050 return BAD_VALUE;
12051 }
12052 }
12053 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12054 return BAD_VALUE;
12055 }
12056 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12057 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12058 return rc;
12059}
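/* Worked example (sketch, not compiled): in constrained high-speed mode with an
 * AE target range of [240, 240], and assuming PREVIEW_FPS_FOR_HFR is 30 and
 * MAX_HFR_BATCH_SIZE is 4 (both are build-time constants defined elsewhere):
 *   mHFRVideoFps = 240
 *   mBatchSize   = min(240 / 30, MAX_HFR_BATCH_SIZE) = 4
 * so preview effectively runs at 240 / 4 = 60 fps, matching the table in the
 * comment above. */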
12060
12061/*===========================================================================
12062 * FUNCTION : translateToHalMetadata
12063 *
12064 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12065 *
12066 *
12067 * PARAMETERS :
12068 * @request : request sent from framework
12069 *
12070 *
12071 * RETURN : success: NO_ERROR
12072 * failure:
12073 *==========================================================================*/
12074int QCamera3HardwareInterface::translateToHalMetadata
12075 (const camera3_capture_request_t *request,
12076 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012077 uint32_t snapshotStreamId) {
12078 if (request == nullptr || hal_metadata == nullptr) {
12079 return BAD_VALUE;
12080 }
12081
12082 int64_t minFrameDuration = getMinFrameDuration(request);
12083
12084 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12085 minFrameDuration);
12086}
12087
12088int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12089 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12090 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12091
Thierry Strudel3d639192016-09-09 11:52:26 -070012092 int rc = 0;
12093 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012094 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012095
12096 /* Do not change the order of the following list unless you know what you are
12097 * doing.
12098 * The order is laid out in such a way that parameters in the front of the table
12099 * may be used to override the parameters later in the table. Examples are:
12100 * 1. META_MODE should precede AEC/AWB/AF MODE
12101 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12102 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12103 * 4. Any mode should precede its corresponding settings
12104 */
12105 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12106 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12107 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12108 rc = BAD_VALUE;
12109 }
12110 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12111 if (rc != NO_ERROR) {
12112 LOGE("extractSceneMode failed");
12113 }
12114 }
12115
12116 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12117 uint8_t fwk_aeMode =
12118 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12119 uint8_t aeMode;
12120 int32_t redeye;
12121
12122 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12123 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012124 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12125 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012126 } else {
12127 aeMode = CAM_AE_MODE_ON;
12128 }
12129 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12130 redeye = 1;
12131 } else {
12132 redeye = 0;
12133 }
12134
12135 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12136 fwk_aeMode);
12137 if (NAME_NOT_FOUND != val) {
12138 int32_t flashMode = (int32_t)val;
12139 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12140 }
12141
12142 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12143 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12144 rc = BAD_VALUE;
12145 }
12146 }
12147
12148 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12149 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12150 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12151 fwk_whiteLevel);
12152 if (NAME_NOT_FOUND != val) {
12153 uint8_t whiteLevel = (uint8_t)val;
12154 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12155 rc = BAD_VALUE;
12156 }
12157 }
12158 }
12159
12160 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12161 uint8_t fwk_cacMode =
12162 frame_settings.find(
12163 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12164 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12165 fwk_cacMode);
12166 if (NAME_NOT_FOUND != val) {
12167 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12168 bool entryAvailable = FALSE;
12169 // Check whether Frameworks set CAC mode is supported in device or not
12170 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12171 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12172 entryAvailable = TRUE;
12173 break;
12174 }
12175 }
12176 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12177 // If entry not found then set the device supported mode instead of frameworks mode i.e,
12178 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
12179 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
12180 if (entryAvailable == FALSE) {
12181 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12182 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12183 } else {
12184 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12185 // HIGH_QUALITY is not supported, so fall back to FAST; the spec says the
12186 // underlying device implementation may be the same for both modes.
12187 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12188 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12189 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
12190 // in order to avoid the fps drop due to high quality
12191 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12192 } else {
12193 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12194 }
12195 }
12196 }
12197 LOGD("Final cacMode is %d", cacMode);
12198 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12199 rc = BAD_VALUE;
12200 }
12201 } else {
12202 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12203 }
12204 }
12205
Jason Lee84ae9972017-02-24 13:24:24 -080012206 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012207 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012208 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012209 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012210 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12211 fwk_focusMode);
12212 if (NAME_NOT_FOUND != val) {
12213 uint8_t focusMode = (uint8_t)val;
12214 LOGD("set focus mode %d", focusMode);
12215 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12216 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12217 rc = BAD_VALUE;
12218 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012219 }
12220 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012221 } else {
12222 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12223 LOGE("Focus forced to infinity %d", focusMode);
12224 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12225 rc = BAD_VALUE;
12226 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012227 }
12228
Jason Lee84ae9972017-02-24 13:24:24 -080012229 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12230 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012231 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12232 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12233 focalDistance)) {
12234 rc = BAD_VALUE;
12235 }
12236 }
12237
12238 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12239 uint8_t fwk_antibandingMode =
12240 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12241 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12242 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12243 if (NAME_NOT_FOUND != val) {
12244 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012245 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12246 if (m60HzZone) {
12247 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12248 } else {
12249 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12250 }
12251 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012252 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12253 hal_antibandingMode)) {
12254 rc = BAD_VALUE;
12255 }
12256 }
12257 }
12258
12259 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12260 int32_t expCompensation = frame_settings.find(
12261 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12262 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12263 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12264 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12265 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012266 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012267 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12268 expCompensation)) {
12269 rc = BAD_VALUE;
12270 }
12271 }
12272
12273 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12274 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12275 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12276 rc = BAD_VALUE;
12277 }
12278 }
12279 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12280 rc = setHalFpsRange(frame_settings, hal_metadata);
12281 if (rc != NO_ERROR) {
12282 LOGE("setHalFpsRange failed");
12283 }
12284 }
12285
12286 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12287 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12288 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12289 rc = BAD_VALUE;
12290 }
12291 }
12292
12293 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12294 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12295 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12296 fwk_effectMode);
12297 if (NAME_NOT_FOUND != val) {
12298 uint8_t effectMode = (uint8_t)val;
12299 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12300 rc = BAD_VALUE;
12301 }
12302 }
12303 }
12304
12305 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12306 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12307 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12308 colorCorrectMode)) {
12309 rc = BAD_VALUE;
12310 }
12311 }
12312
12313 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12314 cam_color_correct_gains_t colorCorrectGains;
12315 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12316 colorCorrectGains.gains[i] =
12317 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12318 }
12319 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12320 colorCorrectGains)) {
12321 rc = BAD_VALUE;
12322 }
12323 }
12324
12325 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12326 cam_color_correct_matrix_t colorCorrectTransform;
12327 cam_rational_type_t transform_elem;
12328 size_t num = 0;
12329 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12330 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12331 transform_elem.numerator =
12332 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12333 transform_elem.denominator =
12334 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12335 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12336 num++;
12337 }
12338 }
12339 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12340 colorCorrectTransform)) {
12341 rc = BAD_VALUE;
12342 }
12343 }
12344
12345 cam_trigger_t aecTrigger;
12346 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12347 aecTrigger.trigger_id = -1;
12348 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12349 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12350 aecTrigger.trigger =
12351 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12352 aecTrigger.trigger_id =
12353 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12354 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12355 aecTrigger)) {
12356 rc = BAD_VALUE;
12357 }
12358 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12359 aecTrigger.trigger, aecTrigger.trigger_id);
12360 }
12361
12362 /*af_trigger must come with a trigger id*/
12363 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12364 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12365 cam_trigger_t af_trigger;
12366 af_trigger.trigger =
12367 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12368 af_trigger.trigger_id =
12369 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12370 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12371 rc = BAD_VALUE;
12372 }
12373 LOGD("AfTrigger: %d AfTriggerID: %d",
12374 af_trigger.trigger, af_trigger.trigger_id);
12375 }
12376
12377 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12378 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12379 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12380 rc = BAD_VALUE;
12381 }
12382 }
12383 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12384 cam_edge_application_t edge_application;
12385 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012386
Thierry Strudel3d639192016-09-09 11:52:26 -070012387 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12388 edge_application.sharpness = 0;
12389 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012390 edge_application.sharpness =
12391 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12392 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12393 int32_t sharpness =
12394 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12395 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12396 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12397 LOGD("Setting edge mode sharpness %d", sharpness);
12398 edge_application.sharpness = sharpness;
12399 }
12400 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012401 }
12402 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12403 rc = BAD_VALUE;
12404 }
12405 }
12406
12407 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12408 int32_t respectFlashMode = 1;
12409 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12410 uint8_t fwk_aeMode =
12411 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012412 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12413 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12414 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012415 respectFlashMode = 0;
12416 LOGH("AE Mode controls flash, ignore android.flash.mode");
12417 }
12418 }
12419 if (respectFlashMode) {
12420 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12421 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12422 LOGH("flash mode after mapping %d", val);
12423 // To check: CAM_INTF_META_FLASH_MODE usage
12424 if (NAME_NOT_FOUND != val) {
12425 uint8_t flashMode = (uint8_t)val;
12426 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12427 rc = BAD_VALUE;
12428 }
12429 }
12430 }
12431 }
12432
12433 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12434 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12435 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12436 rc = BAD_VALUE;
12437 }
12438 }
12439
12440 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12441 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12442 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12443 flashFiringTime)) {
12444 rc = BAD_VALUE;
12445 }
12446 }
12447
12448 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12449 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12450 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12451 hotPixelMode)) {
12452 rc = BAD_VALUE;
12453 }
12454 }
12455
12456 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12457 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12458 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12459 lensAperture)) {
12460 rc = BAD_VALUE;
12461 }
12462 }
12463
12464 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12465 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12466 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12467 filterDensity)) {
12468 rc = BAD_VALUE;
12469 }
12470 }
12471
12472 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12473 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12474 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12475 focalLength)) {
12476 rc = BAD_VALUE;
12477 }
12478 }
12479
12480 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12481 uint8_t optStabMode =
12482 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12483 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12484 optStabMode)) {
12485 rc = BAD_VALUE;
12486 }
12487 }
12488
12489 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12490 uint8_t videoStabMode =
12491 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12492 LOGD("videoStabMode from APP = %d", videoStabMode);
12493 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12494 videoStabMode)) {
12495 rc = BAD_VALUE;
12496 }
12497 }
12498
12499
12500 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12501 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12502 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12503 noiseRedMode)) {
12504 rc = BAD_VALUE;
12505 }
12506 }
12507
12508 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12509 float reprocessEffectiveExposureFactor =
12510 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12511 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12512 reprocessEffectiveExposureFactor)) {
12513 rc = BAD_VALUE;
12514 }
12515 }
12516
12517 cam_crop_region_t scalerCropRegion;
12518 bool scalerCropSet = false;
12519 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12520 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12521 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12522 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12523 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12524
12525 // Map coordinate system from active array to sensor output.
12526 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12527 scalerCropRegion.width, scalerCropRegion.height);
12528
12529 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12530 scalerCropRegion)) {
12531 rc = BAD_VALUE;
12532 }
12533 scalerCropSet = true;
12534 }
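    // Illustrative sketch (not compiled): the kind of transform toSensor() applies.
    // This assumes a pure linear scale from active-array to sensor-output
    // coordinates; the actual QCamera3CropRegionMapper may also clamp and offset.
#if 0
    int32_t sensorLeft   = activeLeft * sensorOutW / activeArrayW;
    int32_t sensorTop    = activeTop  * sensorOutH / activeArrayH;
    int32_t sensorWidth  = regionW    * sensorOutW / activeArrayW;
    int32_t sensorHeight = regionH    * sensorOutH / activeArrayH;
#endif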
12535
12536 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12537 int64_t sensorExpTime =
12538 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12539 LOGD("setting sensorExpTime %lld", sensorExpTime);
12540 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12541 sensorExpTime)) {
12542 rc = BAD_VALUE;
12543 }
12544 }
12545
12546 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12547 int64_t sensorFrameDuration =
12548 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012549 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12550 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12551 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12552 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12553 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12554 sensorFrameDuration)) {
12555 rc = BAD_VALUE;
12556 }
12557 }
12558
12559 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12560 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12561 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12562 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12563 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12564 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12565 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12566 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12567 sensorSensitivity)) {
12568 rc = BAD_VALUE;
12569 }
12570 }
12571
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012572#ifndef USE_HAL_3_3
12573 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12574 int32_t ispSensitivity =
12575 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12576 if (ispSensitivity <
12577 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12578 ispSensitivity =
12579 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12580 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12581 }
12582 if (ispSensitivity >
12583 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12584 ispSensitivity =
12585 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12586 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12587 }
12588 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12589 ispSensitivity)) {
12590 rc = BAD_VALUE;
12591 }
12592 }
12593#endif
12594
Thierry Strudel3d639192016-09-09 11:52:26 -070012595 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12596 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12597 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12598 rc = BAD_VALUE;
12599 }
12600 }
12601
12602 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12603 uint8_t fwk_facedetectMode =
12604 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12605
12606 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12607 fwk_facedetectMode);
12608
12609 if (NAME_NOT_FOUND != val) {
12610 uint8_t facedetectMode = (uint8_t)val;
12611 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12612 facedetectMode)) {
12613 rc = BAD_VALUE;
12614 }
12615 }
12616 }
12617
Thierry Strudel54dc9782017-02-15 12:12:10 -080012618 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012619 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012620 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012621 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12622 histogramMode)) {
12623 rc = BAD_VALUE;
12624 }
12625 }
12626
12627 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12628 uint8_t sharpnessMapMode =
12629 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12630 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12631 sharpnessMapMode)) {
12632 rc = BAD_VALUE;
12633 }
12634 }
12635
12636 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12637 uint8_t tonemapMode =
12638 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12639 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12640 rc = BAD_VALUE;
12641 }
12642 }
12643 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12644 /*All tonemap channels will have the same number of points*/
12645 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12646 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12647 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12648 cam_rgb_tonemap_curves tonemapCurves;
12649 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12650 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12651 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12652 tonemapCurves.tonemap_points_cnt,
12653 CAM_MAX_TONEMAP_CURVE_SIZE);
12654 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12655 }
12656
12657 /* ch0 = G*/
12658 size_t point = 0;
12659 cam_tonemap_curve_t tonemapCurveGreen;
12660 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12661 for (size_t j = 0; j < 2; j++) {
12662 tonemapCurveGreen.tonemap_points[i][j] =
12663 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12664 point++;
12665 }
12666 }
12667 tonemapCurves.curves[0] = tonemapCurveGreen;
12668
12669 /* ch 1 = B */
12670 point = 0;
12671 cam_tonemap_curve_t tonemapCurveBlue;
12672 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12673 for (size_t j = 0; j < 2; j++) {
12674 tonemapCurveBlue.tonemap_points[i][j] =
12675 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12676 point++;
12677 }
12678 }
12679 tonemapCurves.curves[1] = tonemapCurveBlue;
12680
12681 /* ch 2 = R */
12682 point = 0;
12683 cam_tonemap_curve_t tonemapCurveRed;
12684 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12685 for (size_t j = 0; j < 2; j++) {
12686 tonemapCurveRed.tonemap_points[i][j] =
12687 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12688 point++;
12689 }
12690 }
12691 tonemapCurves.curves[2] = tonemapCurveRed;
12692
12693 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12694 tonemapCurves)) {
12695 rc = BAD_VALUE;
12696 }
12697 }
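    // Illustrative sketch (not compiled): the framework packs each tonemap curve as
    // interleaved (Pin, Pout) floats, which the loops above split into
    // tonemap_points[i][0] = Pin and tonemap_points[i][1] = Pout. The identity curve
    // below is an example payload, not a value taken from this HAL.
#if 0
    float identityCurve[] = {0.0f, 0.0f, 1.0f, 1.0f};   // two points: (0,0) and (1,1)
    frame_settings.update(ANDROID_TONEMAP_CURVE_GREEN, identityCurve, 4);
    frame_settings.update(ANDROID_TONEMAP_CURVE_BLUE,  identityCurve, 4);
    frame_settings.update(ANDROID_TONEMAP_CURVE_RED,   identityCurve, 4);
    // -> tonemap_points_cnt = 2 for each channel
#endif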
12698
12699 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12700 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12701 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12702 captureIntent)) {
12703 rc = BAD_VALUE;
12704 }
12705 }
12706
12707 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12708 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12709 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12710 blackLevelLock)) {
12711 rc = BAD_VALUE;
12712 }
12713 }
12714
12715 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12716 uint8_t lensShadingMapMode =
12717 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12718 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12719 lensShadingMapMode)) {
12720 rc = BAD_VALUE;
12721 }
12722 }
12723
12724 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12725 cam_area_t roi;
12726 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012727 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012728
12729 // Map coordinate system from active array to sensor output.
12730 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12731 roi.rect.height);
12732
12733 if (scalerCropSet) {
12734 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12735 }
12736 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12737 rc = BAD_VALUE;
12738 }
12739 }
12740
12741 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12742 cam_area_t roi;
12743 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012744 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012745
12746 // Map coordinate system from active array to sensor output.
12747 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12748 roi.rect.height);
12749
12750 if (scalerCropSet) {
12751 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12752 }
12753 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12754 rc = BAD_VALUE;
12755 }
12756 }
12757
12758 // CDS for non-HFR non-video mode
12759 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12760 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12761 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12762 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12763 LOGE("Invalid CDS mode %d!", *fwk_cds);
12764 } else {
12765 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12766 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12767 rc = BAD_VALUE;
12768 }
12769 }
12770 }
12771
Thierry Strudel04e026f2016-10-10 11:27:36 -070012772 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012773 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012774 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012775 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12776 }
12777 if (m_bVideoHdrEnabled)
12778 vhdr = CAM_VIDEO_HDR_MODE_ON;
12779
Thierry Strudel54dc9782017-02-15 12:12:10 -080012780 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12781
12782 if(vhdr != curr_hdr_state)
12783 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12784
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012785 rc = setVideoHdrMode(mParameters, vhdr);
12786 if (rc != NO_ERROR) {
12787        LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012788 }
12789
12790 //IR
12791 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12792 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12793 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012794 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12795 uint8_t isIRon = 0;
12796
12797        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012798 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12799 LOGE("Invalid IR mode %d!", fwk_ir);
12800 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012801 if(isIRon != curr_ir_state )
12802 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12803
Thierry Strudel04e026f2016-10-10 11:27:36 -070012804 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12805 CAM_INTF_META_IR_MODE, fwk_ir)) {
12806 rc = BAD_VALUE;
12807 }
12808 }
12809 }
12810
Thierry Strudel54dc9782017-02-15 12:12:10 -080012811 //Binning Correction Mode
12812 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12813 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12814 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12815 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12816 || (0 > fwk_binning_correction)) {
12817 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12818 } else {
12819 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12820 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12821 rc = BAD_VALUE;
12822 }
12823 }
12824 }
12825
Thierry Strudel269c81a2016-10-12 12:13:59 -070012826 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12827 float aec_speed;
12828 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12829 LOGD("AEC Speed :%f", aec_speed);
12830 if ( aec_speed < 0 ) {
12831            LOGE("Invalid AEC convergence speed %f!", aec_speed);
12832 } else {
12833 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12834 aec_speed)) {
12835 rc = BAD_VALUE;
12836 }
12837 }
12838 }
12839
12840 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12841 float awb_speed;
12842 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12843 LOGD("AWB Speed :%f", awb_speed);
12844 if ( awb_speed < 0 ) {
12845            LOGE("Invalid AWB convergence speed %f!", awb_speed);
12846 } else {
12847 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12848 awb_speed)) {
12849 rc = BAD_VALUE;
12850 }
12851 }
12852 }
12853
Thierry Strudel3d639192016-09-09 11:52:26 -070012854 // TNR
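    // Both the enable flag and the process-plate type vendor tags must be
    // present for temporal noise reduction to be applied.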
12855 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12856 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12857 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012858 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012859 cam_denoise_param_t tnr;
12860 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12861 tnr.process_plates =
12862 (cam_denoise_process_type_t)frame_settings.find(
12863 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12864 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012865
12866 if(b_TnrRequested != curr_tnr_state)
12867 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12868
Thierry Strudel3d639192016-09-09 11:52:26 -070012869 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12870 rc = BAD_VALUE;
12871 }
12872 }
12873
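    // Exposure metering: the vendor tag value maps directly onto the backend
    // AEC algorithm type.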
Thierry Strudel54dc9782017-02-15 12:12:10 -080012874 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012875 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012876 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012877 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12878 *exposure_metering_mode)) {
12879 rc = BAD_VALUE;
12880 }
12881 }
12882
Thierry Strudel3d639192016-09-09 11:52:26 -070012883 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12884 int32_t fwk_testPatternMode =
12885 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12886 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12887 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12888
12889 if (NAME_NOT_FOUND != testPatternMode) {
12890 cam_test_pattern_data_t testPatternData;
12891 memset(&testPatternData, 0, sizeof(testPatternData));
12892 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12893 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12894 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12895 int32_t *fwk_testPatternData =
12896 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12897 testPatternData.r = fwk_testPatternData[0];
12898 testPatternData.b = fwk_testPatternData[3];
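                // The framework supplies the solid-color pattern as [R, G_even, G_odd, B];
                // map the two green samples onto Gr/Gb based on the sensor's CFA layout.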
12899 switch (gCamCapability[mCameraId]->color_arrangement) {
12900 case CAM_FILTER_ARRANGEMENT_RGGB:
12901 case CAM_FILTER_ARRANGEMENT_GRBG:
12902 testPatternData.gr = fwk_testPatternData[1];
12903 testPatternData.gb = fwk_testPatternData[2];
12904 break;
12905 case CAM_FILTER_ARRANGEMENT_GBRG:
12906 case CAM_FILTER_ARRANGEMENT_BGGR:
12907 testPatternData.gr = fwk_testPatternData[2];
12908 testPatternData.gb = fwk_testPatternData[1];
12909 break;
12910 default:
12911 LOGE("color arrangement %d is not supported",
12912 gCamCapability[mCameraId]->color_arrangement);
12913 break;
12914 }
12915 }
12916 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12917 testPatternData)) {
12918 rc = BAD_VALUE;
12919 }
12920 } else {
12921 LOGE("Invalid framework sensor test pattern mode %d",
12922 fwk_testPatternMode);
12923 }
12924 }
12925
12926 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12927 size_t count = 0;
12928 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12929 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12930 gps_coords.data.d, gps_coords.count, count);
12931 if (gps_coords.count != count) {
12932 rc = BAD_VALUE;
12933 }
12934 }
12935
12936 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12937 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12938 size_t count = 0;
12939 const char *gps_methods_src = (const char *)
12940 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12941 memset(gps_methods, '\0', sizeof(gps_methods));
12942 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12943 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12944 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12945 if (GPS_PROCESSING_METHOD_SIZE != count) {
12946 rc = BAD_VALUE;
12947 }
12948 }
12949
12950 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12951 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12952 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12953 gps_timestamp)) {
12954 rc = BAD_VALUE;
12955 }
12956 }
12957
12958 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12959 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12960 cam_rotation_info_t rotation_info;
12961 if (orientation == 0) {
12962 rotation_info.rotation = ROTATE_0;
12963 } else if (orientation == 90) {
12964 rotation_info.rotation = ROTATE_90;
12965 } else if (orientation == 180) {
12966 rotation_info.rotation = ROTATE_180;
12967 } else if (orientation == 270) {
12968 rotation_info.rotation = ROTATE_270;
12969        } else {
            // Default defensively so rotation_info.rotation is never left
            // uninitialized if an unexpected JPEG orientation value comes in.
            rotation_info.rotation = ROTATE_0;
        }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012970 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012971 rotation_info.streamId = snapshotStreamId;
12972 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12973 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12974 rc = BAD_VALUE;
12975 }
12976 }
12977
12978 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12979 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12980 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12981 rc = BAD_VALUE;
12982 }
12983 }
12984
12985 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12986 uint32_t thumb_quality = (uint32_t)
12987 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12988 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12989 thumb_quality)) {
12990 rc = BAD_VALUE;
12991 }
12992 }
12993
12994 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12995 cam_dimension_t dim;
12996 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12997 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12998 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12999 rc = BAD_VALUE;
13000 }
13001 }
13002
13003 // Internal metadata
13004 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13005 size_t count = 0;
13006 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13007 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13008 privatedata.data.i32, privatedata.count, count);
13009 if (privatedata.count != count) {
13010 rc = BAD_VALUE;
13011 }
13012 }
13013
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013014 // ISO/Exposure Priority
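    // When ISO or exposure-time priority is selected, apply the manual value
    // and enable ZSL mode; if the priority tags are absent, ZSL mode is turned off.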
13015 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13016 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13017 cam_priority_mode_t mode =
13018 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13019 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13020 cam_intf_parm_manual_3a_t use_iso_exp_pty;
13021 use_iso_exp_pty.previewOnly = FALSE;
13022 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13023 use_iso_exp_pty.value = *ptr;
13024
13025 if(CAM_ISO_PRIORITY == mode) {
13026 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13027 use_iso_exp_pty)) {
13028 rc = BAD_VALUE;
13029 }
13030 }
13031 else {
13032 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13033 use_iso_exp_pty)) {
13034 rc = BAD_VALUE;
13035 }
13036 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080013037
13038 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13039 rc = BAD_VALUE;
13040 }
13041 }
13042 } else {
13043 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13044 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013045 }
13046 }
13047
13048 // Saturation
13049 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13050 int32_t* use_saturation =
13051 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13052 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13053 rc = BAD_VALUE;
13054 }
13055 }
13056
Thierry Strudel3d639192016-09-09 11:52:26 -070013057 // EV step
13058 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13059 gCamCapability[mCameraId]->exp_compensation_step)) {
13060 rc = BAD_VALUE;
13061 }
13062
13063 // CDS info
13064 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13065 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13066 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13067
13068 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13069 CAM_INTF_META_CDS_DATA, *cdsData)) {
13070 rc = BAD_VALUE;
13071 }
13072 }
13073
Shuzhen Wang19463d72016-03-08 11:09:52 -080013074 // Hybrid AE
13075 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13076 uint8_t *hybrid_ae = (uint8_t *)
13077 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
13078
13079 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13080 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13081 rc = BAD_VALUE;
13082 }
13083 }
13084
Shuzhen Wang14415f52016-11-16 18:26:18 -080013085 // Histogram
13086 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13087 uint8_t histogramMode =
13088 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13089 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13090 histogramMode)) {
13091 rc = BAD_VALUE;
13092 }
13093 }
13094
13095 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13096 int32_t histogramBins =
13097 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13098 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13099 histogramBins)) {
13100 rc = BAD_VALUE;
13101 }
13102 }
13103
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013104 // Tracking AF
13105 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13106 uint8_t trackingAfTrigger =
13107 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13108 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13109 trackingAfTrigger)) {
13110 rc = BAD_VALUE;
13111 }
13112 }
13113
Chien-Yu Chendbd619b2017-08-04 17:50:11 -070013114 // Makernote
13115 camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
13116 if (entry.count != 0) {
13117 if (entry.count <= MAX_MAKERNOTE_LENGTH) {
13118 cam_makernote_t makernote;
13119 makernote.length = entry.count;
13120 memcpy(makernote.data, entry.data.u8, makernote.length);
13121 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
13122 rc = BAD_VALUE;
13123 }
13124 } else {
13125            ALOGE("%s: Makernote length %zu is larger than %d", __FUNCTION__, entry.count,
13126 MAX_MAKERNOTE_LENGTH);
13127 rc = BAD_VALUE;
13128 }
13129 }
13130
Thierry Strudel3d639192016-09-09 11:52:26 -070013131 return rc;
13132}
13133
13134/*===========================================================================
13135 * FUNCTION : captureResultCb
13136 *
13137 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13138 *
13139 * PARAMETERS :
13140 * @frame : frame information from mm-camera-interface
13141 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13142 * @userdata: userdata
13143 *
13144 * RETURN : NONE
13145 *==========================================================================*/
13146void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13147 camera3_stream_buffer_t *buffer,
13148 uint32_t frame_number, bool isInputBuffer, void *userdata)
13149{
13150 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13151 if (hw == NULL) {
13152 LOGE("Invalid hw %p", hw);
13153 return;
13154 }
13155
13156 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13157 return;
13158}
13159
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013160/*===========================================================================
13161 * FUNCTION : setBufferErrorStatus
13162 *
13163 * DESCRIPTION: Callback handler for channels to report any buffer errors
13164 *
13165 * PARAMETERS :
13166 * @ch : Channel on which buffer error is reported from
13167 * @frame_number : frame number on which buffer error is reported on
13168 * @buffer_status : buffer error status
13169 * @userdata: userdata
13170 *
13171 * RETURN : NONE
13172 *==========================================================================*/
13173void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13174 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13175{
13176 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13177 if (hw == NULL) {
13178 LOGE("Invalid hw %p", hw);
13179 return;
13180 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013181
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013182 hw->setBufferErrorStatus(ch, frame_number, err);
13183 return;
13184}
13185
13186void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13187 uint32_t frameNumber, camera3_buffer_status_t err)
13188{
13189 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13190 pthread_mutex_lock(&mMutex);
13191
13192 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13193 if (req.frame_number != frameNumber)
13194 continue;
13195 for (auto& k : req.mPendingBufferList) {
13196 if(k.stream->priv == ch) {
13197 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13198 }
13199 }
13200 }
13201
13202 pthread_mutex_unlock(&mMutex);
13203 return;
13204}
Thierry Strudel3d639192016-09-09 11:52:26 -070013205/*===========================================================================
13206 * FUNCTION : initialize
13207 *
13208 * DESCRIPTION: Pass framework callback pointers to HAL
13209 *
13210 * PARAMETERS :
13211 *
13212 *
13213 * RETURN : Success : 0
13214 * Failure: -ENODEV
13215 *==========================================================================*/
13216
13217int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13218 const camera3_callback_ops_t *callback_ops)
13219{
13220 LOGD("E");
13221 QCamera3HardwareInterface *hw =
13222 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13223 if (!hw) {
13224 LOGE("NULL camera device");
13225 return -ENODEV;
13226 }
13227
13228 int rc = hw->initialize(callback_ops);
13229 LOGD("X");
13230 return rc;
13231}
13232
13233/*===========================================================================
13234 * FUNCTION : configure_streams
13235 *
13236 * DESCRIPTION: Configure the output streams requested by the camera framework
13237 *
13238 * PARAMETERS :
13239 *
13240 *
13241 * RETURN : Success: 0
13242 * Failure: -EINVAL (if stream configuration is invalid)
13243 * -ENODEV (fatal error)
13244 *==========================================================================*/
13245
13246int QCamera3HardwareInterface::configure_streams(
13247 const struct camera3_device *device,
13248 camera3_stream_configuration_t *stream_list)
13249{
13250 LOGD("E");
13251 QCamera3HardwareInterface *hw =
13252 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13253 if (!hw) {
13254 LOGE("NULL camera device");
13255 return -ENODEV;
13256 }
13257 int rc = hw->configureStreams(stream_list);
13258 LOGD("X");
13259 return rc;
13260}
13261
13262/*===========================================================================
13263 * FUNCTION : construct_default_request_settings
13264 *
13265 * DESCRIPTION: Configure a settings buffer to meet the required use case
13266 *
13267 * PARAMETERS :
13268 *
13269 *
13270 * RETURN : Success: Return valid metadata
13271 * Failure: Return NULL
13272 *==========================================================================*/
13273const camera_metadata_t* QCamera3HardwareInterface::
13274 construct_default_request_settings(const struct camera3_device *device,
13275 int type)
13276{
13277
13278 LOGD("E");
13279 camera_metadata_t* fwk_metadata = NULL;
13280 QCamera3HardwareInterface *hw =
13281 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13282 if (!hw) {
13283 LOGE("NULL camera device");
13284 return NULL;
13285 }
13286
13287 fwk_metadata = hw->translateCapabilityToMetadata(type);
13288
13289 LOGD("X");
13290 return fwk_metadata;
13291}
13292
13293/*===========================================================================
13294 * FUNCTION : process_capture_request
13295 *
13296 * DESCRIPTION: Submit a capture request from the framework for processing
13297 *
13298 * PARAMETERS :
13299 *
13300 *
13301 * RETURN :
13302 *==========================================================================*/
13303int QCamera3HardwareInterface::process_capture_request(
13304 const struct camera3_device *device,
13305 camera3_capture_request_t *request)
13306{
13307 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013308 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013309 QCamera3HardwareInterface *hw =
13310 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13311 if (!hw) {
13312 LOGE("NULL camera device");
13313 return -EINVAL;
13314 }
13315
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013316 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013317 LOGD("X");
13318 return rc;
13319}
13320
13321/*===========================================================================
13322 * FUNCTION : dump
13323 *
13324 * DESCRIPTION: Dump HAL state and debug info to the given file descriptor
13325 *
13326 * PARAMETERS :
13327 *
13328 *
13329 * RETURN :
13330 *==========================================================================*/
13331
13332void QCamera3HardwareInterface::dump(
13333 const struct camera3_device *device, int fd)
13334{
13335 /* Log level property is read when "adb shell dumpsys media.camera" is
13336 called so that the log level can be controlled without restarting
13337 the media server */
13338 getLogLevel();
13339
13340 LOGD("E");
13341 QCamera3HardwareInterface *hw =
13342 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13343 if (!hw) {
13344 LOGE("NULL camera device");
13345 return;
13346 }
13347
13348 hw->dump(fd);
13349 LOGD("X");
13350 return;
13351}
13352
13353/*===========================================================================
13354 * FUNCTION : flush
13355 *
13356 * DESCRIPTION: Flush all in-flight requests and return the device to an idle state
13357 *
13358 * PARAMETERS :
13359 *
13360 *
13361 * RETURN :
13362 *==========================================================================*/
13363
13364int QCamera3HardwareInterface::flush(
13365 const struct camera3_device *device)
13366{
13367 int rc;
13368 LOGD("E");
13369 QCamera3HardwareInterface *hw =
13370 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13371 if (!hw) {
13372 LOGE("NULL camera device");
13373 return -EINVAL;
13374 }
13375
13376 pthread_mutex_lock(&hw->mMutex);
13377 // Validate current state
13378 switch (hw->mState) {
13379 case STARTED:
13380 /* valid state */
13381 break;
13382
13383 case ERROR:
13384 pthread_mutex_unlock(&hw->mMutex);
13385 hw->handleCameraDeviceError();
13386 return -ENODEV;
13387
13388 default:
13389 LOGI("Flush returned during state %d", hw->mState);
13390 pthread_mutex_unlock(&hw->mMutex);
13391 return 0;
13392 }
13393 pthread_mutex_unlock(&hw->mMutex);
13394
13395 rc = hw->flush(true /* restart channels */ );
13396 LOGD("X");
13397 return rc;
13398}
13399
13400/*===========================================================================
13401 * FUNCTION : close_camera_device
13402 *
13403 * DESCRIPTION: Close the camera device and release the HAL instance
13404 *
13405 * PARAMETERS :
13406 *
13407 *
13408 * RETURN :
13409 *==========================================================================*/
13410int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13411{
13412 int ret = NO_ERROR;
13413 QCamera3HardwareInterface *hw =
13414 reinterpret_cast<QCamera3HardwareInterface *>(
13415 reinterpret_cast<camera3_device_t *>(device)->priv);
13416 if (!hw) {
13417 LOGE("NULL camera device");
13418 return BAD_VALUE;
13419 }
13420
13421 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13422 delete hw;
13423 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013424 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013425 return ret;
13426}
13427
13428/*===========================================================================
13429 * FUNCTION : getWaveletDenoiseProcessPlate
13430 *
13431 * DESCRIPTION: query wavelet denoise process plate
13432 *
13433 * PARAMETERS : None
13434 *
13435 * RETURN : WNR process plate value
13436 *==========================================================================*/
13437cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13438{
13439 char prop[PROPERTY_VALUE_MAX];
13440 memset(prop, 0, sizeof(prop));
13441 property_get("persist.denoise.process.plates", prop, "0");
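    // Property values map to process plates: 0 = YCbCr plane, 1 = CbCr only,
    // 2 = streamlined YCbCr, 3 = streamlined CbCr (see switch below).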
13442 int processPlate = atoi(prop);
13443 switch(processPlate) {
13444 case 0:
13445 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13446 case 1:
13447 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13448 case 2:
13449 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13450 case 3:
13451 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13452 default:
13453 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13454 }
13455}
13456
13457
13458/*===========================================================================
13459 * FUNCTION : getTemporalDenoiseProcessPlate
13460 *
13461 * DESCRIPTION: query temporal denoise process plate
13462 *
13463 * PARAMETERS : None
13464 *
13465 * RETURN : TNR process plate value
13466 *==========================================================================*/
13467cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13468{
13469 char prop[PROPERTY_VALUE_MAX];
13470 memset(prop, 0, sizeof(prop));
13471 property_get("persist.tnr.process.plates", prop, "0");
13472 int processPlate = atoi(prop);
13473 switch(processPlate) {
13474 case 0:
13475 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13476 case 1:
13477 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13478 case 2:
13479 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13480 case 3:
13481 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13482 default:
13483 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13484 }
13485}
13486
13487
13488/*===========================================================================
13489 * FUNCTION : extractSceneMode
13490 *
13491 * DESCRIPTION: Extract scene mode from frameworks set metadata
13492 *
13493 * PARAMETERS :
13494 * @frame_settings: CameraMetadata reference
13495 * @metaMode: ANDROID_CONTROL_MODE
13496 * @hal_metadata: hal metadata structure
13497 *
13498 * RETURN : int32_t type of status, NO_ERROR on success
13499 *==========================================================================*/
13500int32_t QCamera3HardwareInterface::extractSceneMode(
13501 const CameraMetadata &frame_settings, uint8_t metaMode,
13502 metadata_buffer_t *hal_metadata)
13503{
13504 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013505 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13506
13507 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13508 LOGD("Ignoring control mode OFF_KEEP_STATE");
13509 return NO_ERROR;
13510 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013511
13512 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13513 camera_metadata_ro_entry entry =
13514 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13515 if (0 == entry.count)
13516 return rc;
13517
13518 uint8_t fwk_sceneMode = entry.data.u8[0];
13519
13520 int val = lookupHalName(SCENE_MODES_MAP,
13521 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13522 fwk_sceneMode);
13523 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013524 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013525 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013526 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013527 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013528
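    // For the HDR scene mode, configure sensor HDR first; the multi-frame
    // bracketing HDR below is used only when sensor HDR is not enabled.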
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013529 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13530 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13531 }
13532
13533 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13534        if (sceneMode == CAM_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013535 cam_hdr_param_t hdr_params;
13536 hdr_params.hdr_enable = 1;
13537 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13538 hdr_params.hdr_need_1x = false;
13539 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13540 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13541 rc = BAD_VALUE;
13542 }
13543 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013544
Thierry Strudel3d639192016-09-09 11:52:26 -070013545 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13546 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13547 rc = BAD_VALUE;
13548 }
13549 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013550
13551 if (mForceHdrSnapshot) {
13552 cam_hdr_param_t hdr_params;
13553 hdr_params.hdr_enable = 1;
13554 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13555 hdr_params.hdr_need_1x = false;
13556 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13557 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13558 rc = BAD_VALUE;
13559 }
13560 }
13561
Thierry Strudel3d639192016-09-09 11:52:26 -070013562 return rc;
13563}
13564
13565/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013566 * FUNCTION : setVideoHdrMode
13567 *
13568 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13569 *
13570 * PARAMETERS :
13571 * @hal_metadata: hal metadata structure
13572 * @vhdr: video HDR mode requested via QCAMERA3_VIDEO_HDR_MODE
13573 *
13574 * RETURN : int32_t type of status, NO_ERROR on success
13575 *==========================================================================*/
13576int32_t QCamera3HardwareInterface::setVideoHdrMode(
13577 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13578{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013579 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13580 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13581 }
13582
13583 LOGE("Invalid Video HDR mode %d!", vhdr);
13584 return BAD_VALUE;
13585}
13586
13587/*===========================================================================
13588 * FUNCTION : setSensorHDR
13589 *
13590 * DESCRIPTION: Enable/disable sensor HDR.
13591 *
13592 * PARAMETERS :
13593 * @hal_metadata: hal metadata structure
13594 * @enable: boolean whether to enable/disable sensor HDR
13595 *
13596 * RETURN : int32_t type of status, NO_ERROR on success
13597 *==========================================================================*/
13598int32_t QCamera3HardwareInterface::setSensorHDR(
13599 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13600{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013601 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013602 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13603
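    // The sensor HDR flavor (in-sensor, zigzag, staggered) comes from the
    // persist.camera.sensor.hdr property and is validated against the sensor
    // capability mask below.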
13604 if (enable) {
13605 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13606 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13607 #ifdef _LE_CAMERA_
13608 //Default to staggered HDR for IOT
13609 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13610 #else
13611 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13612 #endif
13613 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13614 }
13615
13616 bool isSupported = false;
13617 switch (sensor_hdr) {
13618 case CAM_SENSOR_HDR_IN_SENSOR:
13619 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13620 CAM_QCOM_FEATURE_SENSOR_HDR) {
13621 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013622 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013623 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013624 break;
13625 case CAM_SENSOR_HDR_ZIGZAG:
13626 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13627 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13628 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013629 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013630 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013631 break;
13632 case CAM_SENSOR_HDR_STAGGERED:
13633 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13634 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13635 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013636 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013637 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013638 break;
13639 case CAM_SENSOR_HDR_OFF:
13640 isSupported = true;
13641 LOGD("Turning off sensor HDR");
13642 break;
13643 default:
13644 LOGE("HDR mode %d not supported", sensor_hdr);
13645 rc = BAD_VALUE;
13646 break;
13647 }
13648
13649 if(isSupported) {
13650 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13651 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13652 rc = BAD_VALUE;
13653 } else {
13654 if(!isVideoHdrEnable)
13655 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013656 }
13657 }
13658 return rc;
13659}
13660
13661/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013662 * FUNCTION : needRotationReprocess
13663 *
13664 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13665 *
13666 * PARAMETERS : none
13667 *
13668 * RETURN : true: needed
13669 * false: no need
13670 *==========================================================================*/
13671bool QCamera3HardwareInterface::needRotationReprocess()
13672{
13673 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13674        // pp has the capability to do rotation, so rotation is handled via reprocess
13675 LOGH("need do reprocess for rotation");
13676 return true;
13677 }
13678
13679 return false;
13680}
13681
13682/*===========================================================================
13683 * FUNCTION : needReprocess
13684 *
13685 * DESCRIPTION: if reprocess is needed
13686 *
13687 * PARAMETERS : none
13688 *
13689 * RETURN : true: needed
13690 * false: no need
13691 *==========================================================================*/
13692bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13693{
13694 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13695 // TODO: add for ZSL HDR later
13696 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13697 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13698 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13699 return true;
13700 } else {
13701 LOGH("already post processed frame");
13702 return false;
13703 }
13704 }
13705 return needRotationReprocess();
13706}
13707
13708/*===========================================================================
13709 * FUNCTION : needJpegExifRotation
13710 *
13711 * DESCRIPTION: if rotation from jpeg is needed
13712 *
13713 * PARAMETERS : none
13714 *
13715 * RETURN : true: needed
13716 * false: no need
13717 *==========================================================================*/
13718bool QCamera3HardwareInterface::needJpegExifRotation()
13719{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013720 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013721 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13722 LOGD("Need use Jpeg EXIF Rotation");
13723 return true;
13724 }
13725 return false;
13726}
13727
13728/*===========================================================================
13729 * FUNCTION : addOfflineReprocChannel
13730 *
13731 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13732 * coming from input channel
13733 *
13734 * PARAMETERS :
13735 * @config : reprocess configuration
13736 * @inputChHandle : pointer to the input (source) channel
13737 *
13738 *
13739 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13740 *==========================================================================*/
13741QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13742 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13743{
13744 int32_t rc = NO_ERROR;
13745 QCamera3ReprocessChannel *pChannel = NULL;
13746
13747 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013748 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13749 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013750 if (NULL == pChannel) {
13751 LOGE("no mem for reprocess channel");
13752 return NULL;
13753 }
13754
13755 rc = pChannel->initialize(IS_TYPE_NONE);
13756 if (rc != NO_ERROR) {
13757 LOGE("init reprocess channel failed, ret = %d", rc);
13758 delete pChannel;
13759 return NULL;
13760 }
13761
13762 // pp feature config
13763 cam_pp_feature_config_t pp_config;
13764 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13765
13766 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13767 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13768 & CAM_QCOM_FEATURE_DSDN) {
13769 //Use CPP CDS incase h/w supports it.
13770 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13771 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13772 }
13773 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13774 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13775 }
13776
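    // Carry HDR bracketing into the reprocess pass when the source capture
    // was taken with HDR enabled (or when HDR snapshots are forced).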
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013777 if (config.hdr_param.hdr_enable) {
13778 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13779 pp_config.hdr_param = config.hdr_param;
13780 }
13781
13782 if (mForceHdrSnapshot) {
13783 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13784 pp_config.hdr_param.hdr_enable = 1;
13785 pp_config.hdr_param.hdr_need_1x = 0;
13786 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13787 }
13788
Thierry Strudel3d639192016-09-09 11:52:26 -070013789 rc = pChannel->addReprocStreamsFromSource(pp_config,
13790 config,
13791 IS_TYPE_NONE,
13792 mMetadataChannel);
13793
13794 if (rc != NO_ERROR) {
13795 delete pChannel;
13796 return NULL;
13797 }
13798 return pChannel;
13799}
13800
13801/*===========================================================================
13802 * FUNCTION : getMobicatMask
13803 *
13804 * DESCRIPTION: returns mobicat mask
13805 *
13806 * PARAMETERS : none
13807 *
13808 * RETURN : mobicat mask
13809 *
13810 *==========================================================================*/
13811uint8_t QCamera3HardwareInterface::getMobicatMask()
13812{
13813 return m_MobicatMask;
13814}
13815
13816/*===========================================================================
13817 * FUNCTION : setMobicat
13818 *
13819 * DESCRIPTION: set Mobicat on/off.
13820 *
13821 * PARAMETERS :
13822 * @params : none
13823 *
13824 * RETURN : int32_t type of status
13825 * NO_ERROR -- success
13826 * none-zero failure code
13827 *==========================================================================*/
13828int32_t QCamera3HardwareInterface::setMobicat()
13829{
Thierry Strudel3d639192016-09-09 11:52:26 -070013830 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013831
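    // When Mobicat is enabled, request both the VFE and PP modules to reload
    // their chromatix tuning data.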
Shuzhen Wangb57ec912017-07-31 13:24:27 -070013832 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070013833 tune_cmd_t tune_cmd;
13834 tune_cmd.type = SET_RELOAD_CHROMATIX;
13835 tune_cmd.module = MODULE_ALL;
13836 tune_cmd.value = TRUE;
13837 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13838 CAM_INTF_PARM_SET_VFE_COMMAND,
13839 tune_cmd);
13840
13841 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13842 CAM_INTF_PARM_SET_PP_COMMAND,
13843 tune_cmd);
13844 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013845
13846 return ret;
13847}
13848
13849/*===========================================================================
13850* FUNCTION : getLogLevel
13851*
13852* DESCRIPTION: Reads the log level property into a variable
13853*
13854* PARAMETERS :
13855* None
13856*
13857* RETURN :
13858* None
13859*==========================================================================*/
13860void QCamera3HardwareInterface::getLogLevel()
13861{
13862 char prop[PROPERTY_VALUE_MAX];
13863 uint32_t globalLogLevel = 0;
13864
13865 property_get("persist.camera.hal.debug", prop, "0");
13866 int val = atoi(prop);
13867 if (0 <= val) {
13868 gCamHal3LogLevel = (uint32_t)val;
13869 }
13870
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013871 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013872 gKpiDebugLevel = atoi(prop);
13873
13874 property_get("persist.camera.global.debug", prop, "0");
13875 val = atoi(prop);
13876 if (0 <= val) {
13877 globalLogLevel = (uint32_t)val;
13878 }
13879
13880 /* Highest log level among hal.logs and global.logs is selected */
13881 if (gCamHal3LogLevel < globalLogLevel)
13882 gCamHal3LogLevel = globalLogLevel;
13883
13884 return;
13885}
13886
13887/*===========================================================================
13888 * FUNCTION : validateStreamRotations
13889 *
13890 * DESCRIPTION: Check if the rotations requested are supported
13891 *
13892 * PARAMETERS :
13893 * @stream_list : streams to be configured
13894 *
13895 * RETURN : NO_ERROR on success
13896 * -EINVAL on failure
13897 *
13898 *==========================================================================*/
13899int QCamera3HardwareInterface::validateStreamRotations(
13900 camera3_stream_configuration_t *streamList)
13901{
13902 int rc = NO_ERROR;
13903
13904 /*
13905 * Loop through all streams requested in configuration
13906 * Check if unsupported rotations have been requested on any of them
13907 */
13908 for (size_t j = 0; j < streamList->num_streams; j++){
13909 camera3_stream_t *newStream = streamList->streams[j];
13910
Emilian Peev35ceeed2017-06-29 11:58:56 -070013911 switch(newStream->rotation) {
13912 case CAMERA3_STREAM_ROTATION_0:
13913 case CAMERA3_STREAM_ROTATION_90:
13914 case CAMERA3_STREAM_ROTATION_180:
13915 case CAMERA3_STREAM_ROTATION_270:
13916 //Expected values
13917 break;
13918 default:
13919 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
13920                    " type:%d and stream format:%d", __func__,
13921 newStream->rotation, newStream->stream_type,
13922 newStream->format);
13923 return -EINVAL;
13924 }
13925
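        // Rotation is only supported on implementation-defined output streams;
        // rotated ZSL (bidirectional) or other stream types are rejected below.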
Thierry Strudel3d639192016-09-09 11:52:26 -070013926 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13927 bool isImplDef = (newStream->format ==
13928 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13929 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13930 isImplDef);
13931
13932 if (isRotated && (!isImplDef || isZsl)) {
13933 LOGE("Error: Unsupported rotation of %d requested for stream"
13934                    " type:%d and stream format:%d",
13935 newStream->rotation, newStream->stream_type,
13936 newStream->format);
13937 rc = -EINVAL;
13938 break;
13939 }
13940 }
13941
13942 return rc;
13943}
13944
13945/*===========================================================================
13946* FUNCTION : getFlashInfo
13947*
13948* DESCRIPTION: Retrieve information about whether the device has a flash.
13949*
13950* PARAMETERS :
13951* @cameraId : Camera id to query
13952* @hasFlash : Boolean indicating whether there is a flash device
13953* associated with given camera
13954* @flashNode : If a flash device exists, this will be its device node.
13955*
13956* RETURN :
13957* None
13958*==========================================================================*/
13959void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13960 bool& hasFlash,
13961 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13962{
13963 cam_capability_t* camCapability = gCamCapability[cameraId];
13964 if (NULL == camCapability) {
13965 hasFlash = false;
13966 flashNode[0] = '\0';
13967 } else {
13968 hasFlash = camCapability->flash_available;
13969 strlcpy(flashNode,
13970 (char*)camCapability->flash_dev_name,
13971 QCAMERA_MAX_FILEPATH_LENGTH);
13972 }
13973}
13974
13975/*===========================================================================
13976* FUNCTION : getEepromVersionInfo
13977*
13978* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13979*
13980* PARAMETERS : None
13981*
13982* RETURN : string describing EEPROM version
13983* "\0" if no such info available
13984*==========================================================================*/
13985const char *QCamera3HardwareInterface::getEepromVersionInfo()
13986{
13987 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13988}
13989
13990/*===========================================================================
13991* FUNCTION : getLdafCalib
13992*
13993* DESCRIPTION: Retrieve Laser AF calibration data
13994*
13995* PARAMETERS : None
13996*
13997* RETURN : Two uint32_t describing laser AF calibration data
13998* NULL if none is available.
13999*==========================================================================*/
14000const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14001{
14002 if (mLdafCalibExist) {
14003 return &mLdafCalib[0];
14004 } else {
14005 return NULL;
14006 }
14007}
14008
14009/*===========================================================================
14010 * FUNCTION : dynamicUpdateMetaStreamInfo
14011 *
14012 * DESCRIPTION: This function:
14013 * (1) stops all the channels
14014 * (2) returns error on pending requests and buffers
14015 * (3) sends metastream_info in setparams
14016 * (4) starts all channels
14017 * This is useful when sensor has to be restarted to apply any
14018 * settings such as frame rate from a different sensor mode
14019 *
14020 * PARAMETERS : None
14021 *
14022 * RETURN : NO_ERROR on success
14023 * Error codes on failure
14024 *
14025 *==========================================================================*/
14026int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14027{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014028 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070014029 int rc = NO_ERROR;
14030
14031 LOGD("E");
14032
14033 rc = stopAllChannels();
14034 if (rc < 0) {
14035 LOGE("stopAllChannels failed");
14036 return rc;
14037 }
14038
14039 rc = notifyErrorForPendingRequests();
14040 if (rc < 0) {
14041 LOGE("notifyErrorForPendingRequests failed");
14042 return rc;
14043 }
14044
14045 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14046 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
14047                ", Format:%d",
14048 mStreamConfigInfo.type[i],
14049 mStreamConfigInfo.stream_sizes[i].width,
14050 mStreamConfigInfo.stream_sizes[i].height,
14051 mStreamConfigInfo.postprocess_mask[i],
14052 mStreamConfigInfo.format[i]);
14053 }
14054
14055 /* Send meta stream info once again so that ISP can start */
14056 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14057 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14058 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14059 mParameters);
14060 if (rc < 0) {
14061 LOGE("set Metastreaminfo failed. Sensor mode does not change");
14062 }
14063
14064 rc = startAllChannels();
14065 if (rc < 0) {
14066 LOGE("startAllChannels failed");
14067 return rc;
14068 }
14069
14070 LOGD("X");
14071 return rc;
14072}
14073
14074/*===========================================================================
14075 * FUNCTION : stopAllChannels
14076 *
14077 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14078 *
14079 * PARAMETERS : None
14080 *
14081 * RETURN : NO_ERROR on success
14082 * Error codes on failure
14083 *
14084 *==========================================================================*/
14085int32_t QCamera3HardwareInterface::stopAllChannels()
14086{
14087 int32_t rc = NO_ERROR;
14088
14089 LOGD("Stopping all channels");
14090 // Stop the Streams/Channels
14091 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14092 it != mStreamInfo.end(); it++) {
14093 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14094 if (channel) {
14095 channel->stop();
14096 }
14097 (*it)->status = INVALID;
14098 }
14099
14100 if (mSupportChannel) {
14101 mSupportChannel->stop();
14102 }
14103 if (mAnalysisChannel) {
14104 mAnalysisChannel->stop();
14105 }
14106 if (mRawDumpChannel) {
14107 mRawDumpChannel->stop();
14108 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014109 if (mHdrPlusRawSrcChannel) {
14110 mHdrPlusRawSrcChannel->stop();
14111 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014112 if (mMetadataChannel) {
14113 /* If content of mStreamInfo is not 0, there is metadata stream */
14114 mMetadataChannel->stop();
14115 }
14116
14117 LOGD("All channels stopped");
14118 return rc;
14119}
14120
14121/*===========================================================================
14122 * FUNCTION : startAllChannels
14123 *
14124 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14125 *
14126 * PARAMETERS : None
14127 *
14128 * RETURN : NO_ERROR on success
14129 * Error codes on failure
14130 *
14131 *==========================================================================*/
14132int32_t QCamera3HardwareInterface::startAllChannels()
14133{
14134 int32_t rc = NO_ERROR;
14135
14136 LOGD("Start all channels ");
14137 // Start the Streams/Channels
14138 if (mMetadataChannel) {
14139 /* If content of mStreamInfo is not 0, there is metadata stream */
14140 rc = mMetadataChannel->start();
14141 if (rc < 0) {
14142 LOGE("META channel start failed");
14143 return rc;
14144 }
14145 }
14146 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14147 it != mStreamInfo.end(); it++) {
14148 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14149 if (channel) {
14150 rc = channel->start();
14151 if (rc < 0) {
14152 LOGE("channel start failed");
14153 return rc;
14154 }
14155 }
14156 }
14157 if (mAnalysisChannel) {
14158 mAnalysisChannel->start();
14159 }
14160 if (mSupportChannel) {
14161 rc = mSupportChannel->start();
14162 if (rc < 0) {
14163 LOGE("Support channel start failed");
14164 return rc;
14165 }
14166 }
14167 if (mRawDumpChannel) {
14168 rc = mRawDumpChannel->start();
14169 if (rc < 0) {
14170 LOGE("RAW dump channel start failed");
14171 return rc;
14172 }
14173 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014174 if (mHdrPlusRawSrcChannel) {
14175 rc = mHdrPlusRawSrcChannel->start();
14176 if (rc < 0) {
14177 LOGE("HDR+ RAW channel start failed");
14178 return rc;
14179 }
14180 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014181
14182 LOGD("All channels started");
14183 return rc;
14184}
14185
14186/*===========================================================================
14187 * FUNCTION : notifyErrorForPendingRequests
14188 *
14189 * DESCRIPTION: This function sends error for all the pending requests/buffers
14190 *
14191 * PARAMETERS : None
14192 *
14193 * RETURN : Error codes
14194 * NO_ERROR on success
14195 *
14196 *==========================================================================*/
14197int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14198{
Emilian Peev7650c122017-01-19 08:24:33 -080014199 notifyErrorFoPendingDepthData(mDepthChannel);
14200
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014201 auto pendingRequest = mPendingRequestsList.begin();
14202 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014203
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014204 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14205 // buffers (for which buffers aren't sent yet).
14206 while (pendingRequest != mPendingRequestsList.end() ||
14207 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14208 if (pendingRequest == mPendingRequestsList.end() ||
14209 pendingBuffer->frame_number < pendingRequest->frame_number) {
14210            // If metadata for this frame was sent, notify about a buffer error and return buffers
14211 // with error.
14212 for (auto &info : pendingBuffer->mPendingBufferList) {
14213 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014214 camera3_notify_msg_t notify_msg;
14215 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14216 notify_msg.type = CAMERA3_MSG_ERROR;
14217 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014218 notify_msg.message.error.error_stream = info.stream;
14219 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014220 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014221
14222 camera3_stream_buffer_t buffer = {};
14223 buffer.acquire_fence = -1;
14224 buffer.release_fence = -1;
14225 buffer.buffer = info.buffer;
14226 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14227 buffer.stream = info.stream;
14228 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014229 }
14230
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014231 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14232 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14233 pendingBuffer->frame_number > pendingRequest->frame_number) {
14234 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014235 camera3_notify_msg_t notify_msg;
14236 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14237 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014238 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14239 notify_msg.message.error.error_stream = nullptr;
14240 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014241 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014242
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014243 if (pendingRequest->input_buffer != nullptr) {
14244 camera3_capture_result result = {};
14245 result.frame_number = pendingRequest->frame_number;
14246 result.result = nullptr;
14247 result.input_buffer = pendingRequest->input_buffer;
14248 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014249 }
14250
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014251 mShutterDispatcher.clear(pendingRequest->frame_number);
14252 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14253 } else {
14254 // If both buffers and result metadata weren't sent yet, notify about a request error
14255 // and return buffers with error.
14256 for (auto &info : pendingBuffer->mPendingBufferList) {
14257 camera3_notify_msg_t notify_msg;
14258 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14259 notify_msg.type = CAMERA3_MSG_ERROR;
14260 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14261 notify_msg.message.error.error_stream = info.stream;
14262 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14263 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014264
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014265 camera3_stream_buffer_t buffer = {};
14266 buffer.acquire_fence = -1;
14267 buffer.release_fence = -1;
14268 buffer.buffer = info.buffer;
14269 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14270 buffer.stream = info.stream;
14271 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14272 }
14273
14274 if (pendingRequest->input_buffer != nullptr) {
14275 camera3_capture_result result = {};
14276 result.frame_number = pendingRequest->frame_number;
14277 result.result = nullptr;
14278 result.input_buffer = pendingRequest->input_buffer;
14279 orchestrateResult(&result);
14280 }
14281
14282 mShutterDispatcher.clear(pendingRequest->frame_number);
14283 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14284 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014285 }
14286 }
14287
14288 /* Reset pending frame Drop list and requests list */
14289 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014290 mShutterDispatcher.clear();
14291 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014292 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014293 mExpectedFrameDuration = 0;
14294 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014295 LOGH("Cleared all the pending buffers ");
14296
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014297 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014298}
14299
14300bool QCamera3HardwareInterface::isOnEncoder(
14301 const cam_dimension_t max_viewfinder_size,
14302 uint32_t width, uint32_t height)
14303{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014304 return ((width > (uint32_t)max_viewfinder_size.width) ||
14305 (height > (uint32_t)max_viewfinder_size.height) ||
14306 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14307 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014308}
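// Illustrative example (not part of the build; sizes are placeholders and assume
// cam_dimension_t is {width, height}): with a 1920x1080 max viewfinder size, a
// 3264x2448 snapshot needs the encoder path while a 720p preview does not.
//
//   cam_dimension_t maxVf = {1920, 1080};                  // assumed viewfinder max
//   bool snapshotOnEncoder = isOnEncoder(maxVf, 3264, 2448); // true
//   bool previewOnEncoder  = isOnEncoder(maxVf, 1280, 720);  // false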
14309
14310/*===========================================================================
14311 * FUNCTION : setBundleInfo
14312 *
14313 * DESCRIPTION: Set bundle info for all streams that are bundled.
14314 *
14315 * PARAMETERS : None
14316 *
14317 * RETURN : NO_ERROR on success
14318 * Error codes on failure
14319 *==========================================================================*/
14320int32_t QCamera3HardwareInterface::setBundleInfo()
14321{
14322 int32_t rc = NO_ERROR;
14323
14324 if (mChannelHandle) {
14325 cam_bundle_config_t bundleInfo;
14326 memset(&bundleInfo, 0, sizeof(bundleInfo));
14327 rc = mCameraHandle->ops->get_bundle_info(
14328 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14329 if (rc != NO_ERROR) {
14330 LOGE("get_bundle_info failed");
14331 return rc;
14332 }
14333 if (mAnalysisChannel) {
14334 mAnalysisChannel->setBundleInfo(bundleInfo);
14335 }
14336 if (mSupportChannel) {
14337 mSupportChannel->setBundleInfo(bundleInfo);
14338 }
14339 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14340 it != mStreamInfo.end(); it++) {
14341 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14342 channel->setBundleInfo(bundleInfo);
14343 }
14344 if (mRawDumpChannel) {
14345 mRawDumpChannel->setBundleInfo(bundleInfo);
14346 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014347 if (mHdrPlusRawSrcChannel) {
14348 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14349 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014350 }
14351
14352 return rc;
14353}
14354
14355/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014356 * FUNCTION : setInstantAEC
14357 *
14358 * DESCRIPTION: Set Instant AEC related params.
14359 *
14360 * PARAMETERS :
14361 * @meta: CameraMetadata reference
14362 *
14363 * RETURN : NO_ERROR on success
14364 * Error codes on failure
14365 *==========================================================================*/
14366int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14367{
14368 int32_t rc = NO_ERROR;
14369 uint8_t val = 0;
14370 char prop[PROPERTY_VALUE_MAX];
14371
14372 // First try to configure instant AEC from framework metadata
14373 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14374 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14375 }
14376
14377 // If framework did not set this value, try to read from set prop.
14378 if (val == 0) {
14379 memset(prop, 0, sizeof(prop));
14380 property_get("persist.camera.instant.aec", prop, "0");
14381 val = (uint8_t)atoi(prop);
14382 }
14383
14384 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14385 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14386 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14387 mInstantAEC = val;
14388 mInstantAECSettledFrameNumber = 0;
14389 mInstantAecFrameIdxCount = 0;
14390 LOGH("instantAEC value set %d",val);
14391 if (mInstantAEC) {
14392 memset(prop, 0, sizeof(prop));
14393 property_get("persist.camera.ae.instant.bound", prop, "10");
14394 int32_t aec_frame_skip_cnt = atoi(prop);
14395 if (aec_frame_skip_cnt >= 0) {
14396 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14397 } else {
14398 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14399 rc = BAD_VALUE;
14400 }
14401 }
14402 } else {
14403 LOGE("Bad instant aec value set %d", val);
14404 rc = BAD_VALUE;
14405 }
14406 return rc;
14407}
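// Debugging sketch (hedged): when the framework does not supply
// QCAMERA3_INSTANT_AEC_MODE, the same value can be forced through the
// properties read above, e.g. from an adb shell; the exact mode semantics
// depend on the cam_aec_convergence enum values:
//
//   adb shell setprop persist.camera.instant.aec 1
//   adb shell setprop persist.camera.ae.instant.bound 10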
14408
14409/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014410 * FUNCTION : get_num_overall_buffers
14411 *
14412 * DESCRIPTION: Get the total number of pending buffers across all requests.
14413 *
14414 * PARAMETERS : None
14415 *
14416 * RETURN : Number of overall pending buffers
14417 *
14418 *==========================================================================*/
14419uint32_t PendingBuffersMap::get_num_overall_buffers()
14420{
14421 uint32_t sum_buffers = 0;
14422 for (auto &req : mPendingBuffersInRequest) {
14423 sum_buffers += req.mPendingBufferList.size();
14424 }
14425 return sum_buffers;
14426}
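// Data-shape note (informal): mPendingBuffersInRequest holds one entry per
// in-flight request, and each entry's mPendingBufferList holds the buffers
// still owed for that request; the count above is simply the sum of the
// per-request list sizes.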
14427
14428/*===========================================================================
14429 * FUNCTION : removeBuf
14430 *
14431 * DESCRIPTION: Remove a matching buffer from tracker.
14432 *
14433 * PARAMETERS : @buffer: image buffer for the callback
14434 *
14435 * RETURN : None
14436 *
14437 *==========================================================================*/
14438void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14439{
14440 bool buffer_found = false;
14441 for (auto req = mPendingBuffersInRequest.begin();
14442 req != mPendingBuffersInRequest.end(); req++) {
14443 for (auto k = req->mPendingBufferList.begin();
14444 k != req->mPendingBufferList.end(); k++ ) {
14445 if (k->buffer == buffer) {
14446 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14447 req->frame_number, buffer);
14448 k = req->mPendingBufferList.erase(k);
14449 if (req->mPendingBufferList.empty()) {
14450 // Remove this request from Map
14451 req = mPendingBuffersInRequest.erase(req);
14452 }
14453 buffer_found = true;
14454 break;
14455 }
14456 }
14457 if (buffer_found) {
14458 break;
14459 }
14460 }
14461 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14462 get_num_overall_buffers());
14463}
14464
14465/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014466 * FUNCTION : getBufErrStatus
14467 *
14468 * DESCRIPTION: get buffer error status
14469 *
14470 * PARAMETERS : @buffer: buffer handle
14471 *
14472 * RETURN : Error status
14473 *
14474 *==========================================================================*/
14475int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14476{
14477 for (auto& req : mPendingBuffersInRequest) {
14478 for (auto& k : req.mPendingBufferList) {
14479 if (k.buffer == buffer)
14480 return k.bufStatus;
14481 }
14482 }
14483 return CAMERA3_BUFFER_STATUS_OK;
14484}
14485
14486/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014487 * FUNCTION : setPAAFSupport
14488 *
14489 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14490 * feature mask according to stream type and filter
14491 * arrangement
14492 *
14493 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14494 * @stream_type: stream type
14495 * @filter_arrangement: filter arrangement
14496 *
14497 * RETURN : None
14498 *==========================================================================*/
14499void QCamera3HardwareInterface::setPAAFSupport(
14500 cam_feature_mask_t& feature_mask,
14501 cam_stream_type_t stream_type,
14502 cam_color_filter_arrangement_t filter_arrangement)
14503{
Thierry Strudel3d639192016-09-09 11:52:26 -070014504 switch (filter_arrangement) {
14505 case CAM_FILTER_ARRANGEMENT_RGGB:
14506 case CAM_FILTER_ARRANGEMENT_GRBG:
14507 case CAM_FILTER_ARRANGEMENT_GBRG:
14508 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014509 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14510 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014511 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014512 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14513 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014514 }
14515 break;
14516 case CAM_FILTER_ARRANGEMENT_Y:
14517 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14518 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14519 }
14520 break;
14521 default:
14522 break;
14523 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014524 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14525 feature_mask, stream_type, filter_arrangement);
14526
14527
Thierry Strudel3d639192016-09-09 11:52:26 -070014528}
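// Illustrative call (hedged; the starting mask is hypothetical): for a Bayer
// preview stream whose feature mask does not already carry
// CAM_QTI_FEATURE_PPEISCORE, the helper ORs in PAAF support.
//
//   cam_feature_mask_t mask = 0;   // hypothetical starting mask
//   setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW, CAM_FILTER_ARRANGEMENT_RGGB);
//   // mask now contains CAM_QCOM_FEATURE_PAAF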
14529
14530/*===========================================================================
14531* FUNCTION : getSensorMountAngle
14532*
14533* DESCRIPTION: Retrieve sensor mount angle
14534*
14535* PARAMETERS : None
14536*
14537* RETURN : sensor mount angle in uint32_t
14538*==========================================================================*/
14539uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14540{
14541 return gCamCapability[mCameraId]->sensor_mount_angle;
14542}
14543
14544/*===========================================================================
14545* FUNCTION : getRelatedCalibrationData
14546*
14547* DESCRIPTION: Retrieve related system calibration data
14548*
14549* PARAMETERS : None
14550*
14551* RETURN : Pointer of related system calibration data
14552*==========================================================================*/
14553const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14554{
14555 return (const cam_related_system_calibration_data_t *)
14556 &(gCamCapability[mCameraId]->related_cam_calibration);
14557}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014558
14559/*===========================================================================
14560 * FUNCTION : is60HzZone
14561 *
14562 * DESCRIPTION: Whether the phone is in a zone with 60Hz mains electricity frequency
14563 *
14564 * PARAMETERS : None
14565 *
14566 * RETURN : True if in 60Hz zone, False otherwise
14567 *==========================================================================*/
14568bool QCamera3HardwareInterface::is60HzZone()
14569{
14570 time_t t = time(NULL);
14571 struct tm lt;
14572
14573 struct tm* r = localtime_r(&t, &lt);
14574
14575 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14576 return true;
14577 else
14578 return false;
14579}
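// Worked example (informal): tm_gmtoff is the UTC offset in seconds. For US
// Pacific time, tm_gmtoff = -8*60*60 = -28800 <= -7200, so the check above
// reports a 60Hz zone; for Central Europe (+1h, 3600s) neither bound is met,
// so the function returns false (50Hz); for Japan (+9h, 32400s >= 28800) it
// returns true again. A failed localtime_r() conservatively defaults to 60Hz.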
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014580
14581/*===========================================================================
14582 * FUNCTION : adjustBlackLevelForCFA
14583 *
14584 * DESCRIPTION: Reorder the black level pattern from RGGB order to the order of
14585 * the sensor's Bayer CFA (Color Filter Array).
14586 *
14587 * PARAMETERS : @input: black level pattern in the order of RGGB
14588 * @output: black level pattern in the order of CFA
14589 * @color_arrangement: CFA color arrangement
14590 *
14591 * RETURN : None
14592 *==========================================================================*/
14593template<typename T>
14594void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14595 T input[BLACK_LEVEL_PATTERN_CNT],
14596 T output[BLACK_LEVEL_PATTERN_CNT],
14597 cam_color_filter_arrangement_t color_arrangement)
14598{
14599 switch (color_arrangement) {
14600 case CAM_FILTER_ARRANGEMENT_GRBG:
14601 output[0] = input[1];
14602 output[1] = input[0];
14603 output[2] = input[3];
14604 output[3] = input[2];
14605 break;
14606 case CAM_FILTER_ARRANGEMENT_GBRG:
14607 output[0] = input[2];
14608 output[1] = input[3];
14609 output[2] = input[0];
14610 output[3] = input[1];
14611 break;
14612 case CAM_FILTER_ARRANGEMENT_BGGR:
14613 output[0] = input[3];
14614 output[1] = input[2];
14615 output[2] = input[1];
14616 output[3] = input[0];
14617 break;
14618 case CAM_FILTER_ARRANGEMENT_RGGB:
14619 output[0] = input[0];
14620 output[1] = input[1];
14621 output[2] = input[2];
14622 output[3] = input[3];
14623 break;
14624 default:
14625 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14626 break;
14627 }
14628}
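// Usage sketch from within the HAL (illustrative only; the black level values
// are placeholders, not real calibration data):
//
//   float rggb[BLACK_LEVEL_PATTERN_CNT] = {64.f, 65.f, 65.f, 66.f}; // R, Gr, Gb, B
//   float cfa[BLACK_LEVEL_PATTERN_CNT];
//   adjustBlackLevelForCFA(rggb, cfa, CAM_FILTER_ARRANGEMENT_GRBG);
//   // cfa is now {Gr, R, B, Gb}, matching the GRBG readout order.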
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014629
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014630void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14631 CameraMetadata &resultMetadata,
14632 std::shared_ptr<metadata_buffer_t> settings)
14633{
14634 if (settings == nullptr) {
14635 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14636 return;
14637 }
14638
14639 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14640 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14641 }
14642
14643 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14644 String8 str((const char *)gps_methods);
14645 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14646 }
14647
14648 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14649 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14650 }
14651
14652 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14653 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14654 }
14655
14656 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14657 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14658 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14659 }
14660
14661 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14662 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14663 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14664 }
14665
14666 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14667 int32_t fwk_thumb_size[2];
14668 fwk_thumb_size[0] = thumb_size->width;
14669 fwk_thumb_size[1] = thumb_size->height;
14670 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14671 }
14672
14673 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14674 uint8_t fwk_intent = intent[0];
14675 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14676 }
14677}
14678
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014679bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14680 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14681 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014682{
14683 if (hdrPlusRequest == nullptr) return false;
14684
14685 // Check noise reduction mode is high quality.
14686 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14687 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14688 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014689 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14690 metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ? metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] : -1);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014691 return false;
14692 }
14693
14694 // Check edge mode is high quality.
14695 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14696 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14697 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14698 return false;
14699 }
14700
14701 if (request.num_output_buffers != 1 ||
14702 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14703 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014704 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14705 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14706 request.output_buffers[i].stream->width,
14707 request.output_buffers[i].stream->height,
14708 request.output_buffers[i].stream->format);
14709 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014710 return false;
14711 }
14712
14713 // Get a YUV buffer from pic channel.
14714 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14715 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14716 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14717 if (res != OK) {
14718 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14719 __FUNCTION__, strerror(-res), res);
14720 return false;
14721 }
14722
14723 pbcamera::StreamBuffer buffer;
14724 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014725 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chencec36ed2017-07-21 13:54:29 -070014726 buffer.data = yuvBuffer->fd == -1 ? yuvBuffer->buffer : nullptr;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014727 buffer.dataSize = yuvBuffer->frame_len;
14728
14729 pbcamera::CaptureRequest pbRequest;
14730 pbRequest.id = request.frame_number;
14731 pbRequest.outputBuffers.push_back(buffer);
14732
14733 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen17cec362017-07-05 17:10:31 -070014734 res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014735 if (res != OK) {
14736 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14737 strerror(-res), res);
14738 return false;
14739 }
14740
14741 hdrPlusRequest->yuvBuffer = yuvBuffer;
14742 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14743
14744 return true;
14745}
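// Sketch of a qualifying request (hedged; framework-side, values illustrative):
// a capture request is considered for HDR+ only when its settings ask for
// high-quality noise reduction and edge enhancement and it has exactly one
// JPEG (BLOB) output, e.g. settings built roughly as:
//
//   CameraMetadata settings;
//   uint8_t nr   = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
//   uint8_t edge = ANDROID_EDGE_MODE_HIGH_QUALITY;
//   settings.update(ANDROID_NOISE_REDUCTION_MODE, &nr, 1);
//   settings.update(ANDROID_EDGE_MODE, &edge, 1);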
14746
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014747status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14748{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014749 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14750 return OK;
14751 }
14752
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014753 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014754 if (res != OK) {
14755 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14756 strerror(-res), res);
14757 return res;
14758 }
14759 gHdrPlusClientOpening = true;
14760
14761 return OK;
14762}
14763
Chien-Yu Chenee335912017-02-09 17:53:20 -080014764status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14765{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014766 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014767
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014768 if (mHdrPlusModeEnabled) {
14769 return OK;
14770 }
14771
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014772 // Check if gHdrPlusClient is opened or being opened.
14773 if (gHdrPlusClient == nullptr) {
14774 if (gHdrPlusClientOpening) {
14775 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14776 return OK;
14777 }
14778
14779 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014780 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014781 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14782 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014783 return res;
14784 }
14785
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014786 // When opening HDR+ client completes, HDR+ mode will be enabled.
14787 return OK;
14788
Chien-Yu Chenee335912017-02-09 17:53:20 -080014789 }
14790
14791 // Configure stream for HDR+.
14792 res = configureHdrPlusStreamsLocked();
14793 if (res != OK) {
14794 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014795 return res;
14796 }
14797
14798 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14799 res = gHdrPlusClient->setZslHdrPlusMode(true);
14800 if (res != OK) {
14801 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014802 return res;
14803 }
14804
14805 mHdrPlusModeEnabled = true;
14806 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14807
14808 return OK;
14809}
14810
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014811void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
14812{
14813 if (gHdrPlusClientOpening) {
14814 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
14815 }
14816}
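// Caller pattern (hedged sketch): the caller holds a std::unique_lock on
// gHdrPlusClientLock and passes it in so the condition variable can release
// the lock while waiting for the asynchronous open to finish:
//
//   std::unique_lock<std::mutex> l(gHdrPlusClientLock);
//   finishHdrPlusClientOpeningLocked(l);
//   // gHdrPlusClientOpening is false past this point.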
14817
Chien-Yu Chenee335912017-02-09 17:53:20 -080014818void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14819{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014820 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014821 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014822 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14823 if (res != OK) {
14824 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14825 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014826
14827 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014828 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014829 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014830 }
14831
14832 mHdrPlusModeEnabled = false;
14833 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14834}
14835
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070014836bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
14837{
14838 // Check if mPictureChannel is valid.
14839 // TODO: Support YUV (b/36693254) and RAW (b/36690506)
14840 if (mPictureChannel == nullptr) {
14841 return false;
14842 }
14843
14844 return true;
14845}
14846
Chien-Yu Chenee335912017-02-09 17:53:20 -080014847status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014848{
14849 pbcamera::InputConfiguration inputConfig;
14850 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14851 status_t res = OK;
14852
14853 // Configure HDR+ client streams.
14854 // Get input config.
14855 if (mHdrPlusRawSrcChannel) {
14856 // HDR+ input buffers will be provided by HAL.
14857 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14858 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14859 if (res != OK) {
14860 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14861 __FUNCTION__, strerror(-res), res);
14862 return res;
14863 }
14864
14865 inputConfig.isSensorInput = false;
14866 } else {
14867 // Sensor MIPI will send data to Easel.
14868 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014869 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014870 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14871 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14872 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14873 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14874 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
Yin-Chia Yeheeb10422017-05-23 11:37:46 -070014875 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014876 if (mSensorModeInfo.num_raw_bits != 10) {
14877 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14878 mSensorModeInfo.num_raw_bits);
14879 return BAD_VALUE;
14880 }
14881
14882 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014883 }
14884
14885 // Get output configurations.
14886 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014887 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014888
14889 // Easel may need to output YUV output buffers if mPictureChannel was created.
14890 pbcamera::StreamConfiguration yuvOutputConfig;
14891 if (mPictureChannel != nullptr) {
14892 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14893 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14894 if (res != OK) {
14895 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14896 __FUNCTION__, strerror(-res), res);
14897
14898 return res;
14899 }
14900
14901 outputStreamConfigs.push_back(yuvOutputConfig);
14902 }
14903
14904 // TODO: consider other channels for YUV output buffers.
14905
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014906 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014907 if (res != OK) {
14908 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14909 strerror(-res), res);
14910 return res;
14911 }
14912
14913 return OK;
14914}
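// Configuration summary (informal): when mHdrPlusRawSrcChannel exists the HAL
// feeds RAW10 buffers to Easel (isSensorInput = false); otherwise the sensor
// streams RAW10 directly over MIPI and only the sensor mode geometry, pixel
// clock and timestamp offset are passed along. Only 10-bit raw sensor modes
// are accepted here.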
14915
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070014916void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
14917{
14918 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
14919 // Set HAL state to error.
14920 pthread_mutex_lock(&mMutex);
14921 mState = ERROR;
14922 pthread_mutex_unlock(&mMutex);
14923
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070014924 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070014925}
14926
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014927void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
14928{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014929 if (client == nullptr) {
14930 ALOGE("%s: Opened client is null.", __FUNCTION__);
14931 return;
14932 }
14933
Chien-Yu Chene96475e2017-04-11 11:53:26 -070014934 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014935 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14936
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014937 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014938 if (!gHdrPlusClientOpening) {
14939 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
14940 return;
14941 }
14942
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014943 gHdrPlusClient = std::move(client);
14944 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014945 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014946
14947 // Set static metadata.
14948 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14949 if (res != OK) {
14950 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14951 __FUNCTION__, strerror(-res), res);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014952 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014953 gHdrPlusClient = nullptr;
14954 return;
14955 }
14956
14957 // Enable HDR+ mode.
14958 res = enableHdrPlusModeLocked();
14959 if (res != OK) {
14960 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
14961 }
14962}
14963
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014964void QCamera3HardwareInterface::onOpenFailed(status_t err)
14965{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014966 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014967 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014968 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014969 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014970}
14971
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014972void QCamera3HardwareInterface::onFatalError()
14973{
14974 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
14975
14976 // Set HAL state to error.
14977 pthread_mutex_lock(&mMutex);
14978 mState = ERROR;
14979 pthread_mutex_unlock(&mMutex);
14980
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070014981 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014982}
14983
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -070014984void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
14985{
14986 ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
14987 __LINE__, requestId, apSensorTimestampNs);
14988
14989 mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
14990}
14991
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014992void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014993 const camera_metadata_t &resultMetadata)
14994{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014995 if (result != nullptr) {
14996 if (result->outputBuffers.size() != 1) {
14997 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
14998 result->outputBuffers.size());
14999 return;
15000 }
15001
15002 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
15003 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
15004 result->outputBuffers[0].streamId);
15005 return;
15006 }
15007
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015008 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015009 HdrPlusPendingRequest pendingRequest;
15010 {
15011 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15012 auto req = mHdrPlusPendingRequests.find(result->requestId);
15013 pendingRequest = req->second;
15014 }
15015
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015016 // Update the result metadata with the settings of the HDR+ still capture request because
15017 // the result metadata belongs to a ZSL buffer.
15018 CameraMetadata metadata;
15019 metadata = &resultMetadata;
15020 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
15021 camera_metadata_t* updatedResultMetadata = metadata.release();
15022
15023 QCamera3PicChannel *picChannel =
15024 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
15025
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015026 // Check if dumping HDR+ YUV output is enabled.
15027 char prop[PROPERTY_VALUE_MAX];
15028 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
15029 bool dumpYuvOutput = atoi(prop);
15030
15031 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015032 // Dump yuv buffer to a ppm file.
15033 pbcamera::StreamConfiguration outputConfig;
15034 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
15035 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
15036 if (rc == OK) {
15037 char buf[FILENAME_MAX] = {};
15038 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
15039 result->requestId, result->outputBuffers[0].streamId,
15040 outputConfig.image.width, outputConfig.image.height);
15041
15042 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
15043 } else {
15044 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
15045 __FUNCTION__, strerror(-rc), rc);
15046 }
15047 }
15048
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015049 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
15050 auto halMetadata = std::make_shared<metadata_buffer_t>();
15051 clear_metadata_buffer(halMetadata.get());
15052
15053 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
15054 // encoding.
15055 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
15056 halStreamId, /*minFrameDuration*/0);
15057 if (res == OK) {
15058 // Return the buffer to pic channel for encoding.
15059 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
15060 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
15061 halMetadata);
15062 } else {
15063 // Return the buffer without encoding.
15064 // TODO: This should not happen but we may want to report an error buffer to camera
15065 // service.
15066 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
15067 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
15068 strerror(-res), res);
15069 }
15070
15071 // Send HDR+ metadata to framework.
15072 {
15073 pthread_mutex_lock(&mMutex);
15074
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015075 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
15076 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015077 pthread_mutex_unlock(&mMutex);
15078 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015079
15080 // Remove the HDR+ pending request.
15081 {
15082 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15083 auto req = mHdrPlusPendingRequests.find(result->requestId);
15084 mHdrPlusPendingRequests.erase(req);
15085 }
15086 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015087}
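// Debugging note (illustrative): dumping of the HDR+ YUV output checked above
// can be toggled at runtime, e.g.
//
//   adb shell setprop persist.camera.hdrplus.dump_yuv 1
//
// which writes one .ppm per result under QCAMERA_DUMP_FRM_LOCATION.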
15088
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015089void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
15090{
15091 if (failedResult == nullptr) {
15092 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
15093 return;
15094 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015095
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015096 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015097
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015098 // Remove the pending HDR+ request.
15099 {
15100 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15101 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
15102
15103 // Return the buffer to pic channel.
15104 QCamera3PicChannel *picChannel =
15105 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
15106 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
15107
15108 mHdrPlusPendingRequests.erase(pendingRequest);
15109 }
15110
15111 pthread_mutex_lock(&mMutex);
15112
15113 // Find the pending buffers.
15114 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
15115 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15116 if (pendingBuffers->frame_number == failedResult->requestId) {
15117 break;
15118 }
15119 pendingBuffers++;
15120 }
15121
15122 // Send out buffer errors for the pending buffers.
15123 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15124 std::vector<camera3_stream_buffer_t> streamBuffers;
15125 for (auto &buffer : pendingBuffers->mPendingBufferList) {
15126 // Prepare a stream buffer.
15127 camera3_stream_buffer_t streamBuffer = {};
15128 streamBuffer.stream = buffer.stream;
15129 streamBuffer.buffer = buffer.buffer;
15130 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
15131 streamBuffer.acquire_fence = -1;
15132 streamBuffer.release_fence = -1;
15133
15134 streamBuffers.push_back(streamBuffer);
15135
15136 // Send out error buffer event.
15137 camera3_notify_msg_t notify_msg = {};
15138 notify_msg.type = CAMERA3_MSG_ERROR;
15139 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
15140 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
15141 notify_msg.message.error.error_stream = buffer.stream;
15142
15143 orchestrateNotify(&notify_msg);
15144 }
15145
15146 camera3_capture_result_t result = {};
15147 result.frame_number = pendingBuffers->frame_number;
15148 result.num_output_buffers = streamBuffers.size();
15149 result.output_buffers = &streamBuffers[0];
15150
15151 // Send out result with buffer errors.
15152 orchestrateResult(&result);
15153
15154 // Remove pending buffers.
15155 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
15156 }
15157
15158 // Remove pending request.
15159 auto halRequest = mPendingRequestsList.begin();
15160 while (halRequest != mPendingRequestsList.end()) {
15161 if (halRequest->frame_number == failedResult->requestId) {
15162 mPendingRequestsList.erase(halRequest);
15163 break;
15164 }
15165 halRequest++;
15166 }
15167
15168 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015169}
15170
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015171
15172ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
15173 mParent(parent) {}
15174
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015175void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015176{
15177 std::lock_guard<std::mutex> lock(mLock);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015178
15179 if (isReprocess) {
15180 mReprocessShutters.emplace(frameNumber, Shutter());
15181 } else {
15182 mShutters.emplace(frameNumber, Shutter());
15183 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015184}
15185
15186void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
15187{
15188 std::lock_guard<std::mutex> lock(mLock);
15189
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015190 std::map<uint32_t, Shutter> *shutters = nullptr;
15191
15192 // Find the shutter entry.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015193 auto shutter = mShutters.find(frameNumber);
15194 if (shutter == mShutters.end()) {
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015195 shutter = mReprocessShutters.find(frameNumber);
15196 if (shutter == mReprocessShutters.end()) {
15197 // Shutter was already sent.
15198 return;
15199 }
15200 shutters = &mReprocessShutters;
15201 } else {
15202 shutters = &mShutters;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015203 }
15204
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015205 // Make this frame's shutter ready.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015206 shutter->second.ready = true;
15207 shutter->second.timestamp = timestamp;
15208
15209 // Iterate through the shutters and send them out until one that is not ready yet is reached.
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015210 shutter = shutters->begin();
15211 while (shutter != shutters->end()) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015212 if (!shutter->second.ready) {
15213 // If this shutter is not ready, the following shutters can't be sent.
15214 break;
15215 }
15216
15217 camera3_notify_msg_t msg = {};
15218 msg.type = CAMERA3_MSG_SHUTTER;
15219 msg.message.shutter.frame_number = shutter->first;
15220 msg.message.shutter.timestamp = shutter->second.timestamp;
15221 mParent->orchestrateNotify(&msg);
15222
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015223 shutter = shutters->erase(shutter);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015224 }
15225}
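// Ordering example (informal): if frames 10, 11 and 12 are expected and the
// shutter for 11 becomes ready first, nothing is sent; once 10 becomes ready,
// the shutters for 10 and 11 are dispatched in order and 12 keeps waiting.
// Regular and reprocess shutters live in separate maps, so a pending reprocess
// frame does not block normal shutters.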
15226
15227void ShutterDispatcher::clear(uint32_t frameNumber)
15228{
15229 std::lock_guard<std::mutex> lock(mLock);
15230 mShutters.erase(frameNumber);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015231 mReprocessShutters.erase(frameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015232}
15233
15234void ShutterDispatcher::clear()
15235{
15236 std::lock_guard<std::mutex> lock(mLock);
15237
15238 // Log errors for stale shutters.
15239 for (auto &shutter : mShutters) {
15240 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
15241 __FUNCTION__, shutter.first, shutter.second.ready,
15242 shutter.second.timestamp);
15243 }
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015244
15245 // Log errors for stale reprocess shutters.
15246 for (auto &shutter : mReprocessShutters) {
15247 ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15248 __FUNCTION__, shutter.first, shutter.second.ready,
15249 shutter.second.timestamp);
15250 }
15251
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015252 mShutters.clear();
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015253 mReprocessShutters.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015254}
15255
15256OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
15257 mParent(parent) {}
15258
15259status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
15260{
15261 std::lock_guard<std::mutex> lock(mLock);
15262 mStreamBuffers.clear();
15263 if (!streamList) {
15264 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15265 return -EINVAL;
15266 }
15267
15268 // Create a "frame-number -> buffer" map for each stream.
15269 for (uint32_t i = 0; i < streamList->num_streams; i++) {
15270 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15271 }
15272
15273 return OK;
15274}
15275
15276status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15277{
15278 std::lock_guard<std::mutex> lock(mLock);
15279
15280 // Find the "frame-number -> buffer" map for the stream.
15281 auto buffers = mStreamBuffers.find(stream);
15282 if (buffers == mStreamBuffers.end()) {
15283 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15284 return -EINVAL;
15285 }
15286
15287 // Create an unready buffer for this frame number.
15288 buffers->second.emplace(frameNumber, Buffer());
15289 return OK;
15290}
15291
15292void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15293 const camera3_stream_buffer_t &buffer)
15294{
15295 std::lock_guard<std::mutex> lock(mLock);
15296
15297 // Find the frame number -> buffer map for the stream.
15298 auto buffers = mStreamBuffers.find(buffer.stream);
15299 if (buffers == mStreamBuffers.end()) {
15300 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15301 return;
15302 }
15303
15304 // Find the unready buffer for this frame number and mark it ready.
15305 auto pendingBuffer = buffers->second.find(frameNumber);
15306 if (pendingBuffer == buffers->second.end()) {
15307 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15308 return;
15309 }
15310
15311 pendingBuffer->second.ready = true;
15312 pendingBuffer->second.buffer = buffer;
15313
15314 // Iterate through the buffers and send them out until one that is not ready yet is reached.
15315 pendingBuffer = buffers->second.begin();
15316 while (pendingBuffer != buffers->second.end()) {
15317 if (!pendingBuffer->second.ready) {
15318 // If this buffer is not ready, the following buffers can't be sent.
15319 break;
15320 }
15321
15322 camera3_capture_result_t result = {};
15323 result.frame_number = pendingBuffer->first;
15324 result.num_output_buffers = 1;
15325 result.output_buffers = &pendingBuffer->second.buffer;
15326
15327 // Send out the result with this ready buffer.
15328 mParent->orchestrateResult(&result);
15329
15330 pendingBuffer = buffers->second.erase(pendingBuffer);
15331 }
15332}
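// Typical call sequence (hedged sketch, per-stream and strictly in frame-number
// order, mirroring the shutter dispatch above):
//
//   dispatcher.configureStreams(streamList);          // at stream configuration
//   dispatcher.expectBuffer(frameNumber, stream);      // when a request is queued
//   dispatcher.markBufferReady(frameNumber, buffer);   // when the buffer completes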
15333
15334void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15335{
15336 std::lock_guard<std::mutex> lock(mLock);
15337
15338 // Log errors for stale buffers.
15339 for (auto &buffers : mStreamBuffers) {
15340 for (auto &buffer : buffers.second) {
15341 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15342 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15343 }
15344 buffers.second.clear();
15345 }
15346
15347 if (clearConfiguredStreams) {
15348 mStreamBuffers.clear();
15349 }
15350}
15351
Thierry Strudel3d639192016-09-09 11:52:26 -070015352}; //end namespace qcamera