/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
// Threshold for detection of missing buffers, in seconds
#define MISSING_REQUEST_BUF_TIMEOUT 5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// TODO: Enable HDR+ for front camera after it's supported. b/37100623.
#define ENABLE_HDRPLUS_FOR_FRONT_CAMERA 0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

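// Per-camera capability tables and HAL-wide state shared across all camera sessions in this process.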
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

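// Supported JPEG thumbnail dimensions, flattened as (width, height) pairs; the (0, 0) entry means no thumbnail.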
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,                     CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,             CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,              CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,                     CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,                 CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important: while mapping from HAL to Android, the code
 * traverses from lower to higher index, which means that for HAL values that map to different
 * Android values, the traversal logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

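// camera3_device_ops vtable handed to the camera framework; entries this HAL does not implement are left NULL.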
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// Initialise session IDs to a default (invalid) value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

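// Logs a named Easel event with a CLOCK_BOOTTIME timestamp in milliseconds, but only when Easel profiling is enabled.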
static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger()
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
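    // Advertise camera device API version 3.4 when built without USE_HAL_3_3, otherwise fall back to 3.3.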
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle, /*stop_immediately*/false);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);

    mExpectedInflightDuration -= i->expectedFrameDuration;
    if (mExpectedInflightDuration < 0) {
        LOGE("Negative expected in-flight duration!");
        mExpectedInflightDuration = 0;
    }

    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            Mutex::Autolock l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested is among those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find the input stream if it exists
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes differ for each type of stream format; check against the
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from the framework is always the full active array size,
                 * but it is not clear from the spec if the framework will always
                 * follow that; also, we have logic to override to full array
                 * size, so keeping the logic lenient at the moment.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001335/*===========================================================================
1336 * FUNCTION : validateUsageFlags
1337 *
1338 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1339 *
1340 * PARAMETERS :
1341 * @stream_list : streams to be configured
1342 *
1343 * RETURN :
1344 * NO_ERROR if the usage flags are supported
1345 * error code if usage flags are not supported
1346 *
1347 *==========================================================================*/
1348int QCamera3HardwareInterface::validateUsageFlags(
1349 const camera3_stream_configuration_t* streamList)
1350{
1351 for (size_t j = 0; j < streamList->num_streams; j++) {
1352 const camera3_stream_t *newStream = streamList->streams[j];
1353
1354 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1355 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1356 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1357 continue;
1358 }
1359
Jason Leec4cf5032017-05-24 18:31:41 -07001360 // Here we only care whether it's EIS3 or not
1361 char is_type_value[PROPERTY_VALUE_MAX];
1362 property_get("persist.camera.is_type", is_type_value, "4");
1363 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1364 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1365 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1366 isType = IS_TYPE_NONE;
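        // The front camera and constrained high-speed sessions do not use EIS 3.0,
        // so the default-format queries below assume IS_TYPE_NONE for them.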
1367
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001368 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1369 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1370 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1371 bool forcePreviewUBWC = true;
1372 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1373 forcePreviewUBWC = false;
1374 }
1375 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001376 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001377 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001378 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001379 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001380 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001381
1382 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1383 // So color spaces will always match.
1384
1385 // Check whether underlying formats of shared streams match.
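        // For example (illustrative): if the video stream resolves to a UBWC format
        // while a shared preview stream resolves to a linear one, a single buffer
        // cannot back both usages, so the combination is rejected below.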
1386 if (isVideo && isPreview && videoFormat != previewFormat) {
1387 LOGE("Combined video and preview usage flag is not supported");
1388 return -EINVAL;
1389 }
1390 if (isPreview && isZSL && previewFormat != zslFormat) {
1391 LOGE("Combined preview and zsl usage flag is not supported");
1392 return -EINVAL;
1393 }
1394 if (isVideo && isZSL && videoFormat != zslFormat) {
1395 LOGE("Combined video and zsl usage flag is not supported");
1396 return -EINVAL;
1397 }
1398 }
1399 return NO_ERROR;
1400}
1401
1402/*===========================================================================
1403 * FUNCTION : validateUsageFlagsForEis
1404 *
1405 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1406 *
1407 * PARAMETERS :
1408 * @stream_list : streams to be configured
1409 *
1410 * RETURN :
1411 * NO_ERROR if the usage flags are supported
1412 * error code if usage flags are not supported
1413 *
1414 *==========================================================================*/
1415int QCamera3HardwareInterface::validateUsageFlagsForEis(
1416 const camera3_stream_configuration_t* streamList)
1417{
1418 for (size_t j = 0; j < streamList->num_streams; j++) {
1419 const camera3_stream_t *newStream = streamList->streams[j];
1420
1421 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1422 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1423
1424        // Because EIS is "hard-coded" for certain use cases, and the current
1425        // implementation doesn't support shared preview and video on the same
1426        // stream, return failure if EIS is forced on.
1427 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1428 LOGE("Combined video and preview usage flag is not supported due to EIS");
1429 return -EINVAL;
1430 }
1431 }
1432 return NO_ERROR;
1433}
1434
Thierry Strudel3d639192016-09-09 11:52:26 -07001435/*==============================================================================
1436 * FUNCTION : isSupportChannelNeeded
1437 *
1438 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1439 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1440 * PARAMETERS :
1441 * @stream_list : streams to be configured
1442 * @stream_config_info : the config info for streams to be configured
1443 *
1444 * RETURN : Boolean true/false decision
1445 *
1446 *==========================================================================*/
1447bool QCamera3HardwareInterface::isSupportChannelNeeded(
1448 camera3_stream_configuration_t *streamList,
1449 cam_stream_size_info_t stream_config_info)
1450{
1451 uint32_t i;
1452 bool pprocRequested = false;
1453    /* Check for conditions where the PProc pipeline does not have any streams */
1454 for (i = 0; i < stream_config_info.num_streams; i++) {
1455 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1456 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1457 pprocRequested = true;
1458 break;
1459 }
1460 }
1461
1462 if (pprocRequested == false )
1463 return true;
1464
1465    /* Dummy stream needed if only raw or JPEG streams are present */
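    /* Example: a configuration containing only RAW16 and/or BLOB streams has no
     * processed stream to keep the pipeline busy, so the loop below falls through
     * and a support channel is requested. */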
1466 for (i = 0; i < streamList->num_streams; i++) {
1467 switch(streamList->streams[i]->format) {
1468 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1469 case HAL_PIXEL_FORMAT_RAW10:
1470 case HAL_PIXEL_FORMAT_RAW16:
1471 case HAL_PIXEL_FORMAT_BLOB:
1472 break;
1473 default:
1474 return false;
1475 }
1476 }
1477 return true;
1478}
1479
1480/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001481 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001482 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001483 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001484 *
1485 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001486 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001487 *
1488 * RETURN : int32_t type of status
1489 * NO_ERROR -- success
1490 * non-zero failure code
1491 *
1492 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001493int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001494{
1495 int32_t rc = NO_ERROR;
1496
1497 cam_dimension_t max_dim = {0, 0};
1498 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1499 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1500 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1501 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1502 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1503 }
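    /* Note: max_dim is the per-axis maximum across all configured streams, so it may
     * not match the aspect ratio of any single stream; it only tells the backend how
     * large a sensor mode must be to cover the configuration. */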
1504
1505 clear_metadata_buffer(mParameters);
1506
1507 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1508 max_dim);
1509 if (rc != NO_ERROR) {
1510 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1511 return rc;
1512 }
1513
1514 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1515 if (rc != NO_ERROR) {
1516 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1517 return rc;
1518 }
1519
1520 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001521 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001522
1523 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1524 mParameters);
1525 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001526 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001527 return rc;
1528 }
1529
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001530 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001531 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1532 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1533 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1534 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1535 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001536
1537 return rc;
1538}
1539
1540/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001541 * FUNCTION : getCurrentSensorModeInfo
1542 *
1543 * DESCRIPTION: Get sensor mode information that is currently selected.
1544 *
1545 * PARAMETERS :
1546 * @sensorModeInfo : sensor mode information (output)
1547 *
1548 * RETURN : int32_t type of status
1549 * NO_ERROR -- success
1550 * non-zero failure code
1551 *
1552 *==========================================================================*/
1553int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1554{
1555 int32_t rc = NO_ERROR;
1556
1557 clear_metadata_buffer(mParameters);
1558 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1559
1560 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1561 mParameters);
1562 if (rc != NO_ERROR) {
1563        LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1564 return rc;
1565 }
1566
1567 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1568 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1569 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1570 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1571 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1572 sensorModeInfo.num_raw_bits);
1573
1574 return rc;
1575}
1576
1577/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001578 * FUNCTION : addToPPFeatureMask
1579 *
1580 * DESCRIPTION: add additional features to pp feature mask based on
1581 * stream type and usecase
1582 *
1583 * PARAMETERS :
1584 * @stream_format : stream type for feature mask
1585 * @stream_idx : stream idx within postprocess_mask list to change
1586 *
1587 * RETURN : NULL
1588 * RETURN : None
1589 *==========================================================================*/
1590void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1591 uint32_t stream_idx)
1592{
1593 char feature_mask_value[PROPERTY_VALUE_MAX];
1594 cam_feature_mask_t feature_mask;
1595 int args_converted;
1596 int property_len;
1597
1598 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001599#ifdef _LE_CAMERA_
1600 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1601 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1602 property_len = property_get("persist.camera.hal3.feature",
1603 feature_mask_value, swtnr_feature_mask_value);
1604#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001605 property_len = property_get("persist.camera.hal3.feature",
1606 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001607#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001608 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1609 (feature_mask_value[1] == 'x')) {
1610 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1611 } else {
1612 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1613 }
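    // Example: "0x10" and "16" parse to the same mask value; a hex string must start
    // with "0x" to take the first branch, anything else is read as decimal.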
1614 if (1 != args_converted) {
1615 feature_mask = 0;
1616 LOGE("Wrong feature mask %s", feature_mask_value);
1617 return;
1618 }
1619
1620 switch (stream_format) {
1621 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1622 /* Add LLVD to pp feature mask only if video hint is enabled */
1623 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1624 mStreamConfigInfo.postprocess_mask[stream_idx]
1625 |= CAM_QTI_FEATURE_SW_TNR;
1626 LOGH("Added SW TNR to pp feature mask");
1627 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1628 mStreamConfigInfo.postprocess_mask[stream_idx]
1629 |= CAM_QCOM_FEATURE_LLVD;
1630 LOGH("Added LLVD SeeMore to pp feature mask");
1631 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001632 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1633 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1634 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1635 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001636 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1637 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1638 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1639 CAM_QTI_FEATURE_BINNING_CORRECTION;
1640 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001641 break;
1642 }
1643 default:
1644 break;
1645 }
1646 LOGD("PP feature mask %llx",
1647 mStreamConfigInfo.postprocess_mask[stream_idx]);
1648}
1649
1650/*==============================================================================
1651 * FUNCTION : updateFpsInPreviewBuffer
1652 *
1653 * DESCRIPTION: update FPS information in preview buffer.
1654 *
1655 * PARAMETERS :
1656 * @metadata : pointer to metadata buffer
1657 * @frame_number: frame_number to look for in pending buffer list
1658 *
1659 * RETURN : None
1660 *
1661 *==========================================================================*/
1662void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1663 uint32_t frame_number)
1664{
1665 // Mark all pending buffers for this particular request
1666 // with corresponding framerate information
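    // The per-frame rate is written into the gralloc private handle via
    // UPDATE_REFRESH_RATE so the display side can pick it up (e.g. for
    // refresh-rate matching).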
1667 for (List<PendingBuffersInRequest>::iterator req =
1668 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1669 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1670 for(List<PendingBufferInfo>::iterator j =
1671 req->mPendingBufferList.begin();
1672 j != req->mPendingBufferList.end(); j++) {
1673 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1674 if ((req->frame_number == frame_number) &&
1675 (channel->getStreamTypeMask() &
1676 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1677 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1678 CAM_INTF_PARM_FPS_RANGE, metadata) {
1679 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1680 struct private_handle_t *priv_handle =
1681 (struct private_handle_t *)(*(j->buffer));
1682 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1683 }
1684 }
1685 }
1686 }
1687}
1688
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001689/*==============================================================================
1690 * FUNCTION : updateTimeStampInPendingBuffers
1691 *
1692 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1693 * of a frame number
1694 *
1695 * PARAMETERS :
1696 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1697 * @timestamp : timestamp to be set
1698 *
1699 * RETURN : None
1700 *
1701 *==========================================================================*/
1702void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1703 uint32_t frameNumber, nsecs_t timestamp)
1704{
1705 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1706 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1707 if (req->frame_number != frameNumber)
1708 continue;
1709
1710 for (auto k = req->mPendingBufferList.begin();
1711 k != req->mPendingBufferList.end(); k++ ) {
1712 struct private_handle_t *priv_handle =
1713 (struct private_handle_t *) (*(k->buffer));
1714 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1715 }
1716 }
1717 return;
1718}
1719
Thierry Strudel3d639192016-09-09 11:52:26 -07001720/*===========================================================================
1721 * FUNCTION : configureStreams
1722 *
1723 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1724 * and output streams.
1725 *
1726 * PARAMETERS :
1727 * @stream_list : streams to be configured
1728 *
1729 * RETURN :
1730 *
1731 *==========================================================================*/
1732int QCamera3HardwareInterface::configureStreams(
1733 camera3_stream_configuration_t *streamList)
1734{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001735 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001736 int rc = 0;
1737
1738 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001739 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001740 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001741 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001742
1743 return rc;
1744}
1745
1746/*===========================================================================
1747 * FUNCTION : configureStreamsPerfLocked
1748 *
1749 * DESCRIPTION: configureStreams while perfLock is held.
1750 *
1751 * PARAMETERS :
1752 * @stream_list : streams to be configured
1753 *
1754 * RETURN : int32_t type of status
1755 * NO_ERROR -- success
1756 * non-zero failure code
1757 *==========================================================================*/
1758int QCamera3HardwareInterface::configureStreamsPerfLocked(
1759 camera3_stream_configuration_t *streamList)
1760{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001761 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001762 int rc = 0;
1763
1764 // Sanity check stream_list
1765 if (streamList == NULL) {
1766 LOGE("NULL stream configuration");
1767 return BAD_VALUE;
1768 }
1769 if (streamList->streams == NULL) {
1770 LOGE("NULL stream list");
1771 return BAD_VALUE;
1772 }
1773
1774 if (streamList->num_streams < 1) {
1775 LOGE("Bad number of streams requested: %d",
1776 streamList->num_streams);
1777 return BAD_VALUE;
1778 }
1779
1780 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1781 LOGE("Maximum number of streams %d exceeded: %d",
1782 MAX_NUM_STREAMS, streamList->num_streams);
1783 return BAD_VALUE;
1784 }
1785
Jason Leec4cf5032017-05-24 18:31:41 -07001786 mOpMode = streamList->operation_mode;
1787 LOGD("mOpMode: %d", mOpMode);
1788
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001789 rc = validateUsageFlags(streamList);
1790 if (rc != NO_ERROR) {
1791 return rc;
1792 }
1793
Thierry Strudel3d639192016-09-09 11:52:26 -07001794    /* First invalidate all the streams in mStreamInfo;
1795 * if they appear again, they will be validated */
1796 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1797 it != mStreamInfo.end(); it++) {
1798 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1799 if (channel) {
1800 channel->stop();
1801 }
1802 (*it)->status = INVALID;
1803 }
1804
1805 if (mRawDumpChannel) {
1806 mRawDumpChannel->stop();
1807 delete mRawDumpChannel;
1808 mRawDumpChannel = NULL;
1809 }
1810
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001811 if (mHdrPlusRawSrcChannel) {
1812 mHdrPlusRawSrcChannel->stop();
1813 delete mHdrPlusRawSrcChannel;
1814 mHdrPlusRawSrcChannel = NULL;
1815 }
1816
Thierry Strudel3d639192016-09-09 11:52:26 -07001817 if (mSupportChannel)
1818 mSupportChannel->stop();
1819
1820 if (mAnalysisChannel) {
1821 mAnalysisChannel->stop();
1822 }
1823 if (mMetadataChannel) {
1824        /* If mStreamInfo is not empty, there is a metadata stream */
1825 mMetadataChannel->stop();
1826 }
1827 if (mChannelHandle) {
1828 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07001829 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001830 LOGD("stopping channel %d", mChannelHandle);
1831 }
1832
1833 pthread_mutex_lock(&mMutex);
1834
1835 // Check state
1836 switch (mState) {
1837 case INITIALIZED:
1838 case CONFIGURED:
1839 case STARTED:
1840 /* valid state */
1841 break;
1842 default:
1843 LOGE("Invalid state %d", mState);
1844 pthread_mutex_unlock(&mMutex);
1845 return -ENODEV;
1846 }
1847
1848 /* Check whether we have video stream */
1849 m_bIs4KVideo = false;
1850 m_bIsVideo = false;
1851 m_bEisSupportedSize = false;
1852 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001853 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001854 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001855 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001856 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001857 uint32_t videoWidth = 0U;
1858 uint32_t videoHeight = 0U;
1859 size_t rawStreamCnt = 0;
1860 size_t stallStreamCnt = 0;
1861 size_t processedStreamCnt = 0;
1862 // Number of streams on ISP encoder path
1863 size_t numStreamsOnEncoder = 0;
1864 size_t numYuv888OnEncoder = 0;
1865 bool bYuv888OverrideJpeg = false;
1866 cam_dimension_t largeYuv888Size = {0, 0};
1867 cam_dimension_t maxViewfinderSize = {0, 0};
1868 bool bJpegExceeds4K = false;
1869 bool bJpegOnEncoder = false;
1870 bool bUseCommonFeatureMask = false;
1871 cam_feature_mask_t commonFeatureMask = 0;
1872 bool bSmallJpegSize = false;
1873 uint32_t width_ratio;
1874 uint32_t height_ratio;
1875 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1876 camera3_stream_t *inputStream = NULL;
1877 bool isJpeg = false;
1878 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001879 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001880 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001881
1882 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1883
1884 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001885 uint8_t eis_prop_set;
1886 uint32_t maxEisWidth = 0;
1887 uint32_t maxEisHeight = 0;
1888
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001889 // Initialize all instant AEC related variables
1890 mInstantAEC = false;
1891 mResetInstantAEC = false;
1892 mInstantAECSettledFrameNumber = 0;
1893 mAecSkipDisplayFrameBound = 0;
1894 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001895 mCurrFeatureState = 0;
1896 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001897
Thierry Strudel3d639192016-09-09 11:52:26 -07001898 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1899
1900 size_t count = IS_TYPE_MAX;
1901 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1902 for (size_t i = 0; i < count; i++) {
1903 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001904 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1905 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001906 break;
1907 }
1908 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001909
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001910 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001911 maxEisWidth = MAX_EIS_WIDTH;
1912 maxEisHeight = MAX_EIS_HEIGHT;
1913 }
1914
1915 /* EIS setprop control */
1916 char eis_prop[PROPERTY_VALUE_MAX];
1917 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001918 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001919 eis_prop_set = (uint8_t)atoi(eis_prop);
1920
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001921 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001922 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1923
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001924 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1925 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001926
Thierry Strudel3d639192016-09-09 11:52:26 -07001927 /* stream configurations */
1928 for (size_t i = 0; i < streamList->num_streams; i++) {
1929 camera3_stream_t *newStream = streamList->streams[i];
1930 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1931 "height = %d, rotation = %d, usage = 0x%x",
1932 i, newStream->stream_type, newStream->format,
1933 newStream->width, newStream->height, newStream->rotation,
1934 newStream->usage);
1935 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1936 newStream->stream_type == CAMERA3_STREAM_INPUT){
1937 isZsl = true;
1938 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001939 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1940 IS_USAGE_PREVIEW(newStream->usage)) {
1941 isPreview = true;
1942 }
1943
Thierry Strudel3d639192016-09-09 11:52:26 -07001944 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1945 inputStream = newStream;
1946 }
1947
Emilian Peev7650c122017-01-19 08:24:33 -08001948 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1949 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001950 isJpeg = true;
1951 jpegSize.width = newStream->width;
1952 jpegSize.height = newStream->height;
1953 if (newStream->width > VIDEO_4K_WIDTH ||
1954 newStream->height > VIDEO_4K_HEIGHT)
1955 bJpegExceeds4K = true;
1956 }
1957
1958 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1959 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1960 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001961 // In HAL3 we can have multiple different video streams.
1962 // The variables video width and height are used below as
1963 // dimensions of the biggest of them
1964 if (videoWidth < newStream->width ||
1965 videoHeight < newStream->height) {
1966 videoWidth = newStream->width;
1967 videoHeight = newStream->height;
1968 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001969 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1970 (VIDEO_4K_HEIGHT <= newStream->height)) {
1971 m_bIs4KVideo = true;
1972 }
1973 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1974 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001975
Thierry Strudel3d639192016-09-09 11:52:26 -07001976 }
1977 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1978 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1979 switch (newStream->format) {
1980 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001981 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1982 depthPresent = true;
1983 break;
1984 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001985 stallStreamCnt++;
1986 if (isOnEncoder(maxViewfinderSize, newStream->width,
1987 newStream->height)) {
1988 numStreamsOnEncoder++;
1989 bJpegOnEncoder = true;
1990 }
1991 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1992 newStream->width);
1993                            newStream->height);
1994 newStream->height);;
1995 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1996 "FATAL: max_downscale_factor cannot be zero and so assert");
1997 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1998 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1999 LOGH("Setting small jpeg size flag to true");
2000 bSmallJpegSize = true;
2001 }
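            // Illustrative numbers: with a hypothetical 4000x3000 active array and a
            // 640x480 JPEG, width_ratio = ceil(4000/640) = 7; if max_downscale_factor
            // were 4, the JPEG would be flagged as small and, for non-ZSL configs,
            // given the PP superset mask further below.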
2002 break;
2003 case HAL_PIXEL_FORMAT_RAW10:
2004 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2005 case HAL_PIXEL_FORMAT_RAW16:
2006 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002007 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2008 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2009 pdStatCount++;
2010 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002011 break;
2012 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2013 processedStreamCnt++;
2014 if (isOnEncoder(maxViewfinderSize, newStream->width,
2015 newStream->height)) {
2016 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2017 !IS_USAGE_ZSL(newStream->usage)) {
2018 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2019 }
2020 numStreamsOnEncoder++;
2021 }
2022 break;
2023 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2024 processedStreamCnt++;
2025 if (isOnEncoder(maxViewfinderSize, newStream->width,
2026 newStream->height)) {
2027 // If Yuv888 size is not greater than 4K, set feature mask
2028 // to SUPERSET so that it support concurrent request on
2029 // YUV and JPEG.
2030 if (newStream->width <= VIDEO_4K_WIDTH &&
2031 newStream->height <= VIDEO_4K_HEIGHT) {
2032 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2033 }
2034 numStreamsOnEncoder++;
2035 numYuv888OnEncoder++;
2036 largeYuv888Size.width = newStream->width;
2037 largeYuv888Size.height = newStream->height;
2038 }
2039 break;
2040 default:
2041 processedStreamCnt++;
2042 if (isOnEncoder(maxViewfinderSize, newStream->width,
2043 newStream->height)) {
2044 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2045 numStreamsOnEncoder++;
2046 }
2047 break;
2048 }
2049
2050 }
2051 }
2052
2053 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2054 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2055 !m_bIsVideo) {
2056 m_bEisEnable = false;
2057 }
2058
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002059 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2060 pthread_mutex_unlock(&mMutex);
2061 return -EINVAL;
2062 }
2063
Thierry Strudel54dc9782017-02-15 12:12:10 -08002064 uint8_t forceEnableTnr = 0;
2065 char tnr_prop[PROPERTY_VALUE_MAX];
2066 memset(tnr_prop, 0, sizeof(tnr_prop));
2067 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2068 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2069
Thierry Strudel3d639192016-09-09 11:52:26 -07002070 /* Logic to enable/disable TNR based on specific config size/etc.*/
2071 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002072 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2073 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002074 else if (forceEnableTnr)
2075 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002076
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002077 char videoHdrProp[PROPERTY_VALUE_MAX];
2078 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2079 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2080 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2081
2082 if (hdr_mode_prop == 1 && m_bIsVideo &&
2083 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2084 m_bVideoHdrEnabled = true;
2085 else
2086 m_bVideoHdrEnabled = false;
2087
2088
Thierry Strudel3d639192016-09-09 11:52:26 -07002089 /* Check if num_streams is sane */
2090 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2091 rawStreamCnt > MAX_RAW_STREAMS ||
2092 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2093        LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2094 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2095 pthread_mutex_unlock(&mMutex);
2096 return -EINVAL;
2097 }
2098 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002099 if (isZsl && m_bIs4KVideo) {
2100 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002101 pthread_mutex_unlock(&mMutex);
2102 return -EINVAL;
2103 }
2104 /* Check if stream sizes are sane */
2105 if (numStreamsOnEncoder > 2) {
2106 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2107 pthread_mutex_unlock(&mMutex);
2108 return -EINVAL;
2109 } else if (1 < numStreamsOnEncoder){
2110 bUseCommonFeatureMask = true;
2111 LOGH("Multiple streams above max viewfinder size, common mask needed");
2112 }
2113
2114 /* Check if BLOB size is greater than 4k in 4k recording case */
2115 if (m_bIs4KVideo && bJpegExceeds4K) {
2116 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2117 pthread_mutex_unlock(&mMutex);
2118 return -EINVAL;
2119 }
2120
Emilian Peev7650c122017-01-19 08:24:33 -08002121 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2122 depthPresent) {
2123 LOGE("HAL doesn't support depth streams in HFR mode!");
2124 pthread_mutex_unlock(&mMutex);
2125 return -EINVAL;
2126 }
2127
Thierry Strudel3d639192016-09-09 11:52:26 -07002128 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2129 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2130 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2131 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2132 // configurations:
2133 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2134 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2135 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2136 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2137 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2138 __func__);
2139 pthread_mutex_unlock(&mMutex);
2140 return -EINVAL;
2141 }
2142
2143 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2144 // the YUV stream's size is greater or equal to the JPEG size, set common
2145 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2146 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2147 jpegSize.width, jpegSize.height) &&
2148 largeYuv888Size.width > jpegSize.width &&
2149 largeYuv888Size.height > jpegSize.height) {
2150 bYuv888OverrideJpeg = true;
2151 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2152 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2153 }
2154
2155 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2156 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2157 commonFeatureMask);
2158 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2159 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2160
2161 rc = validateStreamDimensions(streamList);
2162 if (rc == NO_ERROR) {
2163 rc = validateStreamRotations(streamList);
2164 }
2165 if (rc != NO_ERROR) {
2166 LOGE("Invalid stream configuration requested!");
2167 pthread_mutex_unlock(&mMutex);
2168 return rc;
2169 }
2170
Emilian Peev0f3c3162017-03-15 12:57:46 +00002171 if (1 < pdStatCount) {
2172 LOGE("HAL doesn't support multiple PD streams");
2173 pthread_mutex_unlock(&mMutex);
2174 return -EINVAL;
2175 }
2176
2177 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2178 (1 == pdStatCount)) {
2179 LOGE("HAL doesn't support PD streams in HFR mode!");
2180 pthread_mutex_unlock(&mMutex);
2181 return -EINVAL;
2182 }
2183
Thierry Strudel3d639192016-09-09 11:52:26 -07002184 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2185 for (size_t i = 0; i < streamList->num_streams; i++) {
2186 camera3_stream_t *newStream = streamList->streams[i];
2187 LOGH("newStream type = %d, stream format = %d "
2188 "stream size : %d x %d, stream rotation = %d",
2189 newStream->stream_type, newStream->format,
2190 newStream->width, newStream->height, newStream->rotation);
2191        // If the stream is already in mStreamInfo, validate it
2192 bool stream_exists = false;
2193 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2194 it != mStreamInfo.end(); it++) {
2195 if ((*it)->stream == newStream) {
2196 QCamera3ProcessingChannel *channel =
2197 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2198 stream_exists = true;
2199 if (channel)
2200 delete channel;
2201 (*it)->status = VALID;
2202 (*it)->stream->priv = NULL;
2203 (*it)->channel = NULL;
2204 }
2205 }
2206 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2207 //new stream
2208 stream_info_t* stream_info;
2209 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2210 if (!stream_info) {
2211 LOGE("Could not allocate stream info");
2212 rc = -ENOMEM;
2213 pthread_mutex_unlock(&mMutex);
2214 return rc;
2215 }
2216 stream_info->stream = newStream;
2217 stream_info->status = VALID;
2218 stream_info->channel = NULL;
2219 mStreamInfo.push_back(stream_info);
2220 }
2221 /* Covers Opaque ZSL and API1 F/W ZSL */
2222 if (IS_USAGE_ZSL(newStream->usage)
2223 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2224 if (zslStream != NULL) {
2225 LOGE("Multiple input/reprocess streams requested!");
2226 pthread_mutex_unlock(&mMutex);
2227 return BAD_VALUE;
2228 }
2229 zslStream = newStream;
2230 }
2231 /* Covers YUV reprocess */
2232 if (inputStream != NULL) {
2233 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2234 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2235 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2236 && inputStream->width == newStream->width
2237 && inputStream->height == newStream->height) {
2238 if (zslStream != NULL) {
2239                    /* This scenario indicates multiple YUV streams with the same size
2240                     * as the input stream have been requested. Since the zsl stream handle
2241                     * is solely used to override the size of streams that share h/w
2242                     * streams, we just make a guess here as to which of the streams is
2243                     * the ZSL stream. This will be refactored once we have generic
2244                     * logic for streams sharing encoder output.
2245 */
2246 LOGH("Warning, Multiple ip/reprocess streams requested!");
2247 }
2248 zslStream = newStream;
2249 }
2250 }
2251 }
2252
2253 /* If a zsl stream is set, we know that we have configured at least one input or
2254 bidirectional stream */
2255 if (NULL != zslStream) {
2256 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2257 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2258 mInputStreamInfo.format = zslStream->format;
2259 mInputStreamInfo.usage = zslStream->usage;
2260 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2261 mInputStreamInfo.dim.width,
2262 mInputStreamInfo.dim.height,
2263 mInputStreamInfo.format, mInputStreamInfo.usage);
2264 }
2265
2266 cleanAndSortStreamInfo();
2267 if (mMetadataChannel) {
2268 delete mMetadataChannel;
2269 mMetadataChannel = NULL;
2270 }
2271 if (mSupportChannel) {
2272 delete mSupportChannel;
2273 mSupportChannel = NULL;
2274 }
2275
2276 if (mAnalysisChannel) {
2277 delete mAnalysisChannel;
2278 mAnalysisChannel = NULL;
2279 }
2280
2281 if (mDummyBatchChannel) {
2282 delete mDummyBatchChannel;
2283 mDummyBatchChannel = NULL;
2284 }
2285
Emilian Peev7650c122017-01-19 08:24:33 -08002286 if (mDepthChannel) {
2287 mDepthChannel = NULL;
2288 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002289 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002290
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002291 mShutterDispatcher.clear();
2292 mOutputBufferDispatcher.clear();
2293
Thierry Strudel2896d122017-02-23 19:18:03 -08002294 char is_type_value[PROPERTY_VALUE_MAX];
2295 property_get("persist.camera.is_type", is_type_value, "4");
2296 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2297
Binhao Line406f062017-05-03 14:39:44 -07002298 char property_value[PROPERTY_VALUE_MAX];
2299 property_get("persist.camera.gzoom.at", property_value, "0");
2300 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002301 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2302 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2303 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2304 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
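    // Example: persist.camera.gzoom.at=3 sets both bits (bit 0 = video, bit 1 = preview);
    // in either case the feature only applies to the back camera.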
Binhao Line406f062017-05-03 14:39:44 -07002305
2306 property_get("persist.camera.gzoom.4k", property_value, "0");
2307 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2308
Thierry Strudel3d639192016-09-09 11:52:26 -07002309 //Create metadata channel and initialize it
2310 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2311 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2312 gCamCapability[mCameraId]->color_arrangement);
2313 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2314 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002315 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002316 if (mMetadataChannel == NULL) {
2317 LOGE("failed to allocate metadata channel");
2318 rc = -ENOMEM;
2319 pthread_mutex_unlock(&mMutex);
2320 return rc;
2321 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002322 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002323 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2324 if (rc < 0) {
2325 LOGE("metadata channel initialization failed");
2326 delete mMetadataChannel;
2327 mMetadataChannel = NULL;
2328 pthread_mutex_unlock(&mMutex);
2329 return rc;
2330 }
2331
Thierry Strudel2896d122017-02-23 19:18:03 -08002332 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002333 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002334 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002335 // Keep track of preview/video streams indices.
2336 // There could be more than one preview streams, but only one video stream.
2337 int32_t video_stream_idx = -1;
2338 int32_t preview_stream_idx[streamList->num_streams];
2339 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002340 bool previewTnr[streamList->num_streams];
2341 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2342 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2343 // Loop through once to determine preview TNR conditions before creating channels.
2344 for (size_t i = 0; i < streamList->num_streams; i++) {
2345 camera3_stream_t *newStream = streamList->streams[i];
2346 uint32_t stream_usage = newStream->usage;
2347 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2348 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2349 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2350 video_stream_idx = (int32_t)i;
2351 else
2352 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2353 }
2354 }
2355 // By default, preview stream TNR is disabled.
2356    // Enable TNR for the preview stream if all conditions below are satisfied:
2357    // 1. preview resolution == video resolution.
2358    // 2. video stream TNR is enabled.
2359    // 3. EIS 2.0 is selected OR this is the front camera (which wouldn't use EIS3 even if it's set).
2360 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2361 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2362 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2363 if (m_bTnrEnabled && m_bTnrVideo &&
2364 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2365 video_stream->width == preview_stream->width &&
2366 video_stream->height == preview_stream->height) {
2367 previewTnr[preview_stream_idx[i]] = true;
2368 }
2369 }
2370
Thierry Strudel3d639192016-09-09 11:52:26 -07002371 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2372 /* Allocate channel objects for the requested streams */
2373 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002374
Thierry Strudel3d639192016-09-09 11:52:26 -07002375 camera3_stream_t *newStream = streamList->streams[i];
2376 uint32_t stream_usage = newStream->usage;
2377 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2378 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2379 struct camera_info *p_info = NULL;
2380 pthread_mutex_lock(&gCamLock);
2381 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2382 pthread_mutex_unlock(&gCamLock);
2383 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2384 || IS_USAGE_ZSL(newStream->usage)) &&
2385 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002386 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002387 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002388 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2389 if (bUseCommonFeatureMask)
2390 zsl_ppmask = commonFeatureMask;
2391 else
2392 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002393 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002394 if (numStreamsOnEncoder > 0)
2395 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2396 else
2397 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002398 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002399 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002400 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002401 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002402 LOGH("Input stream configured, reprocess config");
2403 } else {
2404 //for non zsl streams find out the format
2405 switch (newStream->format) {
2406 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2407 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002408 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002409 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2410 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2411 /* add additional features to pp feature mask */
2412 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2413 mStreamConfigInfo.num_streams);
2414
2415 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2416 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2417 CAM_STREAM_TYPE_VIDEO;
2418 if (m_bTnrEnabled && m_bTnrVideo) {
2419 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2420 CAM_QCOM_FEATURE_CPP_TNR;
2421 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2422 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2423 ~CAM_QCOM_FEATURE_CDS;
2424 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002425 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2426 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2427 CAM_QTI_FEATURE_PPEISCORE;
2428 }
Binhao Line406f062017-05-03 14:39:44 -07002429 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2430 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2431 CAM_QCOM_FEATURE_GOOG_ZOOM;
2432 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002433 } else {
2434 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2435 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002436 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002437 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2438 CAM_QCOM_FEATURE_CPP_TNR;
2439 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2440 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2441 ~CAM_QCOM_FEATURE_CDS;
2442 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002443 if(!m_bSwTnrPreview) {
2444 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2445 ~CAM_QTI_FEATURE_SW_TNR;
2446 }
Binhao Line406f062017-05-03 14:39:44 -07002447 if (is_goog_zoom_preview_enabled) {
2448 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2449 CAM_QCOM_FEATURE_GOOG_ZOOM;
2450 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002451 padding_info.width_padding = mSurfaceStridePadding;
2452 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002453 previewSize.width = (int32_t)newStream->width;
2454 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002455 }
2456 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2457 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2458 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2459 newStream->height;
2460 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2461 newStream->width;
2462 }
2463 }
2464 break;
2465 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002466 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002467 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2468 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2469 if (bUseCommonFeatureMask)
2470 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2471 commonFeatureMask;
2472 else
2473 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2474 CAM_QCOM_FEATURE_NONE;
2475 } else {
2476 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2477 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2478 }
2479 break;
2480 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002481 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002482 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2483 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2484 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2485 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2486 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002487 /* Remove rotation if it is not supported
2488 for 4K LiveVideo snapshot case (online processing) */
2489 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2490 CAM_QCOM_FEATURE_ROTATION)) {
2491 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2492 &= ~CAM_QCOM_FEATURE_ROTATION;
2493 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002494 } else {
2495 if (bUseCommonFeatureMask &&
2496 isOnEncoder(maxViewfinderSize, newStream->width,
2497 newStream->height)) {
2498 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2499 } else {
2500 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2501 }
2502 }
2503 if (isZsl) {
2504 if (zslStream) {
2505 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2506 (int32_t)zslStream->width;
2507 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2508 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002509 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2510 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002511 } else {
2512 LOGE("Error, No ZSL stream identified");
2513 pthread_mutex_unlock(&mMutex);
2514 return -EINVAL;
2515 }
2516 } else if (m_bIs4KVideo) {
2517 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2518 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2519 } else if (bYuv888OverrideJpeg) {
2520 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2521 (int32_t)largeYuv888Size.width;
2522 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2523 (int32_t)largeYuv888Size.height;
2524 }
2525 break;
2526 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2527 case HAL_PIXEL_FORMAT_RAW16:
2528 case HAL_PIXEL_FORMAT_RAW10:
2529 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2530 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2531 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002532 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2533 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2534 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2535 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2536 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2537 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2538 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2539 gCamCapability[mCameraId]->dt[mPDIndex];
2540 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2541 gCamCapability[mCameraId]->vc[mPDIndex];
2542 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002543 break;
2544 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002545 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002546 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2547 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2548 break;
2549 }
2550 }
2551
2552 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2553 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2554 gCamCapability[mCameraId]->color_arrangement);
2555
2556 if (newStream->priv == NULL) {
2557 //New stream, construct channel
2558 switch (newStream->stream_type) {
2559 case CAMERA3_STREAM_INPUT:
2560 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2561 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2562 break;
2563 case CAMERA3_STREAM_BIDIRECTIONAL:
2564 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2565 GRALLOC_USAGE_HW_CAMERA_WRITE;
2566 break;
2567 case CAMERA3_STREAM_OUTPUT:
2568                /* For video encoding streams, set the read/write-rarely
2569                 * flags so that the buffers may be allocated un-cached */
2570 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2571 newStream->usage |=
2572 (GRALLOC_USAGE_SW_READ_RARELY |
2573 GRALLOC_USAGE_SW_WRITE_RARELY |
2574 GRALLOC_USAGE_HW_CAMERA_WRITE);
2575 else if (IS_USAGE_ZSL(newStream->usage))
2576 {
2577 LOGD("ZSL usage flag skipping");
2578 }
2579 else if (newStream == zslStream
2580 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2581 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2582 } else
2583 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2584 break;
2585 default:
2586 LOGE("Invalid stream_type %d", newStream->stream_type);
2587 break;
2588 }
2589
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002590 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002591 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2592 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2593 QCamera3ProcessingChannel *channel = NULL;
2594 switch (newStream->format) {
2595 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2596 if ((newStream->usage &
2597 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2598 (streamList->operation_mode ==
2599 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2600 ) {
2601 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2602 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002603 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002604 this,
2605 newStream,
2606 (cam_stream_type_t)
2607 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2608 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2609 mMetadataChannel,
2610 0); //heap buffers are not required for HFR video channel
2611 if (channel == NULL) {
2612 LOGE("allocation of channel failed");
2613 pthread_mutex_unlock(&mMutex);
2614 return -ENOMEM;
2615 }
2616 //channel->getNumBuffers() will return 0 here so use
2617 //MAX_INFLIGHT_HFR_REQUESTS
2618 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2619 newStream->priv = channel;
2620 LOGI("num video buffers in HFR mode: %d",
2621 MAX_INFLIGHT_HFR_REQUESTS);
2622 } else {
2623 /* Copy stream contents in the HFR preview-only case to create a
2624 * dummy batch channel so that sensor streaming is in
2625 * HFR mode */
2626 if (!m_bIsVideo && (streamList->operation_mode ==
2627 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2628 mDummyBatchStream = *newStream;
2629 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002630 int bufferCount = MAX_INFLIGHT_REQUESTS;
2631 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2632 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002633 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2634 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2635 bufferCount = m_bIs4KVideo ?
2636 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2637 }
2638
Thierry Strudel2896d122017-02-23 19:18:03 -08002639 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002640 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2641 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002642 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002643 this,
2644 newStream,
2645 (cam_stream_type_t)
2646 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2647 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2648 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002649 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002650 if (channel == NULL) {
2651 LOGE("allocation of channel failed");
2652 pthread_mutex_unlock(&mMutex);
2653 return -ENOMEM;
2654 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002655 /* disable UBWC for preview, though supported,
2656 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002657 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002658 (previewSize.width == (int32_t)videoWidth)&&
2659 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002660 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002661 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002662 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002663 /* When goog_zoom is linked to the preview or video stream,
2664 * disable ubwc to the linked stream */
2665 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2666 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2667 channel->setUBWCEnabled(false);
2668 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002669 newStream->max_buffers = channel->getNumBuffers();
2670 newStream->priv = channel;
2671 }
2672 break;
2673 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2674 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2675 mChannelHandle,
2676 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002677 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002678 this,
2679 newStream,
2680 (cam_stream_type_t)
2681 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2682 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2683 mMetadataChannel);
2684 if (channel == NULL) {
2685 LOGE("allocation of YUV channel failed");
2686 pthread_mutex_unlock(&mMutex);
2687 return -ENOMEM;
2688 }
2689 newStream->max_buffers = channel->getNumBuffers();
2690 newStream->priv = channel;
2691 break;
2692 }
2693 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2694 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002695 case HAL_PIXEL_FORMAT_RAW10: {
2696 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2697 (HAL_DATASPACE_DEPTH != newStream->data_space))
2698 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002699 mRawChannel = new QCamera3RawChannel(
2700 mCameraHandle->camera_handle, mChannelHandle,
2701 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002702 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002703 this, newStream,
2704 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002705 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 if (mRawChannel == NULL) {
2707 LOGE("allocation of raw channel failed");
2708 pthread_mutex_unlock(&mMutex);
2709 return -ENOMEM;
2710 }
2711 newStream->max_buffers = mRawChannel->getNumBuffers();
2712 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2713 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002714 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002715 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002716 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2717 mDepthChannel = new QCamera3DepthChannel(
2718 mCameraHandle->camera_handle, mChannelHandle,
2719 mCameraHandle->ops, NULL, NULL, &padding_info,
2720 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2721 mMetadataChannel);
2722 if (NULL == mDepthChannel) {
2723 LOGE("Allocation of depth channel failed");
2724 pthread_mutex_unlock(&mMutex);
2725 return NO_MEMORY;
2726 }
2727 newStream->priv = mDepthChannel;
2728 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2729 } else {
2730 // Max live snapshot inflight buffer is 1. This is to mitigate
2731 // frame drop issues for video snapshot. The more buffers being
2732 // allocated, the more frame drops there are.
2733 mPictureChannel = new QCamera3PicChannel(
2734 mCameraHandle->camera_handle, mChannelHandle,
2735 mCameraHandle->ops, captureResultCb,
2736 setBufferErrorStatus, &padding_info, this, newStream,
2737 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2738 m_bIs4KVideo, isZsl, mMetadataChannel,
2739 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2740 if (mPictureChannel == NULL) {
2741 LOGE("allocation of channel failed");
2742 pthread_mutex_unlock(&mMutex);
2743 return -ENOMEM;
2744 }
2745 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2746 newStream->max_buffers = mPictureChannel->getNumBuffers();
2747 mPictureChannel->overrideYuvSize(
2748 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2749 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002750 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002751 break;
2752
2753 default:
2754 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002755 pthread_mutex_unlock(&mMutex);
2756 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002757 }
2758 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2759 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2760 } else {
2761 LOGE("Error, Unknown stream type");
2762 pthread_mutex_unlock(&mMutex);
2763 return -EINVAL;
2764 }
2765
2766 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002767 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002768 // Here we only care whether it's EIS3 or not
2769 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2770 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2771 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2772 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002773 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002774 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002775 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002776 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2777 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2778 }
2779 }
2780
2781 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2782 it != mStreamInfo.end(); it++) {
2783 if ((*it)->stream == newStream) {
2784 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2785 break;
2786 }
2787 }
2788 } else {
2789 // Channel already exists for this stream
2790 // Do nothing for now
2791 }
2792 padding_info = gCamCapability[mCameraId]->padding_info;
2793
Emilian Peev7650c122017-01-19 08:24:33 -08002794 /* Do not add entries for input&depth stream in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002795 * since there is no real stream associated with it
2796 */
Emilian Peev7650c122017-01-19 08:24:33 -08002797 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002798 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2799 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002800 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002801 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002802 }
2803
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002804 // Let buffer dispatcher know the configured streams.
2805 mOutputBufferDispatcher.configureStreams(streamList);
2806
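// onlyRaw stays true only in the vendor RAW-only operation mode; otherwise the
// analysis and support channels below are created as usual.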
Thierry Strudel2896d122017-02-23 19:18:03 -08002807 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2808 onlyRaw = false;
2809 }
2810
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002811 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002812 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002813 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002814 cam_analysis_info_t analysisInfo;
2815 int32_t ret = NO_ERROR;
2816 ret = mCommon.getAnalysisInfo(
2817 FALSE,
2818 analysisFeatureMask,
2819 &analysisInfo);
2820 if (ret == NO_ERROR) {
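// A Y-only analysis format maps to the mono (Y) color filter arrangement when
// deciding PAAF support below.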
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002821 cam_color_filter_arrangement_t analysis_color_arrangement =
2822 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2823 CAM_FILTER_ARRANGEMENT_Y :
2824 gCamCapability[mCameraId]->color_arrangement);
2825 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2826 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002827 cam_dimension_t analysisDim;
2828 analysisDim = mCommon.getMatchingDimension(previewSize,
2829 analysisInfo.analysis_recommended_res);
2830
2831 mAnalysisChannel = new QCamera3SupportChannel(
2832 mCameraHandle->camera_handle,
2833 mChannelHandle,
2834 mCameraHandle->ops,
2835 &analysisInfo.analysis_padding_info,
2836 analysisFeatureMask,
2837 CAM_STREAM_TYPE_ANALYSIS,
2838 &analysisDim,
2839 (analysisInfo.analysis_format
2840 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2841 : CAM_FORMAT_YUV_420_NV21),
2842 analysisInfo.hw_analysis_supported,
2843 gCamCapability[mCameraId]->color_arrangement,
2844 this,
2845 0); // force buffer count to 0
2846 } else {
2847 LOGW("getAnalysisInfo failed, ret = %d", ret);
2848 }
2849 if (!mAnalysisChannel) {
2850 LOGW("Analysis channel cannot be created");
2851 }
2852 }
2853
Thierry Strudel3d639192016-09-09 11:52:26 -07002854 //RAW DUMP channel
2855 if (mEnableRawDump && isRawStreamRequested == false){
2856 cam_dimension_t rawDumpSize;
2857 rawDumpSize = getMaxRawSize(mCameraId);
2858 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2859 setPAAFSupport(rawDumpFeatureMask,
2860 CAM_STREAM_TYPE_RAW,
2861 gCamCapability[mCameraId]->color_arrangement);
2862 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2863 mChannelHandle,
2864 mCameraHandle->ops,
2865 rawDumpSize,
2866 &padding_info,
2867 this, rawDumpFeatureMask);
2868 if (!mRawDumpChannel) {
2869 LOGE("Raw Dump channel cannot be created");
2870 pthread_mutex_unlock(&mMutex);
2871 return -ENOMEM;
2872 }
2873 }
2874
Thierry Strudel3d639192016-09-09 11:52:26 -07002875 if (mAnalysisChannel) {
2876 cam_analysis_info_t analysisInfo;
2877 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2878 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2879 CAM_STREAM_TYPE_ANALYSIS;
2880 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2881 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002882 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002883 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2884 &analysisInfo);
2885 if (rc != NO_ERROR) {
2886 LOGE("getAnalysisInfo failed, ret = %d", rc);
2887 pthread_mutex_unlock(&mMutex);
2888 return rc;
2889 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002890 cam_color_filter_arrangement_t analysis_color_arrangement =
2891 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2892 CAM_FILTER_ARRANGEMENT_Y :
2893 gCamCapability[mCameraId]->color_arrangement);
2894 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2895 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2896 analysis_color_arrangement);
2897
Thierry Strudel3d639192016-09-09 11:52:26 -07002898 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002899 mCommon.getMatchingDimension(previewSize,
2900 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002901 mStreamConfigInfo.num_streams++;
2902 }
2903
Thierry Strudel2896d122017-02-23 19:18:03 -08002904 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002905 cam_analysis_info_t supportInfo;
2906 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2907 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2908 setPAAFSupport(callbackFeatureMask,
2909 CAM_STREAM_TYPE_CALLBACK,
2910 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002911 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002912 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002913 if (ret != NO_ERROR) {
2914 /* Ignore the error for Mono camera
2915 * because the PAAF bit mask is only set
2916 * for CAM_STREAM_TYPE_ANALYSIS stream type
2917 */
2918 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2919 LOGW("getAnalysisInfo failed, ret = %d", ret);
2920 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002921 }
2922 mSupportChannel = new QCamera3SupportChannel(
2923 mCameraHandle->camera_handle,
2924 mChannelHandle,
2925 mCameraHandle->ops,
2926 &gCamCapability[mCameraId]->padding_info,
2927 callbackFeatureMask,
2928 CAM_STREAM_TYPE_CALLBACK,
2929 &QCamera3SupportChannel::kDim,
2930 CAM_FORMAT_YUV_420_NV21,
2931 supportInfo.hw_analysis_supported,
2932 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002933 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002934 if (!mSupportChannel) {
2935 LOGE("dummy channel cannot be created");
2936 pthread_mutex_unlock(&mMutex);
2937 return -ENOMEM;
2938 }
2939 }
2940
2941 if (mSupportChannel) {
2942 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2943 QCamera3SupportChannel::kDim;
2944 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2945 CAM_STREAM_TYPE_CALLBACK;
2946 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2947 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2948 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2949 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2950 gCamCapability[mCameraId]->color_arrangement);
2951 mStreamConfigInfo.num_streams++;
2952 }
2953
2954 if (mRawDumpChannel) {
2955 cam_dimension_t rawSize;
2956 rawSize = getMaxRawSize(mCameraId);
2957 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2958 rawSize;
2959 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2960 CAM_STREAM_TYPE_RAW;
2961 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2962 CAM_QCOM_FEATURE_NONE;
2963 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2964 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2965 gCamCapability[mCameraId]->color_arrangement);
2966 mStreamConfigInfo.num_streams++;
2967 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002968
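// RAW source stream for HDR+ processing; it uses the maximum RAW size and
// mirrors the RAW dump stream entry above.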
2969 if (mHdrPlusRawSrcChannel) {
2970 cam_dimension_t rawSize;
2971 rawSize = getMaxRawSize(mCameraId);
2972 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2973 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2974 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2975 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2976 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2977 gCamCapability[mCameraId]->color_arrangement);
2978 mStreamConfigInfo.num_streams++;
2979 }
2980
Thierry Strudel3d639192016-09-09 11:52:26 -07002981 /* In HFR mode, if video stream is not added, create a dummy channel so that
2982 * ISP can create a batch mode even for preview only case. This channel is
2983 * never 'start'ed (no stream-on), it is only 'initialized' */
2984 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2985 !m_bIsVideo) {
2986 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2987 setPAAFSupport(dummyFeatureMask,
2988 CAM_STREAM_TYPE_VIDEO,
2989 gCamCapability[mCameraId]->color_arrangement);
2990 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2991 mChannelHandle,
2992 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002993 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002994 this,
2995 &mDummyBatchStream,
2996 CAM_STREAM_TYPE_VIDEO,
2997 dummyFeatureMask,
2998 mMetadataChannel);
2999 if (NULL == mDummyBatchChannel) {
3000 LOGE("creation of mDummyBatchChannel failed."
3001 "Preview will use non-hfr sensor mode ");
3002 }
3003 }
3004 if (mDummyBatchChannel) {
3005 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3006 mDummyBatchStream.width;
3007 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3008 mDummyBatchStream.height;
3009 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3010 CAM_STREAM_TYPE_VIDEO;
3011 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3012 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3013 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3014 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3015 gCamCapability[mCameraId]->color_arrangement);
3016 mStreamConfigInfo.num_streams++;
3017 }
3018
3019 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3020 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003021 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003022 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003023
3024 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3025 for (pendingRequestIterator i = mPendingRequestsList.begin();
3026 i != mPendingRequestsList.end();) {
3027 i = erasePendingRequest(i);
3028 }
3029 mPendingFrameDropList.clear();
3030 // Initialize/Reset the pending buffers list
3031 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3032 req.mPendingBufferList.clear();
3033 }
3034 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003035 mExpectedInflightDuration = 0;
3036 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003037
Thierry Strudel3d639192016-09-09 11:52:26 -07003038 mCurJpegMeta.clear();
3039 //Get min frame duration for this streams configuration
3040 deriveMinFrameDuration();
3041
Chien-Yu Chenee335912017-02-09 17:53:20 -08003042 mFirstPreviewIntentSeen = false;
3043
3044 // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003045 {
3046 Mutex::Autolock l(gHdrPlusClientLock);
3047 disableHdrPlusModeLocked();
3048 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003049
Thierry Strudel3d639192016-09-09 11:52:26 -07003050 // Update state
3051 mState = CONFIGURED;
3052
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003053 mFirstMetadataCallback = true;
3054
Thierry Strudel3d639192016-09-09 11:52:26 -07003055 pthread_mutex_unlock(&mMutex);
3056
3057 return rc;
3058}
3059
3060/*===========================================================================
3061 * FUNCTION : validateCaptureRequest
3062 *
3063 * DESCRIPTION: validate a capture request from camera service
3064 *
3065 * PARAMETERS :
3066 * @request : request from framework to process
3067 *
3068 * RETURN :
3069 *
3070 *==========================================================================*/
3071int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003072 camera3_capture_request_t *request,
3073 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003074{
3075 ssize_t idx = 0;
3076 const camera3_stream_buffer_t *b;
3077 CameraMetadata meta;
3078
3079 /* Sanity check the request */
3080 if (request == NULL) {
3081 LOGE("NULL capture request");
3082 return BAD_VALUE;
3083 }
3084
3085 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3086 /*settings cannot be null for the first request*/
3087 return BAD_VALUE;
3088 }
3089
3090 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003091 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3092 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003093 LOGE("Request %d: No output buffers provided!",
3094 frameNumber);
3095 return BAD_VALUE;
3096 }
3097 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3098 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3099 request->num_output_buffers, MAX_NUM_STREAMS);
3100 return BAD_VALUE;
3101 }
3102 if (request->input_buffer != NULL) {
3103 b = request->input_buffer;
3104 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3105 LOGE("Request %d: Buffer %ld: Status not OK!",
3106 frameNumber, (long)idx);
3107 return BAD_VALUE;
3108 }
3109 if (b->release_fence != -1) {
3110 LOGE("Request %d: Buffer %ld: Has a release fence!",
3111 frameNumber, (long)idx);
3112 return BAD_VALUE;
3113 }
3114 if (b->buffer == NULL) {
3115 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3116 frameNumber, (long)idx);
3117 return BAD_VALUE;
3118 }
3119 }
3120
3121 // Validate all buffers
3122 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003123 if (b == NULL) {
3124 return BAD_VALUE;
3125 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003126 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003127 QCamera3ProcessingChannel *channel =
3128 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3129 if (channel == NULL) {
3130 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3131 frameNumber, (long)idx);
3132 return BAD_VALUE;
3133 }
3134 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3135 LOGE("Request %d: Buffer %ld: Status not OK!",
3136 frameNumber, (long)idx);
3137 return BAD_VALUE;
3138 }
3139 if (b->release_fence != -1) {
3140 LOGE("Request %d: Buffer %ld: Has a release fence!",
3141 frameNumber, (long)idx);
3142 return BAD_VALUE;
3143 }
3144 if (b->buffer == NULL) {
3145 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3146 frameNumber, (long)idx);
3147 return BAD_VALUE;
3148 }
3149 if (*(b->buffer) == NULL) {
3150 LOGE("Request %d: Buffer %ld: NULL private handle!",
3151 frameNumber, (long)idx);
3152 return BAD_VALUE;
3153 }
3154 idx++;
3155 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003156 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003157 return NO_ERROR;
3158}
3159
3160/*===========================================================================
3161 * FUNCTION : deriveMinFrameDuration
3162 *
3163 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3164 * on currently configured streams.
3165 *
3166 * PARAMETERS : NONE
3167 *
3168 * RETURN : NONE
3169 *
3170 *==========================================================================*/
3171void QCamera3HardwareInterface::deriveMinFrameDuration()
3172{
3173 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003174 bool hasRaw = false;
3175
3176 mMinRawFrameDuration = 0;
3177 mMinJpegFrameDuration = 0;
3178 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003179
3180 maxJpegDim = 0;
3181 maxProcessedDim = 0;
3182 maxRawDim = 0;
3183
3184 // Figure out maximum jpeg, processed, and raw dimensions
3185 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3186 it != mStreamInfo.end(); it++) {
3187
3188 // Input stream doesn't have valid stream_type
3189 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3190 continue;
3191
3192 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3193 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3194 if (dimension > maxJpegDim)
3195 maxJpegDim = dimension;
3196 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3197 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3198 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003199 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003200 if (dimension > maxRawDim)
3201 maxRawDim = dimension;
3202 } else {
3203 if (dimension > maxProcessedDim)
3204 maxProcessedDim = dimension;
3205 }
3206 }
3207
3208 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3209 MAX_SIZES_CNT);
3210
3211 //Assume all jpeg dimensions are in processed dimensions.
3212 if (maxJpegDim > maxProcessedDim)
3213 maxProcessedDim = maxJpegDim;
3214 //Find the smallest raw dimension that is greater or equal to jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003215 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003216 maxRawDim = INT32_MAX;
3217
3218 for (size_t i = 0; i < count; i++) {
3219 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3220 gCamCapability[mCameraId]->raw_dim[i].height;
3221 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3222 maxRawDim = dimension;
3223 }
3224 }
3225
3226 //Find minimum durations for processed, jpeg, and raw
3227 for (size_t i = 0; i < count; i++) {
3228 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3229 gCamCapability[mCameraId]->raw_dim[i].height) {
3230 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3231 break;
3232 }
3233 }
3234 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3235 for (size_t i = 0; i < count; i++) {
3236 if (maxProcessedDim ==
3237 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3238 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3239 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3240 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3241 break;
3242 }
3243 }
3244}
3245
3246/*===========================================================================
3247 * FUNCTION : getMinFrameDuration
3248 *
3249 * DESCRIPTION: get minimum frame duration based on the minimum frame durations
3250 * of the currently configured streams and the current request configuration.
3251 *
3252 * PARAMETERS : @request: request sent by the frameworks
3253 *
3254 * RETURN : min frame duration for a particular request
3255 *
3256 *==========================================================================*/
3257int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3258{
3259 bool hasJpegStream = false;
3260 bool hasRawStream = false;
3261 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3262 const camera3_stream_t *stream = request->output_buffers[i].stream;
3263 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3264 hasJpegStream = true;
3265 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3266 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3267 stream->format == HAL_PIXEL_FORMAT_RAW16)
3268 hasRawStream = true;
3269 }
3270
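// Return the largest of the configured minimum durations; the JPEG minimum is
// only factored in when this request actually includes a BLOB (JPEG) buffer.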
3271 if (!hasJpegStream)
3272 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3273 else
3274 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3275}
3276
3277/*===========================================================================
3278 * FUNCTION : handleBuffersDuringFlushLock
3279 *
3280 * DESCRIPTION: Account for buffers returned from back-end during flush
3281 * This function is executed while mMutex is held by the caller.
3282 *
3283 * PARAMETERS :
3284 * @buffer: image buffer for the callback
3285 *
3286 * RETURN :
3287 *==========================================================================*/
3288void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3289{
3290 bool buffer_found = false;
3291 for (List<PendingBuffersInRequest>::iterator req =
3292 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3293 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3294 for (List<PendingBufferInfo>::iterator i =
3295 req->mPendingBufferList.begin();
3296 i != req->mPendingBufferList.end(); i++) {
3297 if (i->buffer == buffer->buffer) {
3298 mPendingBuffersMap.numPendingBufsAtFlush--;
3299 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3300 buffer->buffer, req->frame_number,
3301 mPendingBuffersMap.numPendingBufsAtFlush);
3302 buffer_found = true;
3303 break;
3304 }
3305 }
3306 if (buffer_found) {
3307 break;
3308 }
3309 }
3310 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3311 //signal the flush()
3312 LOGD("All buffers returned to HAL. Continue flush");
3313 pthread_cond_signal(&mBuffersCond);
3314 }
3315}
3316
Thierry Strudel3d639192016-09-09 11:52:26 -07003317/*===========================================================================
3318 * FUNCTION : handleBatchMetadata
3319 *
3320 * DESCRIPTION: Handles metadata buffer callback in batch mode
3321 *
3322 * PARAMETERS : @metadata_buf: metadata buffer
3323 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3324 * the meta buf in this method
3325 *
3326 * RETURN :
3327 *
3328 *==========================================================================*/
3329void QCamera3HardwareInterface::handleBatchMetadata(
3330 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3331{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003332 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003333
3334 if (NULL == metadata_buf) {
3335 LOGE("metadata_buf is NULL");
3336 return;
3337 }
3338 /* In batch mode, the metadata will contain the frame number and timestamp of
3339 * the last frame in the batch. Eg: a batch containing buffers from request
3340 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3341 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3342 * multiple process_capture_results */
3343 metadata_buffer_t *metadata =
3344 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3345 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3346 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3347 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3348 uint32_t frame_number = 0, urgent_frame_number = 0;
3349 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3350 bool invalid_metadata = false;
3351 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3352 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003353 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003354
3355 int32_t *p_frame_number_valid =
3356 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3357 uint32_t *p_frame_number =
3358 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3359 int64_t *p_capture_time =
3360 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3361 int32_t *p_urgent_frame_number_valid =
3362 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3363 uint32_t *p_urgent_frame_number =
3364 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3365
3366 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3367 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3368 (NULL == p_urgent_frame_number)) {
3369 LOGE("Invalid metadata");
3370 invalid_metadata = true;
3371 } else {
3372 frame_number_valid = *p_frame_number_valid;
3373 last_frame_number = *p_frame_number;
3374 last_frame_capture_time = *p_capture_time;
3375 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3376 last_urgent_frame_number = *p_urgent_frame_number;
3377 }
3378
3379 /* In batchmode, when no video buffers are requested, set_parms are sent
3380 * for every capture_request. The difference between consecutive urgent
3381 * frame numbers and frame numbers should be used to interpolate the
3382 * corresponding frame numbers and time stamps */
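/* Example (illustrative): if this batch reports last_frame_number = 8 and the
 * batch started at first_frame_number = 5, then frameNumDiff = 4 and the loop
 * below emits per-frame results for frames 5..8, with timestamps spaced by
 * 1/mHFRVideoFps seconds. */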
3383 pthread_mutex_lock(&mMutex);
3384 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003385 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3386 if(idx < 0) {
3387 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3388 last_urgent_frame_number);
3389 mState = ERROR;
3390 pthread_mutex_unlock(&mMutex);
3391 return;
3392 }
3393 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003394 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3395 first_urgent_frame_number;
3396
3397 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3398 urgent_frame_number_valid,
3399 first_urgent_frame_number, last_urgent_frame_number);
3400 }
3401
3402 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003403 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3404 if(idx < 0) {
3405 LOGE("Invalid frame number received: %d. Irrecoverable error",
3406 last_frame_number);
3407 mState = ERROR;
3408 pthread_mutex_unlock(&mMutex);
3409 return;
3410 }
3411 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003412 frameNumDiff = last_frame_number + 1 -
3413 first_frame_number;
3414 mPendingBatchMap.removeItem(last_frame_number);
3415
3416 LOGD("frm: valid: %d frm_num: %d - %d",
3417 frame_number_valid,
3418 first_frame_number, last_frame_number);
3419
3420 }
3421 pthread_mutex_unlock(&mMutex);
3422
3423 if (urgent_frame_number_valid || frame_number_valid) {
3424 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3425 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3426 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3427 urgentFrameNumDiff, last_urgent_frame_number);
3428 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3429 LOGE("frameNumDiff: %d frameNum: %d",
3430 frameNumDiff, last_frame_number);
3431 }
3432
3433 for (size_t i = 0; i < loopCount; i++) {
3434 /* handleMetadataWithLock is called even for invalid_metadata for
3435 * pipeline depth calculation */
3436 if (!invalid_metadata) {
3437 /* Infer frame number. Batch metadata contains frame number of the
3438 * last frame */
3439 if (urgent_frame_number_valid) {
3440 if (i < urgentFrameNumDiff) {
3441 urgent_frame_number =
3442 first_urgent_frame_number + i;
3443 LOGD("inferred urgent frame_number: %d",
3444 urgent_frame_number);
3445 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3446 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3447 } else {
3448 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3449 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3450 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3451 }
3452 }
3453
3454 /* Infer frame number. Batch metadata contains frame number of the
3455 * last frame */
3456 if (frame_number_valid) {
3457 if (i < frameNumDiff) {
3458 frame_number = first_frame_number + i;
3459 LOGD("inferred frame_number: %d", frame_number);
3460 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3461 CAM_INTF_META_FRAME_NUMBER, frame_number);
3462 } else {
3463 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3464 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3465 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3466 }
3467 }
3468
3469 if (last_frame_capture_time) {
3470 //Infer timestamp
3471 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003472 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003473 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003474 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003475 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3476 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3477 LOGD("batch capture_time: %lld, capture_time: %lld",
3478 last_frame_capture_time, capture_time);
3479 }
3480 }
3481 pthread_mutex_lock(&mMutex);
3482 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003483 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003484 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3485 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003486 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003487 pthread_mutex_unlock(&mMutex);
3488 }
3489
3490 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003491 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003492 mMetadataChannel->bufDone(metadata_buf);
3493 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003494 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003495 }
3496}
3497
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003498void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3499 camera3_error_msg_code_t errorCode)
3500{
3501 camera3_notify_msg_t notify_msg;
3502 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3503 notify_msg.type = CAMERA3_MSG_ERROR;
3504 notify_msg.message.error.error_code = errorCode;
3505 notify_msg.message.error.error_stream = NULL;
3506 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003507 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003508
3509 return;
3510}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003511
3512/*===========================================================================
3513 * FUNCTION : sendPartialMetadataWithLock
3514 *
3515 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3516 *
3517 * PARAMETERS : @metadata: metadata buffer
3518 * @requestIter: The iterator for the pending capture request for
3519 * which the partial result is being sent
3520 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3521 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003522 * @isJumpstartMetadata: Whether this is a partial metadata for
3523 * jumpstart, i.e. even though it doesn't map to a valid partial
3524 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003525 *
3526 * RETURN :
3527 *
3528 *==========================================================================*/
3529
3530void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3531 metadata_buffer_t *metadata,
3532 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003533 bool lastUrgentMetadataInBatch,
3534 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003535{
3536 camera3_capture_result_t result;
3537 memset(&result, 0, sizeof(camera3_capture_result_t));
3538
3539 requestIter->partial_result_cnt++;
3540
3541 // Extract 3A metadata
3542 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003543 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3544 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003545 // Populate metadata result
3546 result.frame_number = requestIter->frame_number;
3547 result.num_output_buffers = 0;
3548 result.output_buffers = NULL;
3549 result.partial_result = requestIter->partial_result_cnt;
3550
3551 {
3552 Mutex::Autolock l(gHdrPlusClientLock);
3553 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3554 // Notify HDR+ client about the partial metadata.
3555 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3556 result.partial_result == PARTIAL_RESULT_COUNT);
3557 }
3558 }
3559
3560 orchestrateResult(&result);
3561 LOGD("urgent frame_number = %u", result.frame_number);
3562 free_camera_metadata((camera_metadata_t *)result.result);
3563}
3564
Thierry Strudel3d639192016-09-09 11:52:26 -07003565/*===========================================================================
3566 * FUNCTION : handleMetadataWithLock
3567 *
3568 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3569 *
3570 * PARAMETERS : @metadata_buf: metadata buffer
3571 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3572 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003573 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3574 * last urgent metadata in a batch. Always true for non-batch mode
3575 * @lastMetadataInBatch: Boolean to indicate whether this is the
3576 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003577 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3578 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003579 *
3580 * RETURN :
3581 *
3582 *==========================================================================*/
3583void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003584 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003585 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3586 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003587{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003588 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003589 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3590 //during flush do not send metadata from this thread
3591 LOGD("not sending metadata during flush or when mState is error");
3592 if (free_and_bufdone_meta_buf) {
3593 mMetadataChannel->bufDone(metadata_buf);
3594 free(metadata_buf);
3595 }
3596 return;
3597 }
3598
3599 //not in flush
3600 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3601 int32_t frame_number_valid, urgent_frame_number_valid;
3602 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003603 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003604 nsecs_t currentSysTime;
3605
3606 int32_t *p_frame_number_valid =
3607 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3608 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3609 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003610 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003611 int32_t *p_urgent_frame_number_valid =
3612 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3613 uint32_t *p_urgent_frame_number =
3614 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3615 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3616 metadata) {
3617 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3618 *p_frame_number_valid, *p_frame_number);
3619 }
3620
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003621 camera_metadata_t *resultMetadata = nullptr;
3622
Thierry Strudel3d639192016-09-09 11:52:26 -07003623 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3624 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3625 LOGE("Invalid metadata");
3626 if (free_and_bufdone_meta_buf) {
3627 mMetadataChannel->bufDone(metadata_buf);
3628 free(metadata_buf);
3629 }
3630 goto done_metadata;
3631 }
3632 frame_number_valid = *p_frame_number_valid;
3633 frame_number = *p_frame_number;
3634 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003635 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003636 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3637 urgent_frame_number = *p_urgent_frame_number;
3638 currentSysTime = systemTime(CLOCK_MONOTONIC);
3639
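// When the sensor timestamp source is not calibrated, estimate the
// BOOTTIME - MONOTONIC clock offset (keeping the sample taken with the smallest
// measurement window over a few tries) and subtract it from capture_time so the
// timestamp is shifted into the MONOTONIC clock domain.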
Jason Lee603176d2017-05-31 11:43:27 -07003640 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3641 const int tries = 3;
3642 nsecs_t bestGap, measured;
3643 for (int i = 0; i < tries; ++i) {
3644 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3645 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3646 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3647 const nsecs_t gap = tmono2 - tmono;
3648 if (i == 0 || gap < bestGap) {
3649 bestGap = gap;
3650 measured = tbase - ((tmono + tmono2) >> 1);
3651 }
3652 }
3653 capture_time -= measured;
3654 }
3655
Thierry Strudel3d639192016-09-09 11:52:26 -07003656 // Detect if buffers from any requests are overdue
3657 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003658 int64_t timeout;
3659 {
3660 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3661 // If there is a pending HDR+ request, the following requests may be blocked until the
3662 // HDR+ request is done. So allow a longer timeout.
3663 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3664 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003665 if (timeout < mExpectedInflightDuration) {
3666 timeout = mExpectedInflightDuration;
3667 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003668 }
3669
3670 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003671 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003672 assert(missed.stream->priv);
3673 if (missed.stream->priv) {
3674 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3675 assert(ch->mStreams[0]);
3676 if (ch->mStreams[0]) {
3677 LOGE("Cancel missing frame = %d, buffer = %p,"
3678 "stream type = %d, stream format = %d",
3679 req.frame_number, missed.buffer,
3680 ch->mStreams[0]->getMyType(), missed.stream->format);
3681 ch->timeoutFrame(req.frame_number);
3682 }
3683 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003684 }
3685 }
3686 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003687 //For the very first metadata callback, regardless whether it contains valid
3688 //frame number, send the partial metadata for the jumpstarting requests.
3689 //Note that this has to be done even if the metadata doesn't contain valid
3690 //urgent frame number, because in the case only 1 request is ever submitted
3691 //to HAL, there won't be subsequent valid urgent frame number.
3692 if (mFirstMetadataCallback) {
3693 for (pendingRequestIterator i =
3694 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3695 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003696 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3697 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003698 }
3699 }
3700 mFirstMetadataCallback = false;
3701 }
3702
Thierry Strudel3d639192016-09-09 11:52:26 -07003703 //Partial result on process_capture_result for timestamp
3704 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003705 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003706
3707 //Received an urgent Frame Number, handle it
3708 //using partial results
3709 for (pendingRequestIterator i =
3710 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3711 LOGD("Iterator Frame = %d urgent frame = %d",
3712 i->frame_number, urgent_frame_number);
3713
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003714 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003715 (i->partial_result_cnt == 0)) {
3716 LOGE("Error: HAL missed urgent metadata for frame number %d",
3717 i->frame_number);
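// Advance the partial result count so the missed urgent metadata is accounted
// for and this request is not flagged again on later metadata callbacks.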
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003718 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003719 }
3720
3721 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003722 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003723 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3724 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003725 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3726 // Instant AEC settled for this frame.
3727 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3728 mInstantAECSettledFrameNumber = urgent_frame_number;
3729 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003730 break;
3731 }
3732 }
3733 }
3734
3735 if (!frame_number_valid) {
3736 LOGD("Not a valid normal frame number, used as SOF only");
3737 if (free_and_bufdone_meta_buf) {
3738 mMetadataChannel->bufDone(metadata_buf);
3739 free(metadata_buf);
3740 }
3741 goto done_metadata;
3742 }
3743 LOGH("valid frame_number = %u, capture_time = %lld",
3744 frame_number, capture_time);
3745
Emilian Peev4e0fe952017-06-30 12:40:09 -07003746 handleDepthDataLocked(metadata->depth_data, frame_number,
3747 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003748
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003749 // Check whether any stream buffer corresponding to this frame was dropped.
3750 // If dropped, send ERROR_BUFFER for the corresponding stream.
3751 // Also, if instant AEC is enabled, frames need to be dropped until AEC has settled.
3752 for (auto & pendingRequest : mPendingRequestsList) {
3753 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3754 mInstantAECSettledFrameNumber)) {
3755 camera3_notify_msg_t notify_msg = {};
3756 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003757 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003758 QCamera3ProcessingChannel *channel =
3759 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003760 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003761 if (p_cam_frame_drop) {
3762 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003763 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003764 // Got the stream ID for drop frame.
3765 dropFrame = true;
3766 break;
3767 }
3768 }
3769 } else {
3770 // This is instant AEC case.
3771 // For instant AEC, drop the stream until AEC is settled.
3772 dropFrame = true;
3773 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003774
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003775 if (dropFrame) {
3776 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3777 if (p_cam_frame_drop) {
3778 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003779 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003780 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003781 } else {
3782 // For instant AEC, inform frame drop and frame number
3783 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3784 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003785 pendingRequest.frame_number, streamID,
3786 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003787 }
3788 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003789 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003790 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003791 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003792 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003793 if (p_cam_frame_drop) {
3794 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003795 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003796 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003797 } else {
3798 // For instant AEC, inform frame drop and frame number
3799 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3800 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003801 pendingRequest.frame_number, streamID,
3802 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003803 }
3804 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003805 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003806 PendingFrameDrop.stream_ID = streamID;
3807 // Add the Frame drop info to mPendingFrameDropList
3808 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003809 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003810 }
3811 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003812 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003813
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003814 for (auto & pendingRequest : mPendingRequestsList) {
3815 // Find the pending request with the frame number.
3816 if (pendingRequest.frame_number == frame_number) {
3817 // Update the sensor timestamp.
3818 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003819
Thierry Strudel3d639192016-09-09 11:52:26 -07003820
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003821 /* Set the timestamp in display metadata so that clients aware of
3822 private_handle such as VT can use this un-modified timestamps.
3823 Camera framework is unaware of this timestamp and cannot change this */
Jason Lee603176d2017-05-31 11:43:27 -07003824 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003825
Thierry Strudel3d639192016-09-09 11:52:26 -07003826 // Find channel requiring metadata, meaning internal offline postprocess
3827 // is needed.
3828 //TODO: for now, we don't support two streams requiring metadata at the same time.
3829 // (because we are not making copies, and metadata buffer is not reference counted.
3830 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003831 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3832 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003833 if (iter->need_metadata) {
3834 internalPproc = true;
3835 QCamera3ProcessingChannel *channel =
3836 (QCamera3ProcessingChannel *)iter->stream->priv;
3837 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003838 if(p_is_metabuf_queued != NULL) {
3839 *p_is_metabuf_queued = true;
3840 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003841 break;
3842 }
3843 }
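            // Streams requested internally (e.g. the bracketed HDR snapshots issued by
            // orchestrateRequest) may also need this metadata for offline reprocessing.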
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003844 for (auto itr = pendingRequest.internalRequestList.begin();
3845 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003846 if (itr->need_metadata) {
3847 internalPproc = true;
3848 QCamera3ProcessingChannel *channel =
3849 (QCamera3ProcessingChannel *)itr->stream->priv;
3850 channel->queueReprocMetadata(metadata_buf);
3851 break;
3852 }
3853 }
3854
Thierry Strudel54dc9782017-02-15 12:12:10 -08003855 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003856
3857 bool *enableZsl = nullptr;
3858 if (gExposeEnableZslKey) {
3859 enableZsl = &pendingRequest.enableZsl;
3860 }
3861
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003862 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003863 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003864 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003865
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003866 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003867
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003868 if (pendingRequest.blob_request) {
3869 //Dump tuning metadata if enabled and available
3870 char prop[PROPERTY_VALUE_MAX];
3871 memset(prop, 0, sizeof(prop));
3872 property_get("persist.camera.dumpmetadata", prop, "0");
3873 int32_t enabled = atoi(prop);
3874 if (enabled && metadata->is_tuning_params_valid) {
3875 dumpMetadataToFile(metadata->tuning_params,
3876 mMetaFrameCount,
3877 enabled,
3878 "Snapshot",
3879 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003880 }
3881 }
3882
3883 if (!internalPproc) {
3884 LOGD("couldn't find need_metadata for this metadata");
3885 // Return metadata buffer
3886 if (free_and_bufdone_meta_buf) {
3887 mMetadataChannel->bufDone(metadata_buf);
3888 free(metadata_buf);
3889 }
3890 }
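            // When an internal reprocess consumes the metadata, ownership passes to the
            // reprocess channel via queueReprocMetadata(), so it must not be freed here.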
Thierry Strudel3d639192016-09-09 11:52:26 -07003891
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003892 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003893 }
3894 }
3895
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003896 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3897
3898 // Try to send out capture result metadata.
3899 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003900 return;
3901
Thierry Strudel3d639192016-09-09 11:52:26 -07003902done_metadata:
3903 for (pendingRequestIterator i = mPendingRequestsList.begin();
3904 i != mPendingRequestsList.end() ;i++) {
3905 i->pipeline_depth++;
3906 }
3907 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3908 unblockRequestIfNecessary();
3909}
3910
3911/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003912 * FUNCTION : handleDepthDataLocked
3913 *
3914 * DESCRIPTION: Handles incoming depth data
3915 *
3916 * PARAMETERS : @depthData : Depth data
3917 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003918 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003919 *
3920 * RETURN :
3921 *
3922 *==========================================================================*/
3923void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003924 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003925 uint32_t currentFrameNumber;
3926 buffer_handle_t *depthBuffer;
3927
3928 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003929 return;
3930 }
3931
3932 camera3_stream_buffer_t resultBuffer =
3933 {.acquire_fence = -1,
3934 .release_fence = -1,
3935 .status = CAMERA3_BUFFER_STATUS_OK,
3936 .buffer = nullptr,
3937 .stream = mDepthChannel->getStream()};
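    // Drain mapped depth buffers in frame-number order up to the requested frame:
    // older buffers whose depth data will never arrive are returned as errors, and
    // the buffer matching this frame is populated with depthData (or flagged as an
    // error if the data is invalid or population fails).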
Emilian Peev7650c122017-01-19 08:24:33 -08003938 do {
3939 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3940 if (nullptr == depthBuffer) {
3941 break;
3942 }
3943
Emilian Peev7650c122017-01-19 08:24:33 -08003944 resultBuffer.buffer = depthBuffer;
3945 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003946 if (valid) {
3947 int32_t rc = mDepthChannel->populateDepthData(depthData,
3948 frameNumber);
3949 if (NO_ERROR != rc) {
3950 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3951 } else {
3952 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3953 }
Emilian Peev7650c122017-01-19 08:24:33 -08003954 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003955 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003956 }
3957 } else if (currentFrameNumber > frameNumber) {
3958 break;
3959 } else {
3960 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3961 {{currentFrameNumber, mDepthChannel->getStream(),
3962 CAMERA3_MSG_ERROR_BUFFER}}};
3963 orchestrateNotify(&notify_msg);
3964
3965 LOGE("Depth buffer for frame number: %d is missing, "
3966 "returning it with error status!", currentFrameNumber);
3967 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3968 }
3969 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003970 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003971 } while (currentFrameNumber < frameNumber);
3972}
3973
3974/*===========================================================================
3975 * FUNCTION : notifyErrorFoPendingDepthData
3976 *
3977 * DESCRIPTION: Returns error for any pending depth buffers
3978 *
3979 * PARAMETERS : depthCh - depth channel that needs to get flushed
3980 *
3981 * RETURN :
3982 *
3983 *==========================================================================*/
3984void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3985 QCamera3DepthChannel *depthCh) {
3986 uint32_t currentFrameNumber;
3987 buffer_handle_t *depthBuffer;
3988
3989 if (nullptr == depthCh) {
3990 return;
3991 }
3992
3993 camera3_notify_msg_t notify_msg =
3994 {.type = CAMERA3_MSG_ERROR,
3995 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3996 camera3_stream_buffer_t resultBuffer =
3997 {.acquire_fence = -1,
3998 .release_fence = -1,
3999 .buffer = nullptr,
4000 .stream = depthCh->getStream(),
4001 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004002
4003 while (nullptr !=
4004 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4005 depthCh->unmapBuffer(currentFrameNumber);
4006
4007 notify_msg.message.error.frame_number = currentFrameNumber;
4008 orchestrateNotify(&notify_msg);
4009
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004010 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004011 }
4012}
4013
4014/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004015 * FUNCTION : hdrPlusPerfLock
4016 *
4017 * DESCRIPTION: perf lock for HDR+ using custom intent
4018 *
4019 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4020 *
4021 * RETURN : None
4022 *
4023 *==========================================================================*/
4024void QCamera3HardwareInterface::hdrPlusPerfLock(
4025 mm_camera_super_buf_t *metadata_buf)
4026{
4027 if (NULL == metadata_buf) {
4028 LOGE("metadata_buf is NULL");
4029 return;
4030 }
4031 metadata_buffer_t *metadata =
4032 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4033 int32_t *p_frame_number_valid =
4034 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4035 uint32_t *p_frame_number =
4036 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4037
4038 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4039 LOGE("%s: Invalid metadata", __func__);
4040 return;
4041 }
4042
Wei Wang01385482017-08-03 10:49:34 -07004043 //acquire perf lock for 2 secs after the last HDR frame is captured
4044 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
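    // The perf lock is (re)acquired with this timeout whenever the current frame matches
    // the last custom-intent frame number, so the boost stays active until roughly two
    // seconds after the final HDR frame.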
Thierry Strudel3d639192016-09-09 11:52:26 -07004045 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4046 if ((p_frame_number != NULL) &&
4047 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004048 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004049 }
4050 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004051}
4052
4053/*===========================================================================
4054 * FUNCTION : handleInputBufferWithLock
4055 *
4056 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4057 *
4058 * PARAMETERS : @frame_number: frame number of the input buffer
4059 *
4060 * RETURN :
4061 *
4062 *==========================================================================*/
4063void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4064{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004065 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004066 pendingRequestIterator i = mPendingRequestsList.begin();
4067 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4068 i++;
4069 }
4070 if (i != mPendingRequestsList.end() && i->input_buffer) {
4071 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004072 CameraMetadata settings;
4073 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4074 if(i->settings) {
4075 settings = i->settings;
4076 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4077 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004078 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004079 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004080 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004081 } else {
4082 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004083 }
4084
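        // For a reprocess request the shutter timestamp is taken from the input's
        // ANDROID_SENSOR_TIMESTAMP when available, so the output carries the original
        // capture time.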
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004085 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4086 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4087 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004088
4089 camera3_capture_result result;
4090 memset(&result, 0, sizeof(camera3_capture_result));
4091 result.frame_number = frame_number;
4092 result.result = i->settings;
4093 result.input_buffer = i->input_buffer;
4094 result.partial_result = PARTIAL_RESULT_COUNT;
4095
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004096 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004097 LOGD("Input request metadata and input buffer frame_number = %u",
4098 i->frame_number);
4099 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004100
4101 // Dispatch result metadata that may be just unblocked by this reprocess result.
4102 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004103 } else {
4104 LOGE("Could not find input request for frame number %d", frame_number);
4105 }
4106}
4107
4108/*===========================================================================
4109 * FUNCTION : handleBufferWithLock
4110 *
4111 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4112 *
4113 * PARAMETERS : @buffer: image buffer for the callback
4114 * @frame_number: frame number of the image buffer
4115 *
4116 * RETURN :
4117 *
4118 *==========================================================================*/
4119void QCamera3HardwareInterface::handleBufferWithLock(
4120 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4121{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004122 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004123
4124 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4125 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4126 }
4127
Thierry Strudel3d639192016-09-09 11:52:26 -07004128 /* Nothing to be done during error state */
4129 if ((ERROR == mState) || (DEINIT == mState)) {
4130 return;
4131 }
4132 if (mFlushPerf) {
4133 handleBuffersDuringFlushLock(buffer);
4134 return;
4135 }
4136 //not in flush
4137 // Look up the pending request for this frame. For a reprocess request (input
4138 // buffer present), this buffer completes the request, so try to send out its result
4139 // metadata; the buffer itself is always handed to the output buffer dispatcher below.
4140 pendingRequestIterator i = mPendingRequestsList.begin();
4141 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4142 i++;
4143 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004144
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004145 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004146 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004147 // For a reprocessing request, try to send out result metadata.
4148 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004149 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004150 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004151
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004152 // Check if this frame was dropped.
4153 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4154 m != mPendingFrameDropList.end(); m++) {
4155 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4156 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4157 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4158 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4159 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4160 frame_number, streamID);
4161 m = mPendingFrameDropList.erase(m);
4162 break;
4163 }
4164 }
4165
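    // Merge in any error status already recorded for this buffer
    // (e.g. marked during a flush) before handing it to the dispatcher.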
4166 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4167 LOGH("result frame_number = %d, buffer = %p",
4168 frame_number, buffer->buffer);
4169
4170 mPendingBuffersMap.removeBuf(buffer->buffer);
4171 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4172
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004173 if (mPreviewStarted == false) {
4174 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4175 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004176 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4177
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004178 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4179 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4180 mPreviewStarted = true;
4181
4182 // Set power hint for preview
4183 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4184 }
4185 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004186}
4187
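/*===========================================================================
 * FUNCTION : handlePendingResultMetadataWithLock
 *
 * DESCRIPTION: Records the result metadata for the pending request matching
 * the given frame number, updates partial-result accounting (HDR+ and
 * reprocess requests have no partial results), and then dispatches any
 * results that are now ready. mMutex must be held.
 *
 * PARAMETERS : @frameNumber : frame number of the result metadata
 * @resultMetadata: result metadata for the frame; nullptr for a reprocess
 * request, whose settings are reused as the result
 *
 * RETURN :
 *
 *==========================================================================*/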
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004188void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004189 const camera_metadata_t *resultMetadata)
4190{
4191 // Find the pending request for this result metadata.
4192 auto requestIter = mPendingRequestsList.begin();
4193 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4194 requestIter++;
4195 }
4196
4197 if (requestIter == mPendingRequestsList.end()) {
4198 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4199 return;
4200 }
4201
4202 // Update the result metadata
4203 requestIter->resultMetadata = resultMetadata;
4204
4205 // Check what type of request this is.
4206 bool liveRequest = false;
4207 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004208 // HDR+ request doesn't have partial results.
4209 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004210 } else if (requestIter->input_buffer != nullptr) {
4211 // Reprocessing request result is the same as settings.
4212 requestIter->resultMetadata = requestIter->settings;
4213 // Reprocessing request doesn't have partial results.
4214 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4215 } else {
4216 liveRequest = true;
4217 requestIter->partial_result_cnt++;
4218 mPendingLiveRequest--;
4219
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004220 {
4221 Mutex::Autolock l(gHdrPlusClientLock);
4222 // For a live request, send the metadata to HDR+ client.
4223 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4224 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4225 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4226 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004227 }
4228 }
4229
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004230 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4231}
4232
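/*===========================================================================
 * FUNCTION : dispatchResultMetadataWithLock
 *
 * DESCRIPTION: Walks mPendingRequestsList in frame-number order and sends out
 * every result whose metadata is ready, so shutter callbacks and result
 * metadata stay in order. When dispatching for a live request, older live
 * requests that still have no metadata are completed with
 * CAMERA3_MSG_ERROR_RESULT. mMutex must be held.
 *
 * PARAMETERS : @frameNumber : frame number of the result being dispatched
 * @isLiveRequest: true if the result belongs to a live request
 *
 * RETURN :
 *
 *==========================================================================*/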
4233void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4234 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004235 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4236 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004237 bool readyToSend = true;
4238
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004239 // Iterate through the pending requests to send out result metadata that are ready. Also if
4240 // this result metadata belongs to a live request, notify errors for previous live requests
4241 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004242 auto iter = mPendingRequestsList.begin();
4243 while (iter != mPendingRequestsList.end()) {
4244 // Check if current pending request is ready. If it's not ready, the following pending
4245 // requests are also not ready.
4246 if (readyToSend && iter->resultMetadata == nullptr) {
4247 readyToSend = false;
4248 }
4249
4250 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4251
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004252 camera3_capture_result_t result = {};
4253 result.frame_number = iter->frame_number;
4254 result.result = iter->resultMetadata;
4255 result.partial_result = iter->partial_result_cnt;
4256
4257 // If this pending buffer has result metadata, we may be able to send out shutter callback
4258 // and result metadata.
4259 if (iter->resultMetadata != nullptr) {
4260 if (!readyToSend) {
4261 // If any of the previous pending request is not ready, this pending request is
4262 // also not ready to send in order to keep shutter callbacks and result metadata
4263 // in order.
4264 iter++;
4265 continue;
4266 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004267 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004268 // If the result metadata belongs to a live request, notify errors for previous pending
4269 // live requests.
4270 mPendingLiveRequest--;
4271
4272 CameraMetadata dummyMetadata;
4273 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4274 result.result = dummyMetadata.release();
4275
4276 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004277
4278 // partial_result should be PARTIAL_RESULT_COUNT in case of
4279 // ERROR_RESULT.
4280 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4281 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004282 } else {
4283 iter++;
4284 continue;
4285 }
4286
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004287 result.output_buffers = nullptr;
4288 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004289 orchestrateResult(&result);
4290
4291 // For reprocessing, result metadata is the same as settings so do not free it here to
4292 // avoid double free.
4293 if (result.result != iter->settings) {
4294 free_camera_metadata((camera_metadata_t *)result.result);
4295 }
4296 iter->resultMetadata = nullptr;
4297 iter = erasePendingRequest(iter);
4298 }
4299
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004300 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004301 for (auto &iter : mPendingRequestsList) {
4302 // Increment pipeline depth for the following pending requests.
4303 if (iter.frame_number > frameNumber) {
4304 iter.pipeline_depth++;
4305 }
4306 }
4307 }
4308
4309 unblockRequestIfNecessary();
4310}
4311
Thierry Strudel3d639192016-09-09 11:52:26 -07004312/*===========================================================================
4313 * FUNCTION : unblockRequestIfNecessary
4314 *
4315 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4316 * that mMutex is held when this function is called.
4317 *
4318 * PARAMETERS :
4319 *
4320 * RETURN :
4321 *
4322 *==========================================================================*/
4323void QCamera3HardwareInterface::unblockRequestIfNecessary()
4324{
4325 // Unblock process_capture_request
4326 pthread_cond_signal(&mRequestCond);
4327}
4328
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004329/*===========================================================================
4330 * FUNCTION : isHdrSnapshotRequest
4331 *
4332 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4333 *
4334 * PARAMETERS : camera3 request structure
4335 *
4336 * RETURN : boolean decision variable
4337 *
4338 *==========================================================================*/
4339bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4340{
4341 if (request == NULL) {
4342 LOGE("Invalid request handle");
4343 assert(0);
4344 return false;
4345 }
4346
4347 if (!mForceHdrSnapshot) {
4348 CameraMetadata frame_settings;
4349 frame_settings = request->settings;
4350
4351 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4352 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4353 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4354 return false;
4355 }
4356 } else {
4357 return false;
4358 }
4359
4360 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4361 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4362 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4363 return false;
4364 }
4365 } else {
4366 return false;
4367 }
4368 }
4369
4370 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4371 if (request->output_buffers[i].stream->format
4372 == HAL_PIXEL_FORMAT_BLOB) {
4373 return true;
4374 }
4375 }
4376
4377 return false;
4378}
4379/*===========================================================================
4380 * FUNCTION : orchestrateRequest
4381 *
4382 * DESCRIPTION: Orchestrates a capture request from camera service
4383 *
4384 * PARAMETERS :
4385 * @request : request from framework to process
4386 *
4387 * RETURN : Error status codes
4388 *
4389 *==========================================================================*/
4390int32_t QCamera3HardwareInterface::orchestrateRequest(
4391 camera3_capture_request_t *request)
4392{
4393
4394 uint32_t originalFrameNumber = request->frame_number;
4395 uint32_t originalOutputCount = request->num_output_buffers;
4396 const camera_metadata_t *original_settings = request->settings;
4397 List<InternalRequest> internallyRequestedStreams;
4398 List<InternalRequest> emptyInternalList;
4399
4400 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4401 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4402 uint32_t internalFrameNumber;
4403 CameraMetadata modified_meta;
4404
4405
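        /* A single framework HDR snapshot request is expanded into a bracketed
         * sequence of internal requests: AE is locked and the exposure compensation
         * is stepped through the -2x, 0EV and 2x settings used below. The extra
         * requests get internally generated frame numbers, which _orchestrationDb
         * later maps back to the framework frame number (or drops, for purely
         * internal frames). Only the request carrying the original output buffers
         * is visible to the framework. */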
4406 /* Add Blob channel to list of internally requested streams */
4407 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4408 if (request->output_buffers[i].stream->format
4409 == HAL_PIXEL_FORMAT_BLOB) {
4410 InternalRequest streamRequested;
4411 streamRequested.meteringOnly = 1;
4412 streamRequested.need_metadata = 0;
4413 streamRequested.stream = request->output_buffers[i].stream;
4414 internallyRequestedStreams.push_back(streamRequested);
4415 }
4416 }
4417 request->num_output_buffers = 0;
4418 auto itr = internallyRequestedStreams.begin();
4419
4420 /* Modify setting to set compensation */
4421 modified_meta = request->settings;
4422 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4423 uint8_t aeLock = 1;
4424 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4425 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4426 camera_metadata_t *modified_settings = modified_meta.release();
4427 request->settings = modified_settings;
4428
4429 /* Capture Settling & -2x frame */
4430 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4431 request->frame_number = internalFrameNumber;
4432 processCaptureRequest(request, internallyRequestedStreams);
4433
4434 request->num_output_buffers = originalOutputCount;
4435 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4436 request->frame_number = internalFrameNumber;
4437 processCaptureRequest(request, emptyInternalList);
4438 request->num_output_buffers = 0;
4439
4440 modified_meta = modified_settings;
4441 expCompensation = 0;
4442 aeLock = 1;
4443 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4444 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4445 modified_settings = modified_meta.release();
4446 request->settings = modified_settings;
4447
4448 /* Capture Settling & 0X frame */
4449
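        /* Each exposure step below is issued twice: first as a metering-only
         * internal request (meteringOnly = 1, need_metadata = 0) so AE can settle
         * at the new compensation, then as a capture whose blob stream needs
         * metadata for offline reprocessing (need_metadata = 1). */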
4450 itr = internallyRequestedStreams.begin();
4451 if (itr == internallyRequestedStreams.end()) {
4452 LOGE("Error Internally Requested Stream list is empty");
4453 assert(0);
4454 } else {
4455 itr->need_metadata = 0;
4456 itr->meteringOnly = 1;
4457 }
4458
4459 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4460 request->frame_number = internalFrameNumber;
4461 processCaptureRequest(request, internallyRequestedStreams);
4462
4463 itr = internallyRequestedStreams.begin();
4464 if (itr == internallyRequestedStreams.end()) {
4465 ALOGE("Error Internally Requested Stream list is empty");
4466 assert(0);
4467 } else {
4468 itr->need_metadata = 1;
4469 itr->meteringOnly = 0;
4470 }
4471
4472 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4473 request->frame_number = internalFrameNumber;
4474 processCaptureRequest(request, internallyRequestedStreams);
4475
4476 /* Capture 2X frame*/
4477 modified_meta = modified_settings;
4478 expCompensation = GB_HDR_2X_STEP_EV;
4479 aeLock = 1;
4480 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4481 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4482 modified_settings = modified_meta.release();
4483 request->settings = modified_settings;
4484
4485 itr = internallyRequestedStreams.begin();
4486 if (itr == internallyRequestedStreams.end()) {
4487 ALOGE("Error Internally Requested Stream list is empty");
4488 assert(0);
4489 } else {
4490 itr->need_metadata = 0;
4491 itr->meteringOnly = 1;
4492 }
4493 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4494 request->frame_number = internalFrameNumber;
4495 processCaptureRequest(request, internallyRequestedStreams);
4496
4497 itr = internallyRequestedStreams.begin();
4498 if (itr == internallyRequestedStreams.end()) {
4499 ALOGE("Error Internally Requested Stream list is empty");
4500 assert(0);
4501 } else {
4502 itr->need_metadata = 1;
4503 itr->meteringOnly = 0;
4504 }
4505
4506 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4507 request->frame_number = internalFrameNumber;
4508 processCaptureRequest(request, internallyRequestedStreams);
4509
4510
4511 /* Capture 2X on original streaming config*/
4512 internallyRequestedStreams.clear();
4513
4514 /* Restore original settings pointer */
4515 request->settings = original_settings;
4516 } else {
4517 uint32_t internalFrameNumber;
4518 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4519 request->frame_number = internalFrameNumber;
4520 return processCaptureRequest(request, internallyRequestedStreams);
4521 }
4522
4523 return NO_ERROR;
4524}
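// Note: results and notifications carrying the internal frame numbers generated
// above are translated back to the framework frame number in orchestrateResult()
// and orchestrateNotify(); frames tagged EMPTY_FRAMEWORK_FRAME_NUMBER are dropped
// there. Illustrative flow only:
//   uint32_t internal, fwk;
//   _orchestrationDb.allocStoreInternalFrameNumber(frameworkFrame, internal);
//   ...
//   _orchestrationDb.getFrameworkFrameNumber(internal, fwk);   // fwk == frameworkFrame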
4525
4526/*===========================================================================
4527 * FUNCTION : orchestrateResult
4528 *
4529 * DESCRIPTION: Orchestrates a capture result to camera service
4530 *
4531 * PARAMETERS :
4532 * @result : capture result to be sent to the framework
4533 *
4534 * RETURN :
4535 *
4536 *==========================================================================*/
4537void QCamera3HardwareInterface::orchestrateResult(
4538 camera3_capture_result_t *result)
4539{
4540 uint32_t frameworkFrameNumber;
4541 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4542 frameworkFrameNumber);
4543 if (rc != NO_ERROR) {
4544 LOGE("Cannot find translated frameworkFrameNumber");
4545 assert(0);
4546 } else {
4547 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004548 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004549 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004550 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004551 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4552 camera_metadata_entry_t entry;
4553 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4554 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004555 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004556 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4557 if (ret != OK)
4558 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004559 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004560 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004561 result->frame_number = frameworkFrameNumber;
4562 mCallbackOps->process_capture_result(mCallbackOps, result);
4563 }
4564 }
4565}
4566
4567/*===========================================================================
4568 * FUNCTION : orchestrateNotify
4569 *
4570 * DESCRIPTION: Orchestrates a notify to camera service
4571 *
4572 * PARAMETERS :
4573 * @notify_msg : notify message to be sent to the framework
4574 *
4575 * RETURN :
4576 *
4577 *==========================================================================*/
4578void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4579{
4580 uint32_t frameworkFrameNumber;
4581 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004582 int32_t rc = NO_ERROR;
4583
4584 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004585 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004586
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004587 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004588 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4589 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4590 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004591 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004592 LOGE("Cannot find translated frameworkFrameNumber");
4593 assert(0);
4594 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004595 }
4596 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004597
4598 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4599 LOGD("Internal Request drop the notifyCb");
4600 } else {
4601 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4602 mCallbackOps->notify(mCallbackOps, notify_msg);
4603 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004604}
4605
4606/*===========================================================================
4607 * FUNCTION : FrameNumberRegistry
4608 *
4609 * DESCRIPTION: Constructor
4610 *
4611 * PARAMETERS :
4612 *
4613 * RETURN :
4614 *
4615 *==========================================================================*/
4616FrameNumberRegistry::FrameNumberRegistry()
4617{
4618 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4619}
4620
4621/*===========================================================================
4622 * FUNCTION : ~FrameNumberRegistry
4623 *
4624 * DESCRIPTION: Destructor
4625 *
4626 * PARAMETERS :
4627 *
4628 * RETURN :
4629 *
4630 *==========================================================================*/
4631FrameNumberRegistry::~FrameNumberRegistry()
4632{
4633}
4634
4635/*===========================================================================
4636 * FUNCTION : purgeOldEntriesLocked
4637 *
4638 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4639 *
4640 * PARAMETERS :
4641 *
4642 * RETURN : NONE
4643 *
4644 *==========================================================================*/
4645void FrameNumberRegistry::purgeOldEntriesLocked()
4646{
4647 while (_register.begin() != _register.end()) {
4648 auto itr = _register.begin();
4649 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4650 _register.erase(itr);
4651 } else {
4652 return;
4653 }
4654 }
4655}
4656
4657/*===========================================================================
4658 * FUNCTION : allocStoreInternalFrameNumber
4659 *
4660 * DESCRIPTION: Method to note down a framework request and associate a new
4661 * internal request number against it
4662 *
4663 * PARAMETERS :
4664 * @frameworkFrameNumber: Frame number given by the framework
4665 * @internalFrameNumber : Output parameter which will hold the newly generated
4666 * internal frame number
4667 *
4668 * RETURN : Error code
4669 *
4670 *==========================================================================*/
4671int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4672 uint32_t &internalFrameNumber)
4673{
4674 Mutex::Autolock lock(mRegistryLock);
4675 internalFrameNumber = _nextFreeInternalNumber++;
4676 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4677 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4678 purgeOldEntriesLocked();
4679 return NO_ERROR;
4680}
4681
4682/*===========================================================================
4683 * FUNCTION : generateStoreInternalFrameNumber
4684 *
4685 * DESCRIPTION: Method to generate a new internal frame number independent
4686 * of any association with framework requests
4687 *
4688 * PARAMETERS :
4689 * @internalFrameNumber: Output parameter which will hold the newly generated internal frame number
4690 *
4691 *
4692 * RETURN : Error code
4693 *
4694 *==========================================================================*/
4695int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4696{
4697 Mutex::Autolock lock(mRegistryLock);
4698 internalFrameNumber = _nextFreeInternalNumber++;
4699 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4700 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4701 purgeOldEntriesLocked();
4702 return NO_ERROR;
4703}
4704
4705/*===========================================================================
4706 * FUNCTION : getFrameworkFrameNumber
4707 *
4708 * DESCRIPTION: Method to query the framework framenumber given an internal #
4709 *
4710 * PARAMETERS :
4711 * @internalFrame#: Internal reference
4712 * @frameworkframenumber: Output parameter holding framework frame entry
4713 *
4714 * RETURN : Error code
4715 *
4716 *==========================================================================*/
4717int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4718 uint32_t &frameworkFrameNumber)
4719{
4720 Mutex::Autolock lock(mRegistryLock);
4721 auto itr = _register.find(internalFrameNumber);
4722 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004723 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004724 return -ENOENT;
4725 }
4726
4727 frameworkFrameNumber = itr->second;
4728 purgeOldEntriesLocked();
4729 return NO_ERROR;
4730}
Thierry Strudel3d639192016-09-09 11:52:26 -07004731
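/*===========================================================================
 * FUNCTION : fillPbStreamConfig
 *
 * DESCRIPTION: Fills an HDR+ (pbcamera) stream configuration from the stream
 * info of the given channel, including per-plane stride and scanline, and the
 * padding implied by the frame length.
 *
 * PARAMETERS : @config : pbcamera stream configuration to fill
 * @pbStreamId : HDR+ stream ID to assign
 * @pbStreamFormat: HDR+ stream format to assign
 * @channel : channel owning the stream
 * @streamIndex : index of the stream within the channel
 *
 * RETURN : Error status codes
 *
 *==========================================================================*/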
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004732status_t QCamera3HardwareInterface::fillPbStreamConfig(
4733 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4734 QCamera3Channel *channel, uint32_t streamIndex) {
4735 if (config == nullptr) {
4736 LOGE("%s: config is null", __FUNCTION__);
4737 return BAD_VALUE;
4738 }
4739
4740 if (channel == nullptr) {
4741 LOGE("%s: channel is null", __FUNCTION__);
4742 return BAD_VALUE;
4743 }
4744
4745 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4746 if (stream == nullptr) {
4747 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4748 return NAME_NOT_FOUND;
4749 }
4750
4751 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4752 if (streamInfo == nullptr) {
4753 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4754 return NAME_NOT_FOUND;
4755 }
4756
4757 config->id = pbStreamId;
4758 config->image.width = streamInfo->dim.width;
4759 config->image.height = streamInfo->dim.height;
4760 config->image.padding = 0;
4761 config->image.format = pbStreamFormat;
4762
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004763 uint32_t totalPlaneSize = 0;
4764
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004765 // Fill plane information.
4766 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4767 pbcamera::PlaneConfiguration plane;
4768 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4769 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4770 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004771
4772 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004773 }
4774
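    // Whatever the frame length requires beyond the sum of the plane sizes is
    // reported to HDR+ as padding.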
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004775 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004776 return OK;
4777}
4778
Thierry Strudel3d639192016-09-09 11:52:26 -07004779/*===========================================================================
4780 * FUNCTION : processCaptureRequest
4781 *
4782 * DESCRIPTION: process a capture request from camera service
4783 *
4784 * PARAMETERS :
4785 * @request : request from framework to process
4786 *
4787 * RETURN :
4788 *
4789 *==========================================================================*/
4790int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004791 camera3_capture_request_t *request,
4792 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004793{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004794 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004795 int rc = NO_ERROR;
4796 int32_t request_id;
4797 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004798 bool isVidBufRequested = false;
4799 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004800 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004801
4802 pthread_mutex_lock(&mMutex);
4803
4804 // Validate current state
4805 switch (mState) {
4806 case CONFIGURED:
4807 case STARTED:
4808 /* valid state */
4809 break;
4810
4811 case ERROR:
4812 pthread_mutex_unlock(&mMutex);
4813 handleCameraDeviceError();
4814 return -ENODEV;
4815
4816 default:
4817 LOGE("Invalid state %d", mState);
4818 pthread_mutex_unlock(&mMutex);
4819 return -ENODEV;
4820 }
4821
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004822 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004823 if (rc != NO_ERROR) {
4824 LOGE("incoming request is not valid");
4825 pthread_mutex_unlock(&mMutex);
4826 return rc;
4827 }
4828
4829 meta = request->settings;
4830
4831 // For first capture request, send capture intent, and
4832 // stream on all streams
4833 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004834 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004835 // send an unconfigure to the backend so that the isp
4836 // resources are deallocated
4837 if (!mFirstConfiguration) {
4838 cam_stream_size_info_t stream_config_info;
4839 int32_t hal_version = CAM_HAL_V3;
4840 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4841 stream_config_info.buffer_info.min_buffers =
4842 MIN_INFLIGHT_REQUESTS;
4843 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004844 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004845 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004846 clear_metadata_buffer(mParameters);
4847 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4848 CAM_INTF_PARM_HAL_VERSION, hal_version);
4849 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4850 CAM_INTF_META_STREAM_INFO, stream_config_info);
4851 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4852 mParameters);
4853 if (rc < 0) {
4854 LOGE("set_parms for unconfigure failed");
4855 pthread_mutex_unlock(&mMutex);
4856 return rc;
4857 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004858
Thierry Strudel3d639192016-09-09 11:52:26 -07004859 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004860 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004861 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004862 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004863 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004864 property_get("persist.camera.is_type", is_type_value, "4");
4865 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4866 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4867 property_get("persist.camera.is_type_preview", is_type_value, "4");
4868 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4869 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004870
4871 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4872 int32_t hal_version = CAM_HAL_V3;
4873 uint8_t captureIntent =
4874 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4875 mCaptureIntent = captureIntent;
4876 clear_metadata_buffer(mParameters);
4877 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4878 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4879 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004880 if (mFirstConfiguration) {
4881 // configure instant AEC
4882 // Instant AEC is a session based parameter and it is needed only
4883 // once per complete session after open camera.
4884 // i.e. This is set only once for the first capture request, after open camera.
4885 setInstantAEC(meta);
4886 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004887 uint8_t fwkVideoStabMode=0;
4888 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4889 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4890 }
4891
Xue Tuecac74e2017-04-17 13:58:15 -07004892 // If the EIS setprop is enabled, turn EIS on only for video/preview streams
4893 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004894 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004895 int32_t vsMode;
4896 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4897 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4898 rc = BAD_VALUE;
4899 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004900 LOGD("setEis %d", setEis);
4901 bool eis3Supported = false;
4902 size_t count = IS_TYPE_MAX;
4903 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4904 for (size_t i = 0; i < count; i++) {
4905 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4906 eis3Supported = true;
4907 break;
4908 }
4909 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004910
4911 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004912 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004913 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4914 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004915 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4916 is_type = isTypePreview;
4917 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4918 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4919 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004920 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004921 } else {
4922 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004923 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004924 } else {
4925 is_type = IS_TYPE_NONE;
4926 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004927 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004928 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004929 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4930 }
4931 }
4932
4933 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4934 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4935
Thierry Strudel54dc9782017-02-15 12:12:10 -08004936 //Disable tintless only if the property is set to 0
4937 memset(prop, 0, sizeof(prop));
4938 property_get("persist.camera.tintless.enable", prop, "1");
4939 int32_t tintless_value = atoi(prop);
4940
Thierry Strudel3d639192016-09-09 11:52:26 -07004941 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4942 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004943
Thierry Strudel3d639192016-09-09 11:52:26 -07004944 //Disable CDS for HFR mode or if DIS/EIS is on.
4945 //CDS is a session parameter in the backend/ISP, so it needs to be set/reset
4946 //after every configure_stream
4947 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4948 (m_bIsVideo)) {
4949 int32_t cds = CAM_CDS_MODE_OFF;
4950 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4951 CAM_INTF_PARM_CDS_MODE, cds))
4952 LOGE("Failed to disable CDS for HFR mode");
4953
4954 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004955
4956 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4957 uint8_t* use_av_timer = NULL;
4958
4959 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004960 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004961 use_av_timer = &m_debug_avtimer;
4962 }
4963 else{
4964 use_av_timer =
4965 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004966 if (use_av_timer) {
4967 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4968 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004969 }
4970
4971 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4972 rc = BAD_VALUE;
4973 }
4974 }
4975
Thierry Strudel3d639192016-09-09 11:52:26 -07004976 setMobicat();
4977
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004978 uint8_t nrMode = 0;
4979 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4980 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4981 }
4982
Thierry Strudel3d639192016-09-09 11:52:26 -07004983 /* Set fps and hfr mode while sending meta stream info so that sensor
4984 * can configure appropriate streaming mode */
4985 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004986 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4987 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004988 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4989 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004990 if (rc == NO_ERROR) {
4991 int32_t max_fps =
4992 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004993 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004994 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4995 }
4996 /* For HFR, more buffers are dequeued upfront to improve the performance */
4997 if (mBatchSize) {
4998 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4999 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5000 }
5001 }
5002 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005003 LOGE("setHalFpsRange failed");
5004 }
5005 }
5006 if (meta.exists(ANDROID_CONTROL_MODE)) {
5007 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5008 rc = extractSceneMode(meta, metaMode, mParameters);
5009 if (rc != NO_ERROR) {
5010 LOGE("extractSceneMode failed");
5011 }
5012 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005013 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005014
Thierry Strudel04e026f2016-10-10 11:27:36 -07005015 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5016 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5017 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5018 rc = setVideoHdrMode(mParameters, vhdr);
5019 if (rc != NO_ERROR) {
5020 LOGE("setVideoHdrMode failed");
5021 }
5022 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005023
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005024 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005025 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005026 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005027 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5028 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5029 sensorModeFullFov)) {
5030 rc = BAD_VALUE;
5031 }
5032 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005033 //TODO: validate the arguments, HSV scenemode should have only the
5034 //advertised fps ranges
5035
5036 /*set the capture intent, hal version, tintless, stream info,
5037 *and DIS enable parameters to the backend*/
5038 LOGD("set_parms META_STREAM_INFO " );
5039 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005040 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5041 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005042 mStreamConfigInfo.type[i],
5043 mStreamConfigInfo.stream_sizes[i].width,
5044 mStreamConfigInfo.stream_sizes[i].height,
5045 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005046 mStreamConfigInfo.format[i],
5047 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005048 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005049
Thierry Strudel3d639192016-09-09 11:52:26 -07005050 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5051 mParameters);
5052 if (rc < 0) {
5053 LOGE("set_parms failed for hal version, stream info");
5054 }
5055
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005056 cam_sensor_mode_info_t sensorModeInfo = {};
5057 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005058 if (rc != NO_ERROR) {
5059 LOGE("Failed to get sensor output size");
5060 pthread_mutex_unlock(&mMutex);
5061 goto error_exit;
5062 }
5063
5064 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5065 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005066 sensorModeInfo.active_array_size.width,
5067 sensorModeInfo.active_array_size.height);
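        // The crop region mapper converts framework crop/3A regions, which are
        // expressed in full active-array coordinates, into the active array of the
        // selected sensor mode (which may be binned or cropped), and back.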
Thierry Strudel3d639192016-09-09 11:52:26 -07005068
5069 /* Set batchmode before initializing channel. Since registerBuffer
5070 * internally initializes some of the channels, better set batchmode
5071 * even before first register buffer */
5072 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5073 it != mStreamInfo.end(); it++) {
5074 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5075 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5076 && mBatchSize) {
5077 rc = channel->setBatchSize(mBatchSize);
5078 //Disable per frame map unmap for HFR/batchmode case
5079 rc |= channel->setPerFrameMapUnmap(false);
5080 if (NO_ERROR != rc) {
5081 LOGE("Channel init failed %d", rc);
5082 pthread_mutex_unlock(&mMutex);
5083 goto error_exit;
5084 }
5085 }
5086 }
5087
5088 //First initialize all streams
5089 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5090 it != mStreamInfo.end(); it++) {
5091 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005092
5093 /* Initial value of NR mode is needed before stream on */
5094 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005095 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5096 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005097 setEis) {
5098 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5099 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5100 is_type = mStreamConfigInfo.is_type[i];
5101 break;
5102 }
5103 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005104 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005105 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005106 rc = channel->initialize(IS_TYPE_NONE);
5107 }
5108 if (NO_ERROR != rc) {
5109 LOGE("Channel initialization failed %d", rc);
5110 pthread_mutex_unlock(&mMutex);
5111 goto error_exit;
5112 }
5113 }
5114
5115 if (mRawDumpChannel) {
5116 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5117 if (rc != NO_ERROR) {
5118 LOGE("Error: Raw Dump Channel init failed");
5119 pthread_mutex_unlock(&mMutex);
5120 goto error_exit;
5121 }
5122 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005123 if (mHdrPlusRawSrcChannel) {
5124 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5125 if (rc != NO_ERROR) {
5126 LOGE("Error: HDR+ RAW Source Channel init failed");
5127 pthread_mutex_unlock(&mMutex);
5128 goto error_exit;
5129 }
5130 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005131 if (mSupportChannel) {
5132 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5133 if (rc < 0) {
5134 LOGE("Support channel initialization failed");
5135 pthread_mutex_unlock(&mMutex);
5136 goto error_exit;
5137 }
5138 }
5139 if (mAnalysisChannel) {
5140 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5141 if (rc < 0) {
5142 LOGE("Analysis channel initialization failed");
5143 pthread_mutex_unlock(&mMutex);
5144 goto error_exit;
5145 }
5146 }
5147 if (mDummyBatchChannel) {
5148 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5149 if (rc < 0) {
5150 LOGE("mDummyBatchChannel setBatchSize failed");
5151 pthread_mutex_unlock(&mMutex);
5152 goto error_exit;
5153 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005154 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005155 if (rc < 0) {
5156 LOGE("mDummyBatchChannel initialization failed");
5157 pthread_mutex_unlock(&mMutex);
5158 goto error_exit;
5159 }
5160 }
5161
5162 // Set bundle info
5163 rc = setBundleInfo();
5164 if (rc < 0) {
5165 LOGE("setBundleInfo failed %d", rc);
5166 pthread_mutex_unlock(&mMutex);
5167 goto error_exit;
5168 }
5169
5170 //update settings from app here
5171 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5172 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5173 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5174 }
5175 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5176 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5177 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5178 }
5179 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5180 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5181 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5182
5183 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5184 (mLinkedCameraId != mCameraId) ) {
5185 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5186 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005187 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005188 goto error_exit;
5189 }
5190 }
5191
5192 // add bundle related cameras
5193 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5194 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005195 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5196 &m_pDualCamCmdPtr->bundle_info;
5197 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005198 if (mIsDeviceLinked)
5199 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5200 else
5201 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5202
5203 pthread_mutex_lock(&gCamLock);
5204
5205 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5206 LOGE("Dualcam: Invalid Session Id ");
5207 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005208 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005209 goto error_exit;
5210 }
5211
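    // The main camera is registered as the primary (bayer) session and the linked
    // camera as the auxiliary (mono) session; both use CAM_3A_SYNC_FOLLOW so that
    // 3A stays synchronized across the linked pair.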
5212 if (mIsMainCamera == 1) {
5213 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5214 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005215 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005216 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005217 // related session id should be session id of linked session
5218 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5219 } else {
5220 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5221 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005222 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005223 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005224 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5225 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005226 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005227 pthread_mutex_unlock(&gCamLock);
5228
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005229 rc = mCameraHandle->ops->set_dual_cam_cmd(
5230 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005231 if (rc < 0) {
5232 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005233 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005234 goto error_exit;
5235 }
5236 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005237 goto no_error;
5238error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005239 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005240 return rc;
5241no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005242 mWokenUpByDaemon = false;
5243 mPendingLiveRequest = 0;
5244 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005245 }
5246
5247 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005248 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005249
5250 if (mFlushPerf) {
5251 //we cannot accept any requests during flush
5252 LOGE("process_capture_request cannot proceed during flush");
5253 pthread_mutex_unlock(&mMutex);
5254 return NO_ERROR; //should return an error
5255 }
5256
5257 if (meta.exists(ANDROID_REQUEST_ID)) {
5258 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5259 mCurrentRequestId = request_id;
5260 LOGD("Received request with id: %d", request_id);
5261 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5262 LOGE("Unable to find request id field, \
5263 & no previous id available");
5264 pthread_mutex_unlock(&mMutex);
5265 return NAME_NOT_FOUND;
5266 } else {
5267 LOGD("Re-using old request id");
5268 request_id = mCurrentRequestId;
5269 }
5270
5271 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5272 request->num_output_buffers,
5273 request->input_buffer,
5274 frameNumber);
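    // streamsArray collects the backend stream IDs that need servicing for this
    // request; it is later attached to the request parameters via
    // CAM_INTF_META_STREAM_ID so the backend knows which streams to fill.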
5275 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005276 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005277 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005278 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005279 uint32_t snapshotStreamId = 0;
5280 for (size_t i = 0; i < request->num_output_buffers; i++) {
5281 const camera3_stream_buffer_t& output = request->output_buffers[i];
5282 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5283
Emilian Peev7650c122017-01-19 08:24:33 -08005284 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5285 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005286 //FIXME??: Call a function to store a local copy of the JPEG data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005287 blob_request = 1;
5288 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5289 }
5290
5291 if (output.acquire_fence != -1) {
5292 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5293 close(output.acquire_fence);
5294 if (rc != OK) {
5295 LOGE("sync wait failed %d", rc);
5296 pthread_mutex_unlock(&mMutex);
5297 return rc;
5298 }
5299 }
5300
Emilian Peev0f3c3162017-03-15 12:57:46 +00005301 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5302 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005303 depthRequestPresent = true;
5304 continue;
5305 }
5306
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005307 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005308 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005309
5310 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5311 isVidBufRequested = true;
5312 }
5313 }
5314
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005315 //FIXME: Add checks to ensure no dups in validateCaptureRequest
5316 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5317 itr++) {
5318 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5319 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5320 channel->getStreamID(channel->getStreamTypeMask());
5321
5322 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5323 isVidBufRequested = true;
5324 }
5325 }
5326
Thierry Strudel3d639192016-09-09 11:52:26 -07005327 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005328 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005329 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005330 }
5331 if (blob_request && mRawDumpChannel) {
5332 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005333 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005334 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005335 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005336 }
5337
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005338 {
5339 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5340 // Request a RAW buffer if
5341 // 1. mHdrPlusRawSrcChannel is valid.
5342 // 2. frameNumber is a multiple of kHdrPlusRawPeriod (to limit the RAW capture rate.)
5343 // 3. There is no pending HDR+ request.
5344 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5345 mHdrPlusPendingRequests.size() == 0) {
5346 streamsArray.stream_request[streamsArray.num_streams].streamID =
5347 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5348 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5349 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005350 }
5351
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005352 //extract capture intent
5353 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5354 mCaptureIntent =
5355 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5356 }
5357
5358 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5359 mCacMode =
5360 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5361 }
5362
5363 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005364 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005365
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005366 {
5367 Mutex::Autolock l(gHdrPlusClientLock);
5368 // If this request has a still capture intent, try to submit an HDR+ request.
5369 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5370 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5371 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5372 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005373 }
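    // If the HDR+ request was accepted, only the frame parameters are set below and
    // no buffers are requested from the regular channels for this frame; the HDR+
    // client is expected to produce the output instead.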
5374
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005375 if (hdrPlusRequest) {
5376 // For a HDR+ request, just set the frame parameters.
5377 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5378 if (rc < 0) {
5379 LOGE("fail to set frame parameters");
5380 pthread_mutex_unlock(&mMutex);
5381 return rc;
5382 }
5383 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005384 /* Parse the settings:
5385 * - For every request in NORMAL MODE
5386 * - For every request in HFR mode during preview only case
5387 * - For first request of every batch in HFR mode during video
5388 * recording. In batchmode the same settings except frame number is
5389 * repeated in each request of the batch.
5390 */
5391 if (!mBatchSize ||
5392 (mBatchSize && !isVidBufRequested) ||
5393 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005394 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005395 if (rc < 0) {
5396 LOGE("fail to set frame parameters");
5397 pthread_mutex_unlock(&mMutex);
5398 return rc;
5399 }
5400 }
5401 /* For batchMode HFR, setFrameParameters is not called for every
5402 * request; only the frame number of the latest request is parsed.
5403 * Keep track of the first and last frame numbers in a batch so that
5404 * metadata for all frame numbers of the batch can be duplicated in
5405 * handleBatchMetadata */
5406 if (mBatchSize) {
5407 if (!mToBeQueuedVidBufs) {
5408 //start of the batch
5409 mFirstFrameNumberInBatch = request->frame_number;
5410 }
5411 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5412 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5413 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005414 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005415 return BAD_VALUE;
5416 }
5417 }
5418 if (mNeedSensorRestart) {
5419 /* Unlock the mutex as restartSensor waits on the channels to be
5420 * stopped, which in turn calls stream callback functions -
5421 * handleBufferWithLock and handleMetadataWithLock */
5422 pthread_mutex_unlock(&mMutex);
5423 rc = dynamicUpdateMetaStreamInfo();
5424 if (rc != NO_ERROR) {
5425 LOGE("Restarting the sensor failed");
5426 return BAD_VALUE;
5427 }
5428 mNeedSensorRestart = false;
5429 pthread_mutex_lock(&mMutex);
5430 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005431 if(mResetInstantAEC) {
5432 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5433 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5434 mResetInstantAEC = false;
5435 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005436 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005437 if (request->input_buffer->acquire_fence != -1) {
5438 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5439 close(request->input_buffer->acquire_fence);
5440 if (rc != OK) {
5441 LOGE("input buffer sync wait failed %d", rc);
5442 pthread_mutex_unlock(&mMutex);
5443 return rc;
5444 }
5445 }
5446 }
5447
5448 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5449 mLastCustIntentFrmNum = frameNumber;
5450 }
5451 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005452 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005453 pendingRequestIterator latestRequest;
5454 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005455 pendingRequest.num_buffers = depthRequestPresent ?
5456 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005457 pendingRequest.request_id = request_id;
5458 pendingRequest.blob_request = blob_request;
5459 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005460 if (request->input_buffer) {
5461 pendingRequest.input_buffer =
5462 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5463 *(pendingRequest.input_buffer) = *(request->input_buffer);
5464 pInputBuffer = pendingRequest.input_buffer;
5465 } else {
5466 pendingRequest.input_buffer = NULL;
5467 pInputBuffer = NULL;
5468 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005469 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005470
5471 pendingRequest.pipeline_depth = 0;
5472 pendingRequest.partial_result_cnt = 0;
5473 extractJpegMetadata(mCurJpegMeta, request);
5474 pendingRequest.jpegMetadata = mCurJpegMeta;
5475 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005476 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005477 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5478 mHybridAeEnable =
5479 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5480 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005481
5482 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5483 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005484 /* DevCamDebug metadata processCaptureRequest */
5485 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5486 mDevCamDebugMetaEnable =
5487 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5488 }
5489 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5490 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005491
5492 //extract CAC info
5493 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5494 mCacMode =
5495 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5496 }
5497 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005498 pendingRequest.hdrplus = hdrPlusRequest;
Emilian Peev30522a12017-08-03 14:36:33 +01005499 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5500 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005501
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005502 // extract enableZsl info
5503 if (gExposeEnableZslKey) {
5504 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5505 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5506 mZslEnabled = pendingRequest.enableZsl;
5507 } else {
5508 pendingRequest.enableZsl = mZslEnabled;
5509 }
5510 }
5511
Thierry Strudel3d639192016-09-09 11:52:26 -07005512 PendingBuffersInRequest bufsForCurRequest;
5513 bufsForCurRequest.frame_number = frameNumber;
5514 // Mark current timestamp for the new request
5515 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005516 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005517
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005518 if (hdrPlusRequest) {
5519 // Save settings for this request.
5520 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5521 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5522
5523 // Add to pending HDR+ request queue.
5524 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5525 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5526
5527 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5528 }
5529
Thierry Strudel3d639192016-09-09 11:52:26 -07005530 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005531 if ((request->output_buffers[i].stream->data_space ==
5532 HAL_DATASPACE_DEPTH) &&
5533 (HAL_PIXEL_FORMAT_BLOB ==
5534 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005535 continue;
5536 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005537 RequestedBufferInfo requestedBuf;
5538 memset(&requestedBuf, 0, sizeof(requestedBuf));
5539 requestedBuf.stream = request->output_buffers[i].stream;
5540 requestedBuf.buffer = NULL;
5541 pendingRequest.buffers.push_back(requestedBuf);
5542
5543 // Add to buffer handle the pending buffers list
5544 PendingBufferInfo bufferInfo;
5545 bufferInfo.buffer = request->output_buffers[i].buffer;
5546 bufferInfo.stream = request->output_buffers[i].stream;
5547 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5548 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5549 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5550 frameNumber, bufferInfo.buffer,
5551 channel->getStreamTypeMask(), bufferInfo.stream->format);
5552 }
5553 // Add this request packet into mPendingBuffersMap
5554 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5555 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5556 mPendingBuffersMap.get_num_overall_buffers());
5557
5558 latestRequest = mPendingRequestsList.insert(
5559 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005560
5561 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5562 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005563 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005564 for (size_t i = 0; i < request->num_output_buffers; i++) {
5565 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5566 }
5567
Thierry Strudel3d639192016-09-09 11:52:26 -07005568 if(mFlush) {
5569 LOGI("mFlush is true");
5570 pthread_mutex_unlock(&mMutex);
5571 return NO_ERROR;
5572 }
5573
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005574 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5575 // channel.
5576 if (!hdrPlusRequest) {
5577 int indexUsed;
5578 // Notify metadata channel we receive a request
5579 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005580
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005581 if(request->input_buffer != NULL){
5582 LOGD("Input request, frame_number %d", frameNumber);
5583 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5584 if (NO_ERROR != rc) {
5585 LOGE("fail to set reproc parameters");
5586 pthread_mutex_unlock(&mMutex);
5587 return rc;
5588 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005589 }
5590
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005591 // Call request on other streams
5592 uint32_t streams_need_metadata = 0;
5593 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5594 for (size_t i = 0; i < request->num_output_buffers; i++) {
5595 const camera3_stream_buffer_t& output = request->output_buffers[i];
5596 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5597
5598 if (channel == NULL) {
5599 LOGW("invalid channel pointer for stream");
5600 continue;
5601 }
5602
5603 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5604 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5605 output.buffer, request->input_buffer, frameNumber);
5606 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005607 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005608 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5609 if (rc < 0) {
5610 LOGE("Fail to request on picture channel");
5611 pthread_mutex_unlock(&mMutex);
5612 return rc;
5613 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005614 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005615 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5616 assert(NULL != mDepthChannel);
5617 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005618
Emilian Peev7650c122017-01-19 08:24:33 -08005619 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5620 if (rc < 0) {
5621 LOGE("Fail to map on depth buffer");
5622 pthread_mutex_unlock(&mMutex);
5623 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005624 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005625 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005626 } else {
5627 LOGD("snapshot request with buffer %p, frame_number %d",
5628 output.buffer, frameNumber);
5629 if (!request->settings) {
5630 rc = channel->request(output.buffer, frameNumber,
5631 NULL, mPrevParameters, indexUsed);
5632 } else {
5633 rc = channel->request(output.buffer, frameNumber,
5634 NULL, mParameters, indexUsed);
5635 }
5636 if (rc < 0) {
5637 LOGE("Fail to request on picture channel");
5638 pthread_mutex_unlock(&mMutex);
5639 return rc;
5640 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005641
Emilian Peev7650c122017-01-19 08:24:33 -08005642 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5643 uint32_t j = 0;
5644 for (j = 0; j < streamsArray.num_streams; j++) {
5645 if (streamsArray.stream_request[j].streamID == streamId) {
5646 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5647 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5648 else
5649 streamsArray.stream_request[j].buf_index = indexUsed;
5650 break;
5651 }
5652 }
5653 if (j == streamsArray.num_streams) {
5654 LOGE("Did not find matching stream to update index");
5655 assert(0);
5656 }
5657
5658 pendingBufferIter->need_metadata = true;
5659 streams_need_metadata++;
5660 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005661 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005662 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5663 bool needMetadata = false;
5664 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5665 rc = yuvChannel->request(output.buffer, frameNumber,
5666 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5667 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005668 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005669 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005670 pthread_mutex_unlock(&mMutex);
5671 return rc;
5672 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005673
5674 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5675 uint32_t j = 0;
5676 for (j = 0; j < streamsArray.num_streams; j++) {
5677 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005678 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5679 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5680 else
5681 streamsArray.stream_request[j].buf_index = indexUsed;
5682 break;
5683 }
5684 }
5685 if (j == streamsArray.num_streams) {
5686 LOGE("Did not find matching stream to update index");
5687 assert(0);
5688 }
5689
5690 pendingBufferIter->need_metadata = needMetadata;
5691 if (needMetadata)
5692 streams_need_metadata += 1;
5693 LOGD("calling YUV channel request, need_metadata is %d",
5694 needMetadata);
5695 } else {
5696 LOGD("request with buffer %p, frame_number %d",
5697 output.buffer, frameNumber);
5698
5699 rc = channel->request(output.buffer, frameNumber, indexUsed);
5700
5701 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5702 uint32_t j = 0;
5703 for (j = 0; j < streamsArray.num_streams; j++) {
5704 if (streamsArray.stream_request[j].streamID == streamId) {
5705 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5706 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5707 else
5708 streamsArray.stream_request[j].buf_index = indexUsed;
5709 break;
5710 }
5711 }
5712 if (j == streamsArray.num_streams) {
5713 LOGE("Did not find matching stream to update index");
5714 assert(0);
5715 }
5716
5717 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5718 && mBatchSize) {
5719 mToBeQueuedVidBufs++;
5720 if (mToBeQueuedVidBufs == mBatchSize) {
5721 channel->queueBatchBuf();
5722 }
5723 }
5724 if (rc < 0) {
5725 LOGE("request failed");
5726 pthread_mutex_unlock(&mMutex);
5727 return rc;
5728 }
5729 }
5730 pendingBufferIter++;
5731 }
5732
5733 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5734 itr++) {
5735 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5736
5737 if (channel == NULL) {
5738 LOGE("invalid channel pointer for stream");
5739 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005740 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005741 return BAD_VALUE;
5742 }
5743
5744 InternalRequest requestedStream;
5745 requestedStream = (*itr);
5746
5747
5748 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5749 LOGD("snapshot request internally input buffer %p, frame_number %d",
5750 request->input_buffer, frameNumber);
5751 if(request->input_buffer != NULL){
5752 rc = channel->request(NULL, frameNumber,
5753 pInputBuffer, &mReprocMeta, indexUsed, true,
5754 requestedStream.meteringOnly);
5755 if (rc < 0) {
5756 LOGE("Fail to request on picture channel");
5757 pthread_mutex_unlock(&mMutex);
5758 return rc;
5759 }
5760 } else {
5761 LOGD("snapshot request with frame_number %d", frameNumber);
5762 if (!request->settings) {
5763 rc = channel->request(NULL, frameNumber,
5764 NULL, mPrevParameters, indexUsed, true,
5765 requestedStream.meteringOnly);
5766 } else {
5767 rc = channel->request(NULL, frameNumber,
5768 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5769 }
5770 if (rc < 0) {
5771 LOGE("Fail to request on picture channel");
5772 pthread_mutex_unlock(&mMutex);
5773 return rc;
5774 }
5775
5776 if ((*itr).meteringOnly != 1) {
5777 requestedStream.need_metadata = 1;
5778 streams_need_metadata++;
5779 }
5780 }
5781
5782 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5783 uint32_t j = 0;
5784 for (j = 0; j < streamsArray.num_streams; j++) {
5785 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005786 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5787 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5788 else
5789 streamsArray.stream_request[j].buf_index = indexUsed;
5790 break;
5791 }
5792 }
5793 if (j == streamsArray.num_streams) {
5794 LOGE("Did not find matching stream to update index");
5795 assert(0);
5796 }
5797
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005798 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005799 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005800 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005801 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005802 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005803 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005804 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005805 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005806
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005807 //If 2 streams have need_metadata set to true, fail the request, unless
5808 //we copy/reference count the metadata buffer
5809 if (streams_need_metadata > 1) {
5810 LOGE("not supporting request in which two streams requires"
5811 " 2 HAL metadata for reprocessing");
5812 pthread_mutex_unlock(&mMutex);
5813 return -EINVAL;
5814 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005815
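    // Decide whether the sensor should emit phase-detection (PD) data: when a depth
    // buffer is requested, honor NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE from the
    // settings (falling back to the last requested mode); with a depth channel but
    // no depth request the data is skipped, and without a depth channel it is disabled.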
Emilian Peev656e4fa2017-06-02 16:47:04 +01005816 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5817 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5818 if (depthRequestPresent && mDepthChannel) {
5819 if (request->settings) {
5820 camera_metadata_ro_entry entry;
5821 if (find_camera_metadata_ro_entry(request->settings,
5822 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5823 if (entry.data.u8[0]) {
5824 pdafEnable = CAM_PD_DATA_ENABLED;
5825 } else {
5826 pdafEnable = CAM_PD_DATA_SKIP;
5827 }
5828 mDepthCloudMode = pdafEnable;
5829 } else {
5830 pdafEnable = mDepthCloudMode;
5831 }
5832 } else {
5833 pdafEnable = mDepthCloudMode;
5834 }
5835 }
5836
Emilian Peev7650c122017-01-19 08:24:33 -08005837 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5838 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5839 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5840 pthread_mutex_unlock(&mMutex);
5841 return BAD_VALUE;
5842 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005843
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005844 if (request->input_buffer == NULL) {
5845 /* Set the parameters to backend:
5846 * - For every request in NORMAL MODE
5847 * - For every request in HFR mode during preview only case
5848 * - Once every batch in HFR mode during video recording
5849 */
5850 if (!mBatchSize ||
5851 (mBatchSize && !isVidBufRequested) ||
5852 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5853 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5854 mBatchSize, isVidBufRequested,
5855 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005856
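    // For HFR batch mode, accumulate the unique stream IDs seen across the batch in
    // mBatchedStreamsArray so that the single set_parms issued for the whole batch
    // covers every stream requested by any frame in the batch.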
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005857 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5858 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5859 uint32_t m = 0;
5860 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5861 if (streamsArray.stream_request[k].streamID ==
5862 mBatchedStreamsArray.stream_request[m].streamID)
5863 break;
5864 }
5865 if (m == mBatchedStreamsArray.num_streams) {
5866 mBatchedStreamsArray.stream_request\
5867 [mBatchedStreamsArray.num_streams].streamID =
5868 streamsArray.stream_request[k].streamID;
5869 mBatchedStreamsArray.stream_request\
5870 [mBatchedStreamsArray.num_streams].buf_index =
5871 streamsArray.stream_request[k].buf_index;
5872 mBatchedStreamsArray.num_streams =
5873 mBatchedStreamsArray.num_streams + 1;
5874 }
5875 }
5876 streamsArray = mBatchedStreamsArray;
5877 }
5878 /* Update stream id of all the requested buffers */
5879 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5880 streamsArray)) {
5881 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005882 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005883 return BAD_VALUE;
5884 }
5885
5886 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5887 mParameters);
5888 if (rc < 0) {
5889 LOGE("set_parms failed");
5890 }
5891 /* reset to zero because the batch is queued */
5892 mToBeQueuedVidBufs = 0;
5893 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5894 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5895 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005896 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5897 uint32_t m = 0;
5898 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5899 if (streamsArray.stream_request[k].streamID ==
5900 mBatchedStreamsArray.stream_request[m].streamID)
5901 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005902 }
5903 if (m == mBatchedStreamsArray.num_streams) {
5904 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5905 streamID = streamsArray.stream_request[k].streamID;
5906 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5907 buf_index = streamsArray.stream_request[k].buf_index;
5908 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5909 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005910 }
5911 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005912 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005913
5914 // Start all streams after the first setting is sent, so that the
5915 // setting can be applied sooner: (0 + apply_delay)th frame.
5916 if (mState == CONFIGURED && mChannelHandle) {
5917 //Then start them.
5918 LOGH("Start META Channel");
5919 rc = mMetadataChannel->start();
5920 if (rc < 0) {
5921 LOGE("META channel start failed");
5922 pthread_mutex_unlock(&mMutex);
5923 return rc;
5924 }
5925
5926 if (mAnalysisChannel) {
5927 rc = mAnalysisChannel->start();
5928 if (rc < 0) {
5929 LOGE("Analysis channel start failed");
5930 mMetadataChannel->stop();
5931 pthread_mutex_unlock(&mMutex);
5932 return rc;
5933 }
5934 }
5935
5936 if (mSupportChannel) {
5937 rc = mSupportChannel->start();
5938 if (rc < 0) {
5939 LOGE("Support channel start failed");
5940 mMetadataChannel->stop();
5941 /* Although support and analysis are mutually exclusive today
5942 adding it in any case for future-proofing */
5943 if (mAnalysisChannel) {
5944 mAnalysisChannel->stop();
5945 }
5946 pthread_mutex_unlock(&mMutex);
5947 return rc;
5948 }
5949 }
5950 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5951 it != mStreamInfo.end(); it++) {
5952 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5953 LOGH("Start Processing Channel mask=%d",
5954 channel->getStreamTypeMask());
5955 rc = channel->start();
5956 if (rc < 0) {
5957 LOGE("channel start failed");
5958 pthread_mutex_unlock(&mMutex);
5959 return rc;
5960 }
5961 }
5962
5963 if (mRawDumpChannel) {
5964 LOGD("Starting raw dump stream");
5965 rc = mRawDumpChannel->start();
5966 if (rc != NO_ERROR) {
5967 LOGE("Error Starting Raw Dump Channel");
5968 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5969 it != mStreamInfo.end(); it++) {
5970 QCamera3Channel *channel =
5971 (QCamera3Channel *)(*it)->stream->priv;
5972 LOGH("Stopping Processing Channel mask=%d",
5973 channel->getStreamTypeMask());
5974 channel->stop();
5975 }
5976 if (mSupportChannel)
5977 mSupportChannel->stop();
5978 if (mAnalysisChannel) {
5979 mAnalysisChannel->stop();
5980 }
5981 mMetadataChannel->stop();
5982 pthread_mutex_unlock(&mMutex);
5983 return rc;
5984 }
5985 }
5986
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005987 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005988 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005989 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005990 if (rc != NO_ERROR) {
5991 LOGE("start_channel failed %d", rc);
5992 pthread_mutex_unlock(&mMutex);
5993 return rc;
5994 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005995
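    // Note: start_channel above was called with start_sensor_streaming=false; the
    // sensor is only started further below, after Easel MIPI is configured, so
    // (presumably) no frames are produced before the HDR+ capture path is ready.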
5996 {
5997 // Configure Easel for stream on.
5998 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005999
6000 // Now that sensor mode should have been selected, get the selected sensor mode
6001 // info.
6002 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6003 getCurrentSensorModeInfo(mSensorModeInfo);
6004
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006005 if (EaselManagerClientOpened) {
6006 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chen44abb642017-06-02 18:00:38 -07006007 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6008 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006009 if (rc != OK) {
6010 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6011 mCameraId, mSensorModeInfo.op_pixel_clk);
6012 pthread_mutex_unlock(&mMutex);
6013 return rc;
6014 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07006015 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006016 }
6017 }
6018
6019 // Start sensor streaming.
6020 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6021 mChannelHandle);
6022 if (rc != NO_ERROR) {
6023 LOGE("start_sensor_stream_on failed %d", rc);
6024 pthread_mutex_unlock(&mMutex);
6025 return rc;
6026 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006027 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006028 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006029 }
6030
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006031 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen3b630e52017-06-02 15:39:47 -07006032 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || mCameraId == 0) {
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006033 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen44abb642017-06-02 18:00:38 -07006034 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006035 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6036 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6037 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6038 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
6039 rc = enableHdrPlusModeLocked();
6040 if (rc != OK) {
6041 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6042 pthread_mutex_unlock(&mMutex);
6043 return rc;
6044 }
6045
6046 mFirstPreviewIntentSeen = true;
6047 }
6048 }
6049
Thierry Strudel3d639192016-09-09 11:52:26 -07006050 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6051
6052 mState = STARTED;
6053 // Added a timed condition wait
6054 struct timespec ts;
6055 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006056 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006057 if (rc < 0) {
6058 isValidTimeout = 0;
6059 LOGE("Error reading the real time clock!!");
6060 }
6061 else {
6062 // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006063 int64_t timeout = 5;
6064 {
6065 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6066 // If there is a pending HDR+ request, the following requests may be blocked until the
6067 // HDR+ request is done. So allow a longer timeout.
6068 if (mHdrPlusPendingRequests.size() > 0) {
6069 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6070 }
6071 }
6072 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006073 }
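    // Throttle the caller: block until the in-flight request count drops below the
    // minimum watermark (or the backend wakes us with the count under the maximum,
    // or the timed wait expires), so requests cannot pile up without bound in the HAL.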
6074 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006075 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006076 (mState != ERROR) && (mState != DEINIT)) {
6077 if (!isValidTimeout) {
6078 LOGD("Blocking on conditional wait");
6079 pthread_cond_wait(&mRequestCond, &mMutex);
6080 }
6081 else {
6082 LOGD("Blocking on timed conditional wait");
6083 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6084 if (rc == ETIMEDOUT) {
6085 rc = -ENODEV;
6086 LOGE("Unblocked on timeout!!!!");
6087 break;
6088 }
6089 }
6090 LOGD("Unblocked");
6091 if (mWokenUpByDaemon) {
6092 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006093 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006094 break;
6095 }
6096 }
6097 pthread_mutex_unlock(&mMutex);
6098
6099 return rc;
6100}
6101
6102/*===========================================================================
6103 * FUNCTION : dump
6104 *
6105 * DESCRIPTION: Dump HAL3 debug state (pending requests, pending buffers and
6106 *              the pending frame drop list) to the given file descriptor
6107 * PARAMETERS :
6108 *   @fd : file descriptor to dump the information to
6109 *
6110 * RETURN :
6111 *==========================================================================*/
6112void QCamera3HardwareInterface::dump(int fd)
6113{
6114 pthread_mutex_lock(&mMutex);
6115 dprintf(fd, "\n Camera HAL3 information Begin \n");
6116
6117 dprintf(fd, "\nNumber of pending requests: %zu \n",
6118 mPendingRequestsList.size());
6119 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6120 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6121 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6122 for(pendingRequestIterator i = mPendingRequestsList.begin();
6123 i != mPendingRequestsList.end(); i++) {
6124 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6125 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6126 i->input_buffer);
6127 }
6128 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6129 mPendingBuffersMap.get_num_overall_buffers());
6130 dprintf(fd, "-------+------------------\n");
6131 dprintf(fd, " Frame | Stream type mask \n");
6132 dprintf(fd, "-------+------------------\n");
6133 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6134 for(auto &j : req.mPendingBufferList) {
6135 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6136 dprintf(fd, " %5d | %11d \n",
6137 req.frame_number, channel->getStreamTypeMask());
6138 }
6139 }
6140 dprintf(fd, "-------+------------------\n");
6141
6142 dprintf(fd, "\nPending frame drop list: %zu\n",
6143 mPendingFrameDropList.size());
6144 dprintf(fd, "-------+-----------\n");
6145 dprintf(fd, " Frame | Stream ID \n");
6146 dprintf(fd, "-------+-----------\n");
6147 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6148 i != mPendingFrameDropList.end(); i++) {
6149 dprintf(fd, " %5d | %9d \n",
6150 i->frame_number, i->stream_ID);
6151 }
6152 dprintf(fd, "-------+-----------\n");
6153
6154 dprintf(fd, "\n Camera HAL3 information End \n");
6155
6156 /* use dumpsys media.camera as trigger to send update debug level event */
6157 mUpdateDebugLevel = true;
6158 pthread_mutex_unlock(&mMutex);
6159 return;
6160}
6161
6162/*===========================================================================
6163 * FUNCTION : flush
6164 *
6165 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6166 * conditionally restarts channels
6167 *
6168 * PARAMETERS :
6169 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006170 * @ stopChannelImmediately: stop the channel immediately. This should be used
6171 * when the device has encountered an error and MIPI may
6172 * have been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006173 *
6174 * RETURN :
6175 * 0 on success
6176 * Error code on failure
6177 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006178int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006179{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006180 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006181 int32_t rc = NO_ERROR;
6182
6183 LOGD("Unblocking Process Capture Request");
6184 pthread_mutex_lock(&mMutex);
6185 mFlush = true;
6186 pthread_mutex_unlock(&mMutex);
6187
6188 rc = stopAllChannels();
6189 // unlink of dualcam
6190 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006191 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6192 &m_pDualCamCmdPtr->bundle_info;
6193 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006194 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6195 pthread_mutex_lock(&gCamLock);
6196
6197 if (mIsMainCamera == 1) {
6198 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6199 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006200 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006201 // related session id should be session id of linked session
6202 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6203 } else {
6204 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6205 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006206 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006207 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6208 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006209 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006210 pthread_mutex_unlock(&gCamLock);
6211
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006212 rc = mCameraHandle->ops->set_dual_cam_cmd(
6213 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006214 if (rc < 0) {
6215 LOGE("Dualcam: Unlink failed, but still proceed to close");
6216 }
6217 }
6218
6219 if (rc < 0) {
6220 LOGE("stopAllChannels failed");
6221 return rc;
6222 }
6223 if (mChannelHandle) {
6224 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006225 mChannelHandle, stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006226 }
6227
6228 // Reset bundle info
6229 rc = setBundleInfo();
6230 if (rc < 0) {
6231 LOGE("setBundleInfo failed %d", rc);
6232 return rc;
6233 }
6234
6235 // Mutex Lock
6236 pthread_mutex_lock(&mMutex);
6237
6238 // Unblock process_capture_request
6239 mPendingLiveRequest = 0;
6240 pthread_cond_signal(&mRequestCond);
6241
6242 rc = notifyErrorForPendingRequests();
6243 if (rc < 0) {
6244 LOGE("notifyErrorForPendingRequests failed");
6245 pthread_mutex_unlock(&mMutex);
6246 return rc;
6247 }
6248
6249 mFlush = false;
6250
6251 // Start the Streams/Channels
6252 if (restartChannels) {
6253 rc = startAllChannels();
6254 if (rc < 0) {
6255 LOGE("startAllChannels failed");
6256 pthread_mutex_unlock(&mMutex);
6257 return rc;
6258 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006259 if (mChannelHandle) {
6260 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006261 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006262 if (rc < 0) {
6263 LOGE("start_channel failed");
6264 pthread_mutex_unlock(&mMutex);
6265 return rc;
6266 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006267 }
6268 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006269 pthread_mutex_unlock(&mMutex);
6270
6271 return 0;
6272}
6273
6274/*===========================================================================
6275 * FUNCTION : flushPerf
6276 *
6277 * DESCRIPTION: This is the performance optimization version of flush that does
6278 * not use stream off; instead it flushes the backend and waits for pending buffers
6279 *
6280 * PARAMETERS :
6281 *
6282 *
6283 * RETURN : 0 : success
6284 * -EINVAL: input is malformed (device is not valid)
6285 * -ENODEV: if the device has encountered a serious error
6286 *==========================================================================*/
6287int QCamera3HardwareInterface::flushPerf()
6288{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006289 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006290 int32_t rc = 0;
6291 struct timespec timeout;
6292 bool timed_wait = false;
6293
6294 pthread_mutex_lock(&mMutex);
6295 mFlushPerf = true;
6296 mPendingBuffersMap.numPendingBufsAtFlush =
6297 mPendingBuffersMap.get_num_overall_buffers();
6298 LOGD("Calling flush. Wait for %d buffers to return",
6299 mPendingBuffersMap.numPendingBufsAtFlush);
6300
6301 /* send the flush event to the backend */
6302 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6303 if (rc < 0) {
6304 LOGE("Error in flush: IOCTL failure");
6305 mFlushPerf = false;
6306 pthread_mutex_unlock(&mMutex);
6307 return -ENODEV;
6308 }
6309
6310 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6311 LOGD("No pending buffers in HAL, return flush");
6312 mFlushPerf = false;
6313 pthread_mutex_unlock(&mMutex);
6314 return rc;
6315 }
6316
6317 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006318 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006319 if (rc < 0) {
6320 LOGE("Error reading the real time clock, cannot use timed wait");
6321 } else {
6322 timeout.tv_sec += FLUSH_TIMEOUT;
6323 timed_wait = true;
6324 }
6325
6326 //Block on conditional variable
6327 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6328 LOGD("Waiting on mBuffersCond");
6329 if (!timed_wait) {
6330 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6331 if (rc != 0) {
6332 LOGE("pthread_cond_wait failed due to rc = %s",
6333 strerror(rc));
6334 break;
6335 }
6336 } else {
6337 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6338 if (rc != 0) {
6339 LOGE("pthread_cond_timedwait failed due to rc = %s",
6340 strerror(rc));
6341 break;
6342 }
6343 }
6344 }
6345 if (rc != 0) {
6346 mFlushPerf = false;
6347 pthread_mutex_unlock(&mMutex);
6348 return -ENODEV;
6349 }
6350
6351 LOGD("Received buffers, now safe to return them");
6352
6353 //make sure the channels handle flush
6354 //currently only required for the picture channel to release snapshot resources
6355 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6356 it != mStreamInfo.end(); it++) {
6357 QCamera3Channel *channel = (*it)->channel;
6358 if (channel) {
6359 rc = channel->flush();
6360 if (rc) {
6361 LOGE("Flushing the channels failed with error %d", rc);
6362 // even though the channel flush failed we need to continue and
6363 // return the buffers we have to the framework, however the return
6364 // value will be an error
6365 rc = -ENODEV;
6366 }
6367 }
6368 }
6369
6370 /* notify the frameworks and send errored results */
6371 rc = notifyErrorForPendingRequests();
6372 if (rc < 0) {
6373 LOGE("notifyErrorForPendingRequests failed");
6374 pthread_mutex_unlock(&mMutex);
6375 return rc;
6376 }
6377
6378 //unblock process_capture_request
6379 mPendingLiveRequest = 0;
6380 unblockRequestIfNecessary();
6381
6382 mFlushPerf = false;
6383 pthread_mutex_unlock(&mMutex);
6384 LOGD ("Flush Operation complete. rc = %d", rc);
6385 return rc;
6386}
6387
6388/*===========================================================================
6389 * FUNCTION : handleCameraDeviceError
6390 *
6391 * DESCRIPTION: This function calls internal flush and notifies the error to
6392 * framework and updates the state variable.
6393 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006394 * PARAMETERS :
6395 * @stopChannelImmediately : stop channels immediately without waiting for
6396 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006397 *
6398 * RETURN : NO_ERROR on Success
6399 * Error code on failure
6400 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006401int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006402{
6403 int32_t rc = NO_ERROR;
6404
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006405 {
6406 Mutex::Autolock lock(mFlushLock);
6407 pthread_mutex_lock(&mMutex);
6408 if (mState != ERROR) {
6409 //if mState != ERROR, nothing to be done
6410 pthread_mutex_unlock(&mMutex);
6411 return NO_ERROR;
6412 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006413 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006414
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006415 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006416 if (NO_ERROR != rc) {
6417 LOGE("internal flush to handle mState = ERROR failed");
6418 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006419
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006420 pthread_mutex_lock(&mMutex);
6421 mState = DEINIT;
6422 pthread_mutex_unlock(&mMutex);
6423 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006424
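    // After the internal flush completes, report a device-wide error to the
    // framework: CAMERA3_MSG_ERROR_DEVICE with a null stream and frame number 0
    // indicates the failure is not tied to any particular request.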
6425 camera3_notify_msg_t notify_msg;
6426 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6427 notify_msg.type = CAMERA3_MSG_ERROR;
6428 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6429 notify_msg.message.error.error_stream = NULL;
6430 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006431 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006432
6433 return rc;
6434}
6435
6436/*===========================================================================
6437 * FUNCTION : captureResultCb
6438 *
6439 * DESCRIPTION: Callback handler for all capture result
6440 * (streams, as well as metadata)
6441 *
6442 * PARAMETERS :
6443 * @metadata_buf : metadata buffer from the backend; NULL for buffer callbacks
6444 * @buffer : gralloc buffer to be returned to the framework; NULL if metadata
6445 * @frame_number : frame number the result corresponds to
6446 * @isInputBuffer : true if this callback is for the input (reprocess) buffer
6447 * RETURN : NONE
6448 *==========================================================================*/
6449void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6450 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6451{
6452 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006453 pthread_mutex_lock(&mMutex);
6454 uint8_t batchSize = mBatchSize;
6455 pthread_mutex_unlock(&mMutex);
6456 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006457 handleBatchMetadata(metadata_buf,
6458 true /* free_and_bufdone_meta_buf */);
6459 } else { /* mBatchSize = 0 */
6460 hdrPlusPerfLock(metadata_buf);
6461 pthread_mutex_lock(&mMutex);
6462 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006463 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006464 true /* last urgent frame of batch metadata */,
6465 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006466 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006467 pthread_mutex_unlock(&mMutex);
6468 }
6469 } else if (isInputBuffer) {
6470 pthread_mutex_lock(&mMutex);
6471 handleInputBufferWithLock(frame_number);
6472 pthread_mutex_unlock(&mMutex);
6473 } else {
6474 pthread_mutex_lock(&mMutex);
6475 handleBufferWithLock(buffer, frame_number);
6476 pthread_mutex_unlock(&mMutex);
6477 }
6478 return;
6479}
6480
6481/*===========================================================================
6482 * FUNCTION : getReprocessibleOutputStreamId
6483 *
6484 * DESCRIPTION: Get source output stream id for the input reprocess stream
6485 * based on size and format, which would be the largest
6486 * output stream if an input stream exists.
6487 *
6488 * PARAMETERS :
6489 * @id : return the stream id if found
6490 *
6491 * RETURN : int32_t type of status
6492 * NO_ERROR -- success
6493 * non-zero failure code
6494 *==========================================================================*/
6495int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6496{
6497 /* check if any output or bidirectional stream with the same size and format
6498 and return that stream */
6499 if ((mInputStreamInfo.dim.width > 0) &&
6500 (mInputStreamInfo.dim.height > 0)) {
6501 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6502 it != mStreamInfo.end(); it++) {
6503
6504 camera3_stream_t *stream = (*it)->stream;
6505 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6506 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6507 (stream->format == mInputStreamInfo.format)) {
6508 // Usage flag for an input stream and the source output stream
6509 // may be different.
6510 LOGD("Found reprocessible output stream! %p", *it);
6511 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6512 stream->usage, mInputStreamInfo.usage);
6513
6514 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6515 if (channel != NULL && channel->mStreams[0]) {
6516 id = channel->mStreams[0]->getMyServerID();
6517 return NO_ERROR;
6518 }
6519 }
6520 }
6521 } else {
6522 LOGD("No input stream, so no reprocessible output stream");
6523 }
6524 return NAME_NOT_FOUND;
6525}
6526
6527/*===========================================================================
6528 * FUNCTION : lookupFwkName
6529 *
6530 * DESCRIPTION: In case the enum is not the same in the framework and the
6531 *              backend, make sure the parameter is correctly propagated
6532 *
6533 * PARAMETERS :
6534 * @arr : map between the two enums
6535 * @len : len of the map
6536 * @hal_name : name of the hal_parm to map
6537 *
6538 * RETURN : int type of status
6539 * fwk_name -- success
6540 *              non-zero failure code (NAME_NOT_FOUND)
6541 *==========================================================================*/
6542template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6543 size_t len, halType hal_name)
6544{
6545
6546 for (size_t i = 0; i < len; i++) {
6547 if (arr[i].hal_name == hal_name) {
6548 return arr[i].fwk_name;
6549 }
6550 }
6551
6552    /* Failing to find a matching framework type is not necessarily
6553     * an error. This happens when mm-camera supports more attributes
6554     * than the framework does */
6555 LOGH("Cannot find matching framework type");
6556 return NAME_NOT_FOUND;
6557}
6558
6559/*===========================================================================
6560 * FUNCTION : lookupHalName
6561 *
6562 * DESCRIPTION: In case the enum is not the same in the framework and the
6563 *              backend, make sure the parameter is correctly propagated
6564 *
6565 * PARAMETERS :
6566 * @arr : map between the two enums
6567 * @len : len of the map
6568 * @fwk_name : name of the framework parameter to map
6569 *
6570 * RETURN : int32_t type of status
6571 * hal_name -- success
6572 *              non-zero failure code (NAME_NOT_FOUND)
6573 *==========================================================================*/
6574template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6575 size_t len, fwkType fwk_name)
6576{
6577 for (size_t i = 0; i < len; i++) {
6578 if (arr[i].fwk_name == fwk_name) {
6579 return arr[i].hal_name;
6580 }
6581 }
6582
6583 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6584 return NAME_NOT_FOUND;
6585}
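
/*
 * Illustrative sketch only (not compiled into the HAL): lookupFwkName() and
 * lookupHalName() are inverse lookups over the same mapping table. The snippet
 * below assumes the FLASH_MODES_MAP table and METADATA_MAP_SIZE() helper used
 * elsewhere in this file, and CAM_FLASH_MODE_TORCH as an example backend value.
 */
#if 0 // example usage of lookupFwkName()/lookupHalName()
static void exampleFlashModeRoundTrip()
{
    // Backend enum -> framework enum
    int fwk = lookupFwkName(FLASH_MODES_MAP,
            METADATA_MAP_SIZE(FLASH_MODES_MAP), (uint32_t)CAM_FLASH_MODE_TORCH);
    if (NAME_NOT_FOUND != fwk) {
        // Framework enum -> backend enum; maps back to CAM_FLASH_MODE_TORCH
        // when both enums are present in the table.
        int hal = lookupHalName(FLASH_MODES_MAP,
                METADATA_MAP_SIZE(FLASH_MODES_MAP), fwk);
        LOGD("fwk flash mode %d maps back to hal mode %d", fwk, hal);
    }
}
#endif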
6586
6587/*===========================================================================
6588 * FUNCTION : lookupProp
6589 *
6590 * DESCRIPTION: lookup a value by its name
6591 *
6592 * PARAMETERS :
6593 * @arr : map between the two enums
6594 * @len : size of the map
6595 * @name : name to be looked up
6596 *
6597 * RETURN : Value if found
6598 * CAM_CDS_MODE_MAX if not found
6599 *==========================================================================*/
6600template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6601 size_t len, const char *name)
6602{
6603 if (name) {
6604 for (size_t i = 0; i < len; i++) {
6605 if (!strcmp(arr[i].desc, name)) {
6606 return arr[i].val;
6607 }
6608 }
6609 }
6610 return CAM_CDS_MODE_MAX;
6611}
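
/*
 * Illustrative sketch only (not compiled into the HAL): lookupProp() maps a
 * descriptor string, e.g. one read from an Android system property, onto the
 * corresponding CDS enum. CDS_MAP is assumed to be the descriptor table defined
 * in this file, and "persist.camera.CDS" is a placeholder property name.
 */
#if 0 // example usage of lookupProp()
static cam_cds_mode_type_t exampleCdsFromProperty()
{
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.CDS", prop, "Auto");
    cam_cds_mode_type_t cds =
            lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
    // CAM_CDS_MODE_MAX means the string did not match any table entry.
    return cds;
}
#endif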
6612
6613/*===========================================================================
6614 * FUNCTION   : translateFromHalMetadata
 *
6615 * DESCRIPTION: Translate metadata reported by the HAL backend into a
 *              camera_metadata_t result in the format expected by the framework
6616 *
6617 * PARAMETERS :
6618 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006619 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006620 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006621 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6622 *                        in a batch. Always true for non-batch mode.
 * @enableZsl: whether ZSL was requested for this capture (may be NULL)
Thierry Strudel3d639192016-09-09 11:52:26 -07006623 *
6624 * RETURN : camera_metadata_t*
6625 * metadata in a format specified by fwk
6626 *==========================================================================*/
6627camera_metadata_t*
6628QCamera3HardwareInterface::translateFromHalMetadata(
6629 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006630 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006631 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006632 bool lastMetadataInBatch,
6633 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006634{
6635 CameraMetadata camMetadata;
6636 camera_metadata_t *resultMetadata;
6637
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006638 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006639        /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last metadata
6640         * in the batch. The timestamp is needed because it is used for the shutter notify calculation.
6641 * */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006642 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006643 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006644 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006645 }
6646
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006647 if (pendingRequest.jpegMetadata.entryCount())
6648 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006649
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006650 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6651 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6652 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6653 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6654 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006655 if (mBatchSize == 0) {
6656 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006657 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006658 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006659
Samuel Ha68ba5172016-12-15 18:41:12 -08006660 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6661 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006662 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006663 // DevCamDebug metadata translateFromHalMetadata AF
6664 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6665 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6666 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6667 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6668 }
6669 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6670 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6671 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6672 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6673 }
6674 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6675 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6676 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6677 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6678 }
6679 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6680 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6681 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6682 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6683 }
6684 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6685 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6686 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6687 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6688 }
6689 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6690 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6691 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6692 *DevCamDebug_af_monitor_pdaf_target_pos;
6693 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6694 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6695 }
6696 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6697 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6698 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6699 *DevCamDebug_af_monitor_pdaf_confidence;
6700 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6701 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6702 }
6703 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6704 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6705 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6706 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6707 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6708 }
6709 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6710 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6711 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6712 *DevCamDebug_af_monitor_tof_target_pos;
6713 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6714 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6715 }
6716 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6717 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6718 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6719 *DevCamDebug_af_monitor_tof_confidence;
6720 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6721 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6722 }
6723 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6724 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6725 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6726 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6727 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6728 }
6729 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6730 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6731 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6732 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6733 &fwk_DevCamDebug_af_monitor_type_select, 1);
6734 }
6735 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6736 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6737 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6738 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6739 &fwk_DevCamDebug_af_monitor_refocus, 1);
6740 }
6741 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6742 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6743 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6744 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6745 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6746 }
6747 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6748 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6749 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6750 *DevCamDebug_af_search_pdaf_target_pos;
6751 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6752 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6753 }
6754 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6755 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6756 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6757 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6758 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6759 }
6760 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6761 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6762 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6763 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6764 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6765 }
6766 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6767 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6768 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6769 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6770 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6771 }
6772 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6773 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6774 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6775 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6776 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6777 }
6778 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6779 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6780 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6781 *DevCamDebug_af_search_tof_target_pos;
6782 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6783 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6784 }
6785 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6786 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6787 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6788 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6789 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6790 }
6791 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6792 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6793 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6794 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6795 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6796 }
6797 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6798 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6799 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6800 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6801 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6802 }
6803 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6804 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6805 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6806 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6807 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6808 }
6809 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6810 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6811 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6812 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6813 &fwk_DevCamDebug_af_search_type_select, 1);
6814 }
6815 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6816 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6817 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6818 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6819 &fwk_DevCamDebug_af_search_next_pos, 1);
6820 }
6821 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6822 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6823 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6824 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6825 &fwk_DevCamDebug_af_search_target_pos, 1);
6826 }
6827 // DevCamDebug metadata translateFromHalMetadata AEC
6828 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6829 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6830 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6831 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6832 }
6833 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6834 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6835 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6836 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6837 }
6838 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6839 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6840 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6841 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6842 }
6843 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6844 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6845 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6846 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6847 }
6848 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6849 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6850 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6851 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6852 }
6853 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6854 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6855 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6856 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6857 }
6858 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6859 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6860 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6861 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6862 }
6863 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6864 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6865 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6866 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6867 }
Samuel Ha34229982017-02-17 13:51:11 -08006868 // DevCamDebug metadata translateFromHalMetadata zzHDR
6869 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6870 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6871 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6872 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6873 }
6874 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6875 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006876 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006877 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6878 }
6879 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6880 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6881 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6882 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6883 }
6884 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6885 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006886 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006887 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6888 }
6889 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6890 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6891 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6892 *DevCamDebug_aec_hdr_sensitivity_ratio;
6893 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6894 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6895 }
6896 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6897 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6898 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6899 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6900 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6901 }
6902 // DevCamDebug metadata translateFromHalMetadata ADRC
6903 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6904 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6905 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6906 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6907 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6908 }
6909 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6910 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6911 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6912 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6913 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6914 }
6915 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6916 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6917 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6918 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6919 }
6920 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6921 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6922 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6923 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6924 }
6925 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6926 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6927 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6928 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6929 }
6930 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6931 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6932 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6933 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6934 }
Samuel Habdf4fac2017-07-28 17:21:18 -07006935 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
6936 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
6937 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
6938 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
6939 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
6940 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
6941 }
6942 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
6943 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
6944 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
6945 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
6946 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
6947 }
6948 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
6949 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
6950 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
6951 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
6952 &fwk_DevCamDebug_aec_subject_motion, 1);
6953 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006954 // DevCamDebug metadata translateFromHalMetadata AWB
6955 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6956 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6957 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6958 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6959 }
6960 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6961 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6962 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6963 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6964 }
6965 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6966 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6967 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6968 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6969 }
6970 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6971 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6972 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6973 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6974 }
6975 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6976 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6977 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6978 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6979 }
6980 }
6981 // atrace_end(ATRACE_TAG_ALWAYS);
6982
Thierry Strudel3d639192016-09-09 11:52:26 -07006983 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6984 int64_t fwk_frame_number = *frame_number;
6985 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6986 }
6987
6988 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6989 int32_t fps_range[2];
6990 fps_range[0] = (int32_t)float_range->min_fps;
6991 fps_range[1] = (int32_t)float_range->max_fps;
6992 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6993 fps_range, 2);
6994 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6995 fps_range[0], fps_range[1]);
6996 }
6997
6998 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6999 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7000 }
7001
7002 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7003 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7004 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7005 *sceneMode);
7006 if (NAME_NOT_FOUND != val) {
7007 uint8_t fwkSceneMode = (uint8_t)val;
7008 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7009 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7010 fwkSceneMode);
7011 }
7012 }
7013
7014 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7015 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7016 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7017 }
7018
7019 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7020 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7021 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7022 }
7023
7024 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7025 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7026 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7027 }
7028
7029 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7030 CAM_INTF_META_EDGE_MODE, metadata) {
7031 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7032 }
7033
7034 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7035 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7036 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7037 }
7038
7039 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7040 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7041 }
7042
7043 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7044 if (0 <= *flashState) {
7045 uint8_t fwk_flashState = (uint8_t) *flashState;
7046 if (!gCamCapability[mCameraId]->flash_available) {
7047 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7048 }
7049 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7050 }
7051 }
7052
7053 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7054 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7055 if (NAME_NOT_FOUND != val) {
7056 uint8_t fwk_flashMode = (uint8_t)val;
7057 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7058 }
7059 }
7060
7061 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7062 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7063 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7064 }
7065
7066 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7067 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7068 }
7069
7070 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7071 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7072 }
7073
7074 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7075 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7076 }
7077
7078 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7079 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7080 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7081 }
7082
7083 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7084 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7085 LOGD("fwk_videoStab = %d", fwk_videoStab);
7086 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7087 } else {
7088        // Regardless of whether video stabilization is supported, CTS expects the EIS result
7089        // to be non-NULL, so hardcode the video stabilization result to OFF mode.
7090 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7091 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007092 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007093 }
7094
7095 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7096 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7097 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7098 }
7099
7100 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7101 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7102 }
7103
Thierry Strudel3d639192016-09-09 11:52:26 -07007104 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7105 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007106 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007107
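        // adjustBlackLevelForCFA() remaps the four applied black-level entries
        // according to the sensor's color filter arrangement before they are
        // reported to the framework below.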
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007108 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7109 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007110
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007111        LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007112 blackLevelAppliedPattern->cam_black_level[0],
7113 blackLevelAppliedPattern->cam_black_level[1],
7114 blackLevelAppliedPattern->cam_black_level[2],
7115 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007116 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7117 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007118
7119#ifndef USE_HAL_3_3
7120 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307121        // Need to convert the internal 14-bit black level to the sensor's 10-bit
Zhijun Heb753c672016-06-15 14:50:48 -07007122        // raw depth space, hence the divide by 2^(14-10) = 16 below.
Jason Lee4f3d96e2017-02-28 19:24:14 +05307123 fwk_blackLevelInd[0] /= 16.0;
7124 fwk_blackLevelInd[1] /= 16.0;
7125 fwk_blackLevelInd[2] /= 16.0;
7126 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007127 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7128 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007129#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007130 }
7131
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007132#ifndef USE_HAL_3_3
7133 // Fixed whitelevel is used by ISP/Sensor
7134 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7135 &gCamCapability[mCameraId]->white_level, 1);
7136#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007137
7138 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7139 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7140 int32_t scalerCropRegion[4];
7141 scalerCropRegion[0] = hScalerCropRegion->left;
7142 scalerCropRegion[1] = hScalerCropRegion->top;
7143 scalerCropRegion[2] = hScalerCropRegion->width;
7144 scalerCropRegion[3] = hScalerCropRegion->height;
7145
7146 // Adjust crop region from sensor output coordinate system to active
7147 // array coordinate system.
7148 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7149 scalerCropRegion[2], scalerCropRegion[3]);
7150
7151 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7152 }
7153
7154 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7155 LOGD("sensorExpTime = %lld", *sensorExpTime);
7156 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7157 }
7158
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007159 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7160 LOGD("expTimeBoost = %f", *expTimeBoost);
7161 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7162 }
7163
Thierry Strudel3d639192016-09-09 11:52:26 -07007164    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7165            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7166        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7167        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7168 }
7169
7170 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7171 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7172 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7173 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7174 sensorRollingShutterSkew, 1);
7175 }
7176
7177 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7178 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7179 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7180
7181 //calculate the noise profile based on sensitivity
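        // ANDROID_SENSOR_NOISE_PROFILE expects one (S, O) coefficient pair per
        // color channel, modeling the noise for a pixel value x as
        // N(x) = sqrt(S * x + O): S scales the signal-dependent (shot) noise and
        // O is the signal-independent (read) noise, both derived from sensitivity.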
7182 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7183 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7184 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7185 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7186 noise_profile[i] = noise_profile_S;
7187 noise_profile[i+1] = noise_profile_O;
7188 }
7189 LOGD("noise model entry (S, O) is (%f, %f)",
7190 noise_profile_S, noise_profile_O);
7191 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7192 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7193 }
7194
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007195#ifndef USE_HAL_3_3
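    // ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST is expressed relative to a
    // baseline of 100 (no boost); the ISP sensitivity and the post-stats
    // sensitivity factor reported by the backend are folded into that value.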
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007196 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007197 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007198 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007199 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007200 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7201 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7202 }
7203 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007204#endif
7205
Thierry Strudel3d639192016-09-09 11:52:26 -07007206 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7207 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7208 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7209 }
7210
7211 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7212 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7213 *faceDetectMode);
7214 if (NAME_NOT_FOUND != val) {
7215 uint8_t fwk_faceDetectMode = (uint8_t)val;
7216 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7217
7218 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7219 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7220 CAM_INTF_META_FACE_DETECTION, metadata) {
7221 uint8_t numFaces = MIN(
7222 faceDetectionInfo->num_faces_detected, MAX_ROI);
7223 int32_t faceIds[MAX_ROI];
7224 uint8_t faceScores[MAX_ROI];
7225 int32_t faceRectangles[MAX_ROI * 4];
7226 int32_t faceLandmarks[MAX_ROI * 6];
7227 size_t j = 0, k = 0;
7228
7229 for (size_t i = 0; i < numFaces; i++) {
7230 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7231                    // Adjust the face boundary from the sensor output coordinate
7232                    // system to the active array coordinate system.
7233 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7234 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7235 rect.width, rect.height);
7236
7237 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7238 faceRectangles+j, -1);
7239
Jason Lee8ce36fa2017-04-19 19:40:37 -07007240 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7241 "bottom-right (%d, %d)",
7242 faceDetectionInfo->frame_id, i,
7243 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7244 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7245
Thierry Strudel3d639192016-09-09 11:52:26 -07007246 j+= 4;
7247 }
7248 if (numFaces <= 0) {
7249 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7250 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7251 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7252 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7253 }
7254
7255 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7256 numFaces);
7257 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7258 faceRectangles, numFaces * 4U);
7259 if (fwk_faceDetectMode ==
7260 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7261 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7262 CAM_INTF_META_FACE_LANDMARK, metadata) {
7263
7264 for (size_t i = 0; i < numFaces; i++) {
7265                            // Map the landmark coordinates from the sensor output
7266                            // coordinate system to the active array coordinate system.
7267 mCropRegionMapper.toActiveArray(
7268 landmarks->face_landmarks[i].left_eye_center.x,
7269 landmarks->face_landmarks[i].left_eye_center.y);
7270 mCropRegionMapper.toActiveArray(
7271 landmarks->face_landmarks[i].right_eye_center.x,
7272 landmarks->face_landmarks[i].right_eye_center.y);
7273 mCropRegionMapper.toActiveArray(
7274 landmarks->face_landmarks[i].mouth_center.x,
7275 landmarks->face_landmarks[i].mouth_center.y);
7276
7277 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007278
7279 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7280 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7281 faceDetectionInfo->frame_id, i,
7282 faceLandmarks[k + LEFT_EYE_X],
7283 faceLandmarks[k + LEFT_EYE_Y],
7284 faceLandmarks[k + RIGHT_EYE_X],
7285 faceLandmarks[k + RIGHT_EYE_Y],
7286 faceLandmarks[k + MOUTH_X],
7287 faceLandmarks[k + MOUTH_Y]);
7288
Thierry Strudel04e026f2016-10-10 11:27:36 -07007289 k+= TOTAL_LANDMARK_INDICES;
7290 }
7291 } else {
7292 for (size_t i = 0; i < numFaces; i++) {
7293 setInvalidLandmarks(faceLandmarks+k);
7294 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007295 }
7296 }
7297
Jason Lee49619db2017-04-13 12:07:22 -07007298 for (size_t i = 0; i < numFaces; i++) {
7299 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7300
7301 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7302 faceDetectionInfo->frame_id, i, faceIds[i]);
7303 }
7304
Thierry Strudel3d639192016-09-09 11:52:26 -07007305 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7306 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7307 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007308 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007309 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7310 CAM_INTF_META_FACE_BLINK, metadata) {
7311 uint8_t detected[MAX_ROI];
7312 uint8_t degree[MAX_ROI * 2];
7313 for (size_t i = 0; i < numFaces; i++) {
7314 detected[i] = blinks->blink[i].blink_detected;
7315 degree[2 * i] = blinks->blink[i].left_blink;
7316 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007317
Jason Lee49619db2017-04-13 12:07:22 -07007318 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7319 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7320 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7321 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007322 }
7323 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7324 detected, numFaces);
7325 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7326 degree, numFaces * 2);
7327 }
7328 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7329 CAM_INTF_META_FACE_SMILE, metadata) {
7330 uint8_t degree[MAX_ROI];
7331 uint8_t confidence[MAX_ROI];
7332 for (size_t i = 0; i < numFaces; i++) {
7333 degree[i] = smiles->smile[i].smile_degree;
7334 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007335
Jason Lee49619db2017-04-13 12:07:22 -07007336 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7337 "smile_degree=%d, smile_score=%d",
7338 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007339 }
7340 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7341 degree, numFaces);
7342 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7343 confidence, numFaces);
7344 }
7345 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7346 CAM_INTF_META_FACE_GAZE, metadata) {
7347 int8_t angle[MAX_ROI];
7348 int32_t direction[MAX_ROI * 3];
7349 int8_t degree[MAX_ROI * 2];
7350 for (size_t i = 0; i < numFaces; i++) {
7351 angle[i] = gazes->gaze[i].gaze_angle;
7352 direction[3 * i] = gazes->gaze[i].updown_dir;
7353 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7354 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7355 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7356 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007357
7358 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7359                            "updown_dir=%d, leftright_dir=%d, roll_dir=%d, "
7360 "left_right_gaze=%d, top_bottom_gaze=%d",
7361 faceDetectionInfo->frame_id, i, angle[i],
7362 direction[3 * i], direction[3 * i + 1],
7363 direction[3 * i + 2],
7364 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007365 }
7366 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7367 (uint8_t *)angle, numFaces);
7368 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7369 direction, numFaces * 3);
7370 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7371 (uint8_t *)degree, numFaces * 2);
7372 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007373 }
7374 }
7375 }
7376 }
7377
7378 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7379 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007380 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007381 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007382 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007383
Shuzhen Wang14415f52016-11-16 18:26:18 -08007384 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7385 histogramBins = *histBins;
7386 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7387 }
7388
7389 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007390 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7391 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007392 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007393
7394 switch (stats_data->type) {
7395 case CAM_HISTOGRAM_TYPE_BAYER:
7396 switch (stats_data->bayer_stats.data_type) {
7397 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007398 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7399 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007400 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007401 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7402 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007403 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007404 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7405 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007406 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007407 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007408 case CAM_STATS_CHANNEL_R:
7409 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007410 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7411 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007412 }
7413 break;
7414 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007415 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007416 break;
7417 }
7418
Shuzhen Wang14415f52016-11-16 18:26:18 -08007419 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007420 }
7421 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007422 }
7423
7424 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7425 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7426 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7427 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7428 }
7429
7430 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7431 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7432 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7433 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7434 }
7435
7436 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7437 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7438 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7439 CAM_MAX_SHADING_MAP_HEIGHT);
7440 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7441 CAM_MAX_SHADING_MAP_WIDTH);
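        // The lens shading map carries four gain samples (one per Bayer channel)
        // per grid cell, hence the 4 * width * height element count; the grid is
        // clamped to the CAM_MAX_SHADING_MAP_* limits above.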
7442 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7443 lensShadingMap->lens_shading, 4U * map_width * map_height);
7444 }
7445
7446 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7447 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7448 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7449 }
7450
7451 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7452 //Populate CAM_INTF_META_TONEMAP_CURVES
7453 /* ch0 = G, ch 1 = B, ch 2 = R*/
7454 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7455 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7456 tonemap->tonemap_points_cnt,
7457 CAM_MAX_TONEMAP_CURVE_SIZE);
7458 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7459 }
7460
7461 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7462 &tonemap->curves[0].tonemap_points[0][0],
7463 tonemap->tonemap_points_cnt * 2);
7464
7465 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7466 &tonemap->curves[1].tonemap_points[0][0],
7467 tonemap->tonemap_points_cnt * 2);
7468
7469 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7470 &tonemap->curves[2].tonemap_points[0][0],
7471 tonemap->tonemap_points_cnt * 2);
7472 }
7473
7474 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7475 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7476 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7477 CC_GAIN_MAX);
7478 }
7479
7480 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7481 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7482 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7483 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7484 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7485 }
7486
7487 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7488 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7489 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7490 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7491 toneCurve->tonemap_points_cnt,
7492 CAM_MAX_TONEMAP_CURVE_SIZE);
7493 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7494 }
7495 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7496 (float*)toneCurve->curve.tonemap_points,
7497 toneCurve->tonemap_points_cnt * 2);
7498 }
7499
7500 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7501 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7502 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7503 predColorCorrectionGains->gains, 4);
7504 }
7505
7506 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7507 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7508 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7509 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7510 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7511 }
7512
7513 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7514 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7515 }
7516
7517 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7518 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7519 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7520 }
7521
7522 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7523 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7524 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7525 }
7526
7527 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7528 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7529 *effectMode);
7530 if (NAME_NOT_FOUND != val) {
7531 uint8_t fwk_effectMode = (uint8_t)val;
7532 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7533 }
7534 }
7535
7536 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7537 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7538 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7539 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7540 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7541 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7542 }
7543 int32_t fwk_testPatternData[4];
7544 fwk_testPatternData[0] = testPatternData->r;
7545 fwk_testPatternData[3] = testPatternData->b;
7546 switch (gCamCapability[mCameraId]->color_arrangement) {
7547 case CAM_FILTER_ARRANGEMENT_RGGB:
7548 case CAM_FILTER_ARRANGEMENT_GRBG:
7549 fwk_testPatternData[1] = testPatternData->gr;
7550 fwk_testPatternData[2] = testPatternData->gb;
7551 break;
7552 case CAM_FILTER_ARRANGEMENT_GBRG:
7553 case CAM_FILTER_ARRANGEMENT_BGGR:
7554 fwk_testPatternData[2] = testPatternData->gr;
7555 fwk_testPatternData[1] = testPatternData->gb;
7556 break;
7557 default:
7558 LOGE("color arrangement %d is not supported",
7559 gCamCapability[mCameraId]->color_arrangement);
7560 break;
7561 }
7562 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7563 }
7564
7565 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7566 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7567 }
7568
7569 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7570 String8 str((const char *)gps_methods);
7571 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7572 }
7573
7574 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7575 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7576 }
7577
7578 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7579 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7580 }
7581
7582 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7583 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7584 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7585 }
7586
7587 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7588 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7589 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7590 }
7591
7592 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7593 int32_t fwk_thumb_size[2];
7594 fwk_thumb_size[0] = thumb_size->width;
7595 fwk_thumb_size[1] = thumb_size->height;
7596 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7597 }
7598
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007599 // Skip reprocess metadata if there is no input stream.
7600 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7601 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7602 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7603 privateData,
7604 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7605 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007606 }
7607
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007608 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007609 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007610 meteringMode, 1);
7611 }
7612
Thierry Strudel54dc9782017-02-15 12:12:10 -08007613 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7614 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7615 LOGD("hdr_scene_data: %d %f\n",
7616 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7617 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7618 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7619 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7620 &isHdr, 1);
7621 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7622 &isHdrConfidence, 1);
7623 }
7624
7625
7626
Thierry Strudel3d639192016-09-09 11:52:26 -07007627 if (metadata->is_tuning_params_valid) {
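        // Tuning blob layout: six uint32_t header fields (data version followed
        // by the sensor, VFE, CPP, CAC and mod3 payload sizes), then the sensor,
        // VFE, CPP and CAC payloads, each clamped to its TUNING_*_DATA_MAX limit.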
7628 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7629 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7630 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7631
7632
7633 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7634 sizeof(uint32_t));
7635 data += sizeof(uint32_t);
7636
7637 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7638 sizeof(uint32_t));
7639 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7640 data += sizeof(uint32_t);
7641
7642 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7643 sizeof(uint32_t));
7644 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7645 data += sizeof(uint32_t);
7646
7647 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7648 sizeof(uint32_t));
7649 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7650 data += sizeof(uint32_t);
7651
7652 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7653 sizeof(uint32_t));
7654 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7655 data += sizeof(uint32_t);
7656
7657 metadata->tuning_params.tuning_mod3_data_size = 0;
7658 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7659 sizeof(uint32_t));
7660 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7661 data += sizeof(uint32_t);
7662
7663 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7664 TUNING_SENSOR_DATA_MAX);
7665 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7666 count);
7667 data += count;
7668
7669 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7670 TUNING_VFE_DATA_MAX);
7671 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7672 count);
7673 data += count;
7674
7675 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7676 TUNING_CPP_DATA_MAX);
7677 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7678 count);
7679 data += count;
7680
7681 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7682 TUNING_CAC_DATA_MAX);
7683 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7684 count);
7685 data += count;
7686
7687 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7688 (int32_t *)(void *)tuning_meta_data_blob,
7689 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7690 }
7691
7692 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7693 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7694 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7695 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7696 NEUTRAL_COL_POINTS);
7697 }
7698
7699 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7700 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7701 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7702 }
7703
7704 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7705 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7706         // Map the AE region from the sensor output coordinate system to the active
7707         // array coordinate system.
7708 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7709 hAeRegions->rect.width, hAeRegions->rect.height);
7710
7711 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7712 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7713 REGIONS_TUPLE_COUNT);
7714 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7715 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7716 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7717 hAeRegions->rect.height);
7718 }
7719
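    // Report AF state here only if it was not already delivered as an early partial result for
    // this request; prefer the state captured at trigger time when it is available.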
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007720 if (!pendingRequest.focusStateSent) {
7721 if (pendingRequest.focusStateValid) {
7722 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7723 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007724 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007725 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7726 uint8_t fwk_afState = (uint8_t) *afState;
7727 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7728 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7729 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007730 }
7731 }
7732
Thierry Strudel3d639192016-09-09 11:52:26 -07007733 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7734 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7735 }
7736
7737 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7738 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7739 }
7740
7741 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7742 uint8_t fwk_lensState = *lensState;
7743 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7744 }
7745
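    // Collapse the HAL-specific AUTO_50HZ/AUTO_60HZ antibanding modes to plain AUTO before
    // mapping to the framework's ANDROID_CONTROL_AE_ANTIBANDING_MODE enum.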
Thierry Strudel3d639192016-09-09 11:52:26 -07007746 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007747 uint32_t ab_mode = *hal_ab_mode;
7748 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7749 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7750 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7751 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007752 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007753 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007754 if (NAME_NOT_FOUND != val) {
7755 uint8_t fwk_ab_mode = (uint8_t)val;
7756 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7757 }
7758 }
7759
7760 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7761 int val = lookupFwkName(SCENE_MODES_MAP,
7762 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7763 if (NAME_NOT_FOUND != val) {
7764 uint8_t fwkBestshotMode = (uint8_t)val;
7765 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7766 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7767 } else {
7768 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7769 }
7770 }
7771
7772 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7773 uint8_t fwk_mode = (uint8_t) *mode;
7774 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7775 }
7776
7777     /* Constant metadata values to be updated */
7778 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7779 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7780
7781 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7782 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7783
7784 int32_t hotPixelMap[2];
7785 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7786
7787 // CDS
7788 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7789 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7790 }
7791
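    // For the video HDR, IR and TNR entries below, compare the incoming mode against
    // mCurrFeatureState and emit a PROFILE_META_*_TOGGLED log whenever the feature is switched.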
Thierry Strudel04e026f2016-10-10 11:27:36 -07007792 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7793 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007794 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007795 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7796 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7797 } else {
7798 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7799 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007800
7801 if(fwk_hdr != curr_hdr_state) {
7802 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7803 if(fwk_hdr)
7804 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7805 else
7806 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7807 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007808 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7809 }
7810
Thierry Strudel54dc9782017-02-15 12:12:10 -08007811 //binning correction
7812 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7813 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7814 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7815 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7816 }
7817
Thierry Strudel04e026f2016-10-10 11:27:36 -07007818 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007819 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007820 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7821 int8_t is_ir_on = 0;
7822
7823 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7824 if(is_ir_on != curr_ir_state) {
7825 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7826 if(is_ir_on)
7827 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7828 else
7829 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7830 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007831 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007832 }
7833
Thierry Strudel269c81a2016-10-12 12:13:59 -07007834 // AEC SPEED
7835 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7836 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7837 }
7838
7839 // AWB SPEED
7840 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7841 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7842 }
7843
Thierry Strudel3d639192016-09-09 11:52:26 -07007844 // TNR
7845 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7846 uint8_t tnr_enable = tnr->denoise_enable;
7847 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007848 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7849 int8_t is_tnr_on = 0;
7850
7851 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7852 if(is_tnr_on != curr_tnr_state) {
7853 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7854 if(is_tnr_on)
7855 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7856 else
7857 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7858 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007859
7860 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7861 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7862 }
7863
7864 // Reprocess crop data
7865 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7866 uint8_t cnt = crop_data->num_of_streams;
7867 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7868 // mm-qcamera-daemon only posts crop_data for streams
7869             // not linked to pproc, so the absence of valid crop metadata is not
7870             // necessarily an error case.
7871 LOGD("No valid crop metadata entries");
7872 } else {
7873 uint32_t reproc_stream_id;
7874 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7875 LOGD("No reprocessible stream found, ignore crop data");
7876 } else {
7877 int rc = NO_ERROR;
7878 Vector<int32_t> roi_map;
7879 int32_t *crop = new int32_t[cnt*4];
7880 if (NULL == crop) {
7881 rc = NO_MEMORY;
7882 }
7883 if (NO_ERROR == rc) {
7884 int32_t streams_found = 0;
7885 for (size_t i = 0; i < cnt; i++) {
7886 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7887 if (pprocDone) {
7888 // HAL already does internal reprocessing,
7889 // either via reprocessing before JPEG encoding,
7890 // or offline postprocessing for pproc bypass case.
7891 crop[0] = 0;
7892 crop[1] = 0;
7893 crop[2] = mInputStreamInfo.dim.width;
7894 crop[3] = mInputStreamInfo.dim.height;
7895 } else {
7896 crop[0] = crop_data->crop_info[i].crop.left;
7897 crop[1] = crop_data->crop_info[i].crop.top;
7898 crop[2] = crop_data->crop_info[i].crop.width;
7899 crop[3] = crop_data->crop_info[i].crop.height;
7900 }
7901 roi_map.add(crop_data->crop_info[i].roi_map.left);
7902 roi_map.add(crop_data->crop_info[i].roi_map.top);
7903 roi_map.add(crop_data->crop_info[i].roi_map.width);
7904 roi_map.add(crop_data->crop_info[i].roi_map.height);
7905 streams_found++;
7906 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7907 crop[0], crop[1], crop[2], crop[3]);
7908 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7909 crop_data->crop_info[i].roi_map.left,
7910 crop_data->crop_info[i].roi_map.top,
7911 crop_data->crop_info[i].roi_map.width,
7912 crop_data->crop_info[i].roi_map.height);
7913 break;
7914
7915 }
7916 }
7917 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7918 &streams_found, 1);
7919 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7920 crop, (size_t)(streams_found * 4));
7921 if (roi_map.array()) {
7922 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7923 roi_map.array(), roi_map.size());
7924 }
7925 }
7926 if (crop) {
7927 delete [] crop;
7928 }
7929 }
7930 }
7931 }
7932
7933 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7934         // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7935         // so hardcode the CAC result to OFF mode.
7936 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7937 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7938 } else {
7939 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7940 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7941 *cacMode);
7942 if (NAME_NOT_FOUND != val) {
7943 uint8_t resultCacMode = (uint8_t)val;
7944 // check whether CAC result from CB is equal to Framework set CAC mode
7945                 // If not equal, report the CAC mode that came in the corresponding request
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007946 if (pendingRequest.fwkCacMode != resultCacMode) {
7947 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07007948 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007949 //Check if CAC is disabled by property
7950 if (m_cacModeDisabled) {
7951 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7952 }
7953
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007954 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007955 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7956 } else {
7957 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7958 }
7959 }
7960 }
7961
7962 // Post blob of cam_cds_data through vendor tag.
7963 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
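        // Forward only the CDS setting of the reprocessible output stream, repacked as a
        // single-stream cam_cds_data_t override.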
7964 uint8_t cnt = cdsInfo->num_of_streams;
7965 cam_cds_data_t cdsDataOverride;
7966 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7967 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7968 cdsDataOverride.num_of_streams = 1;
7969 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7970 uint32_t reproc_stream_id;
7971 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7972 LOGD("No reprocessible stream found, ignore cds data");
7973 } else {
7974 for (size_t i = 0; i < cnt; i++) {
7975 if (cdsInfo->cds_info[i].stream_id ==
7976 reproc_stream_id) {
7977 cdsDataOverride.cds_info[0].cds_enable =
7978 cdsInfo->cds_info[i].cds_enable;
7979 break;
7980 }
7981 }
7982 }
7983 } else {
7984 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7985 }
7986 camMetadata.update(QCAMERA3_CDS_INFO,
7987 (uint8_t *)&cdsDataOverride,
7988 sizeof(cam_cds_data_t));
7989 }
7990
7991 // Ldaf calibration data
7992 if (!mLdafCalibExist) {
7993 IF_META_AVAILABLE(uint32_t, ldafCalib,
7994 CAM_INTF_META_LDAF_EXIF, metadata) {
7995 mLdafCalibExist = true;
7996 mLdafCalib[0] = ldafCalib[0];
7997 mLdafCalib[1] = ldafCalib[1];
7998 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7999 ldafCalib[0], ldafCalib[1]);
8000 }
8001 }
8002
Thierry Strudel54dc9782017-02-15 12:12:10 -08008003 // EXIF debug data through vendor tag
8004 /*
8005 * Mobicat Mask can assume 3 values:
8006 * 1 refers to Mobicat data,
8007 * 2 refers to Stats Debug and Exif Debug Data
8008 * 3 refers to Mobicat and Stats Debug Data
8009 * We want to make sure that we are sending Exif debug data
8010 * only when Mobicat Mask is 2.
8011 */
8012 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8013 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8014 (uint8_t *)(void *)mExifParams.debug_params,
8015 sizeof(mm_jpeg_debug_exif_params_t));
8016 }
8017
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008018 // Reprocess and DDM debug data through vendor tag
8019 cam_reprocess_info_t repro_info;
8020 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008021 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8022 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008023 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008024 }
8025 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8026 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008027 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008028 }
8029 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8030 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008031 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008032 }
8033 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8034 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008035 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008036 }
8037 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8038 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008039 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008040 }
8041 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008042 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008043 }
8044 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8045 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008046 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008047 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008048 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8049 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8050 }
8051 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8052 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8053 }
8054 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8055 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008056
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008057 // INSTANT AEC MODE
8058 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8059 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8060 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8061 }
8062
Shuzhen Wange763e802016-03-31 10:24:29 -07008063 // AF scene change
8064 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8065 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8066 }
8067
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008068 // Enable ZSL
8069 if (enableZsl != nullptr) {
8070 uint8_t value = *enableZsl ?
8071 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8072 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8073 }
8074
Xu Han821ea9c2017-05-23 09:00:40 -07008075 // OIS Data
8076 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8077 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8078 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8079 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8080 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8081 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8082 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8083 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8084 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8085 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8086 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
8087 }
8088
Thierry Strudel3d639192016-09-09 11:52:26 -07008089 resultMetadata = camMetadata.release();
8090 return resultMetadata;
8091}
8092
8093/*===========================================================================
8094 * FUNCTION : saveExifParams
8095 *
8096 * DESCRIPTION: Cache the 3A/stats EXIF debug parameters from the metadata callback in mExifParams.
8097 *
8098 * PARAMETERS :
8099 * @metadata : metadata information from callback
8100 *
8101 * RETURN : none
8102 *
8103 *==========================================================================*/
8104void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8105{
8106 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8107 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8108 if (mExifParams.debug_params) {
8109 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8110 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8111 }
8112 }
8113 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8114 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8115 if (mExifParams.debug_params) {
8116 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8117 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8118 }
8119 }
8120 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8121 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8122 if (mExifParams.debug_params) {
8123 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8124 mExifParams.debug_params->af_debug_params_valid = TRUE;
8125 }
8126 }
8127 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8128 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8129 if (mExifParams.debug_params) {
8130 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8131 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8132 }
8133 }
8134 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8135 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8136 if (mExifParams.debug_params) {
8137 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8138 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8139 }
8140 }
8141 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8142 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8143 if (mExifParams.debug_params) {
8144 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8145 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8146 }
8147 }
8148 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8149 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8150 if (mExifParams.debug_params) {
8151 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8152 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8153 }
8154 }
8155 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8156 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8157 if (mExifParams.debug_params) {
8158 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8159 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8160 }
8161 }
8162}
8163
8164/*===========================================================================
8165 * FUNCTION : get3AExifParams
8166 *
8167 * DESCRIPTION: Return the cached 3A EXIF parameters (mExifParams).
8168 *
8169 * PARAMETERS : none
8170 *
8171 *
8172 * RETURN : mm_jpeg_exif_params_t
8173 *
8174 *==========================================================================*/
8175mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8176{
8177 return mExifParams;
8178}
8179
8180/*===========================================================================
8181 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8182 *
8183 * DESCRIPTION: Translate urgent (partial result) metadata from the HAL callback into framework result metadata.
8184 *
8185 * PARAMETERS :
8186 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008187 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8188 * urgent metadata in a batch. Always true for
8189 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008190 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008191 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8192 * i.e. even though it doesn't map to a valid partial
8193 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008194 * RETURN : camera_metadata_t*
8195 * metadata in a format specified by fwk
8196 *==========================================================================*/
8197camera_metadata_t*
8198QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008199 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008200 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008201{
8202 CameraMetadata camMetadata;
8203 camera_metadata_t *resultMetadata;
8204
Shuzhen Wang485e2442017-08-02 12:21:08 -07008205 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008206 /* In batch mode, use empty metadata if this is not the last in batch
8207 */
8208 resultMetadata = allocate_camera_metadata(0, 0);
8209 return resultMetadata;
8210 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008211
8212 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8213 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8214 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8215 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8216 }
8217
8218 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8219 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8220 &aecTrigger->trigger, 1);
8221 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8222 &aecTrigger->trigger_id, 1);
8223 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8224 aecTrigger->trigger);
8225 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8226 aecTrigger->trigger_id);
8227 }
8228
8229 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8230 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8231 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8232 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8233 }
8234
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008235 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8236 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8237 if (NAME_NOT_FOUND != val) {
8238 uint8_t fwkAfMode = (uint8_t)val;
8239 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8240 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8241 } else {
8242 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8243 val);
8244 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008245 }
8246
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008247 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8248 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8249 af_trigger->trigger);
8250 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8251 af_trigger->trigger_id);
8252
8253 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8254 mAfTrigger = *af_trigger;
8255 uint32_t fwk_AfState = (uint32_t) *afState;
8256
8257 // If this is the result for a new trigger, check if there is new early
8258 // af state. If there is, use the last af state for all results
8259 // preceding current partial frame number.
8260 for (auto & pendingRequest : mPendingRequestsList) {
8261 if (pendingRequest.frame_number < frame_number) {
8262 pendingRequest.focusStateValid = true;
8263 pendingRequest.focusState = fwk_AfState;
8264 } else if (pendingRequest.frame_number == frame_number) {
8265 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8266 // Check if early AF state for trigger exists. If yes, send AF state as
8267 // partial result for better latency.
8268 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8269 pendingRequest.focusStateSent = true;
8270 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8271 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8272 frame_number, fwkEarlyAfState);
8273 }
8274 }
8275 }
8276 }
8277 }
8278 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8279 &mAfTrigger.trigger, 1);
8280 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8281
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008282 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8283 /*af regions*/
8284 int32_t afRegions[REGIONS_TUPLE_COUNT];
8285         // Map the AF region from the sensor output coordinate system to the active
8286         // array coordinate system.
8287 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8288 hAfRegions->rect.width, hAfRegions->rect.height);
8289
8290 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8291 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8292 REGIONS_TUPLE_COUNT);
8293 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8294 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8295 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8296 hAfRegions->rect.height);
8297 }
8298
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008299 // AF region confidence
8300 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8301 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8302 }
8303
Thierry Strudel3d639192016-09-09 11:52:26 -07008304 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8305 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8306 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8307 if (NAME_NOT_FOUND != val) {
8308 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8309 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8310 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8311 } else {
8312 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8313 }
8314 }
8315
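    // ANDROID_CONTROL_AE_MODE is not reported directly by the backend; derive it from the
    // redeye-reduction, LED flash and AE mode parameters, in that order of precedence.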
8316 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8317 uint32_t aeMode = CAM_AE_MODE_MAX;
8318 int32_t flashMode = CAM_FLASH_MODE_MAX;
8319 int32_t redeye = -1;
8320 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8321 aeMode = *pAeMode;
8322 }
8323 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8324 flashMode = *pFlashMode;
8325 }
8326 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8327 redeye = *pRedeye;
8328 }
8329
8330 if (1 == redeye) {
8331 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8332 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8333 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8334 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8335 flashMode);
8336 if (NAME_NOT_FOUND != val) {
8337 fwk_aeMode = (uint8_t)val;
8338 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8339 } else {
8340 LOGE("Unsupported flash mode %d", flashMode);
8341 }
8342 } else if (aeMode == CAM_AE_MODE_ON) {
8343 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8344 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8345 } else if (aeMode == CAM_AE_MODE_OFF) {
8346 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8347 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008348 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8349 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8350 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008351 } else {
8352 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8353 "flashMode:%d, aeMode:%u!!!",
8354 redeye, flashMode, aeMode);
8355 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008356 if (mInstantAEC) {
8357         // Increment the frame index count until a bound is reached for instant AEC.
8358 mInstantAecFrameIdxCount++;
8359 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8360 CAM_INTF_META_AEC_INFO, metadata) {
8361 LOGH("ae_params->settled = %d",ae_params->settled);
8362             // If AEC has settled, or the number of frames has reached the bound,
8363             // reset instant AEC.
8364 if (ae_params->settled ||
8365 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8366 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8367 mInstantAEC = false;
8368 mResetInstantAEC = true;
8369 mInstantAecFrameIdxCount = 0;
8370 }
8371 }
8372 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008373 resultMetadata = camMetadata.release();
8374 return resultMetadata;
8375}
8376
8377/*===========================================================================
8378 * FUNCTION : dumpMetadataToFile
8379 *
8380 * DESCRIPTION: Dumps tuning metadata to file system
8381 *
8382 * PARAMETERS :
8383 * @meta : tuning metadata
8384 * @dumpFrameCount : current dump frame count
8385 * @enabled : Enable mask
8386 *
8387 *==========================================================================*/
8388void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8389 uint32_t &dumpFrameCount,
8390 bool enabled,
8391 const char *type,
8392 uint32_t frameNumber)
8393{
8394 //Some sanity checks
8395 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8396 LOGE("Tuning sensor data size bigger than expected %d: %d",
8397 meta.tuning_sensor_data_size,
8398 TUNING_SENSOR_DATA_MAX);
8399 return;
8400 }
8401
8402 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8403 LOGE("Tuning VFE data size bigger than expected %d: %d",
8404 meta.tuning_vfe_data_size,
8405 TUNING_VFE_DATA_MAX);
8406 return;
8407 }
8408
8409 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8410 LOGE("Tuning CPP data size bigger than expected %d: %d",
8411 meta.tuning_cpp_data_size,
8412 TUNING_CPP_DATA_MAX);
8413 return;
8414 }
8415
8416 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8417 LOGE("Tuning CAC data size bigger than expected %d: %d",
8418 meta.tuning_cac_data_size,
8419 TUNING_CAC_DATA_MAX);
8420 return;
8421 }
8422 //
8423
8424 if(enabled){
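        // Dump file: QCAMERA_DUMP_FRM_LOCATION<timestamp><dumpFrameCount>m_<type>_<frameNumber>.bin,
        // written with the same header-plus-payload layout as the tuning metadata blob above.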
8425 char timeBuf[FILENAME_MAX];
8426 char buf[FILENAME_MAX];
8427 memset(buf, 0, sizeof(buf));
8428 memset(timeBuf, 0, sizeof(timeBuf));
8429 time_t current_time;
8430 struct tm * timeinfo;
8431 time (&current_time);
8432 timeinfo = localtime (&current_time);
8433 if (timeinfo != NULL) {
8434 strftime (timeBuf, sizeof(timeBuf),
8435 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8436 }
8437 String8 filePath(timeBuf);
8438 snprintf(buf,
8439 sizeof(buf),
8440 "%dm_%s_%d.bin",
8441 dumpFrameCount,
8442 type,
8443 frameNumber);
8444 filePath.append(buf);
8445 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8446 if (file_fd >= 0) {
8447 ssize_t written_len = 0;
8448 meta.tuning_data_version = TUNING_DATA_VERSION;
8449 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8450 written_len += write(file_fd, data, sizeof(uint32_t));
8451 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8452 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8453 written_len += write(file_fd, data, sizeof(uint32_t));
8454 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8455 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8456 written_len += write(file_fd, data, sizeof(uint32_t));
8457 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8458 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8459 written_len += write(file_fd, data, sizeof(uint32_t));
8460 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8461 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8462 written_len += write(file_fd, data, sizeof(uint32_t));
8463 meta.tuning_mod3_data_size = 0;
8464 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8465 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8466 written_len += write(file_fd, data, sizeof(uint32_t));
8467 size_t total_size = meta.tuning_sensor_data_size;
8468 data = (void *)((uint8_t *)&meta.data);
8469 written_len += write(file_fd, data, total_size);
8470 total_size = meta.tuning_vfe_data_size;
8471 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8472 written_len += write(file_fd, data, total_size);
8473 total_size = meta.tuning_cpp_data_size;
8474 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8475 written_len += write(file_fd, data, total_size);
8476 total_size = meta.tuning_cac_data_size;
8477 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8478 written_len += write(file_fd, data, total_size);
8479 close(file_fd);
8480 }else {
8481 LOGE("fail to open file for metadata dumping");
8482 }
8483 }
8484}
8485
8486/*===========================================================================
8487 * FUNCTION : cleanAndSortStreamInfo
8488 *
8489 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8490 *              and sort them such that the raw stream is at the end of the list.
8491 *              This is a workaround for a camera daemon constraint.
8492 *
8493 * PARAMETERS : None
8494 *
8495 *==========================================================================*/
8496void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8497{
8498 List<stream_info_t *> newStreamInfo;
8499
8500 /*clean up invalid streams*/
8501 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8502 it != mStreamInfo.end();) {
8503 if(((*it)->status) == INVALID){
8504 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8505 delete channel;
8506 free(*it);
8507 it = mStreamInfo.erase(it);
8508 } else {
8509 it++;
8510 }
8511 }
8512
8513 // Move preview/video/callback/snapshot streams into newList
8514 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8515 it != mStreamInfo.end();) {
8516 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8517 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8518 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8519 newStreamInfo.push_back(*it);
8520 it = mStreamInfo.erase(it);
8521 } else
8522 it++;
8523 }
8524 // Move raw streams into newList
8525 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8526 it != mStreamInfo.end();) {
8527 newStreamInfo.push_back(*it);
8528 it = mStreamInfo.erase(it);
8529 }
8530
8531 mStreamInfo = newStreamInfo;
8532}
8533
8534/*===========================================================================
8535 * FUNCTION : extractJpegMetadata
8536 *
8537 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8538 * JPEG metadata is cached in HAL, and return as part of capture
8539 *              JPEG metadata is cached in HAL, and returned as part of the capture
8540 *
8541 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8542 * @request: capture request
8543 *
8544 *==========================================================================*/
8545void QCamera3HardwareInterface::extractJpegMetadata(
8546 CameraMetadata& jpegMetadata,
8547 const camera3_capture_request_t *request)
8548{
8549 CameraMetadata frame_settings;
8550 frame_settings = request->settings;
8551
8552 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8553 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8554 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8555 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8556
8557 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8558 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8559 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8560 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8561
8562 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8563 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8564 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8565 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8566
8567 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8568 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8569 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8570 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8571
8572 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8573 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8574 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8575 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8576
8577 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8578 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8579 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8580 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8581
8582 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8583 int32_t thumbnail_size[2];
8584 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8585 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8586 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8587 int32_t orientation =
8588 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008589 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008590 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8591 int32_t temp;
8592 temp = thumbnail_size[0];
8593 thumbnail_size[0] = thumbnail_size[1];
8594 thumbnail_size[1] = temp;
8595 }
8596 }
8597 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8598 thumbnail_size,
8599 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8600 }
8601
8602}
8603
8604/*===========================================================================
8605 * FUNCTION : convertToRegions
8606 *
8607 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8608 *
8609 * PARAMETERS :
8610 * @rect : cam_rect_t struct to convert
8611 * @region : int32_t destination array
8612 * @weight : if we are converting from cam_area_t, weight is valid
8613 * else weight = -1
8614 *
8615 *==========================================================================*/
8616void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8617 int32_t *region, int weight)
8618{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008619 region[FACE_LEFT] = rect.left;
8620 region[FACE_TOP] = rect.top;
8621 region[FACE_RIGHT] = rect.left + rect.width;
8622 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008623 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008624 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008625 }
8626}
8627
8628/*===========================================================================
8629 * FUNCTION : convertFromRegions
8630 * DESCRIPTION: helper method to convert a metadata region array into cam_area_t
8631 * DESCRIPTION: helper method to convert from array to cam_rect_t
8632 *
8633 * @roi            : cam_area_t destination to fill
8634 * @frame_settings : CameraMetadata holding the region entry
8635 * @tag            : metadata tag whose data is the
8636 *                   [xmin, ymin, xmax, ymax, weight] array
8637 * else weight = -1
8638 *
8639 *==========================================================================*/
8640void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008641 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008642{
Thierry Strudel3d639192016-09-09 11:52:26 -07008643 int32_t x_min = frame_settings.find(tag).data.i32[0];
8644 int32_t y_min = frame_settings.find(tag).data.i32[1];
8645 int32_t x_max = frame_settings.find(tag).data.i32[2];
8646 int32_t y_max = frame_settings.find(tag).data.i32[3];
8647 roi.weight = frame_settings.find(tag).data.i32[4];
8648 roi.rect.left = x_min;
8649 roi.rect.top = y_min;
8650 roi.rect.width = x_max - x_min;
8651 roi.rect.height = y_max - y_min;
8652}
8653
8654/*===========================================================================
8655 * FUNCTION : resetIfNeededROI
8656 *
8657 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8658 * crop region
8659 *
8660 * PARAMETERS :
8661 * @roi : cam_area_t struct to resize
8662 * @scalerCropRegion : cam_crop_region_t region to compare against
8663 *
8664 *
8665 *==========================================================================*/
8666bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8667 const cam_crop_region_t* scalerCropRegion)
8668{
8669 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8670 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8671 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8672 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8673
8674     /* According to the spec, weight = 0 indicates that the ROI should be disabled.
8675      * Without this check, the calculations below that validate whether the ROI lies inside
8676      * the scaler crop region would fail, leaving the ROI un-reset and causing the algorithm
8677      * to keep using a stale ROI window.
8678 */
8679 if (roi->weight == 0) {
8680 return true;
8681 }
8682
8683 if ((roi_x_max < scalerCropRegion->left) ||
8684         // right edge of roi window is to the left of scaler crop's left edge
8685         (roi_y_max < scalerCropRegion->top) ||
8686         // bottom edge of roi window is above scaler crop's top edge
8687         (roi->rect.left > crop_x_max) ||
8688         // left edge of roi window is to the right of scaler crop's right edge
8689         (roi->rect.top > crop_y_max)){
8690         // top edge of roi window is below scaler crop's bottom edge
8691 return false;
8692 }
8693 if (roi->rect.left < scalerCropRegion->left) {
8694 roi->rect.left = scalerCropRegion->left;
8695 }
8696 if (roi->rect.top < scalerCropRegion->top) {
8697 roi->rect.top = scalerCropRegion->top;
8698 }
8699 if (roi_x_max > crop_x_max) {
8700 roi_x_max = crop_x_max;
8701 }
8702 if (roi_y_max > crop_y_max) {
8703 roi_y_max = crop_y_max;
8704 }
8705 roi->rect.width = roi_x_max - roi->rect.left;
8706 roi->rect.height = roi_y_max - roi->rect.top;
8707 return true;
8708}
8709
8710/*===========================================================================
8711 * FUNCTION : convertLandmarks
8712 *
8713 * DESCRIPTION: helper method to extract the landmarks from face detection info
8714 *
8715 * PARAMETERS :
8716 * @landmark_data : input landmark data to be converted
8717 * @landmarks : int32_t destination array
8718 *
8719 *
8720 *==========================================================================*/
8721void QCamera3HardwareInterface::convertLandmarks(
8722 cam_face_landmarks_info_t landmark_data,
8723 int32_t *landmarks)
8724{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008725 if (landmark_data.is_left_eye_valid) {
8726 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8727 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8728 } else {
8729 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8730 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8731 }
8732
8733 if (landmark_data.is_right_eye_valid) {
8734 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8735 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8736 } else {
8737 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8738 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8739 }
8740
8741 if (landmark_data.is_mouth_valid) {
8742 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8743 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8744 } else {
8745 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8746 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8747 }
8748}
8749
8750/*===========================================================================
8751 * FUNCTION : setInvalidLandmarks
8752 *
8753 * DESCRIPTION: helper method to set invalid landmarks
8754 *
8755 * PARAMETERS :
8756 * @landmarks : int32_t destination array
8757 *
8758 *
8759 *==========================================================================*/
8760void QCamera3HardwareInterface::setInvalidLandmarks(
8761 int32_t *landmarks)
8762{
8763 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8764 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8765 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8766 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8767 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8768 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008769}
8770
8771#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008772
8773/*===========================================================================
8774 * FUNCTION : getCapabilities
8775 *
8776 * DESCRIPTION: query camera capability from back-end
8777 *
8778 * PARAMETERS :
8779 * @ops : mm-interface ops structure
8780 * @cam_handle : camera handle for which we need capability
8781 *
8782 * RETURN     : pointer to capability structure
8783 *              valid capability pointer for success
8784 *              NULL for failure
8785 *==========================================================================*/
8786cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8787 uint32_t cam_handle)
8788{
8789 int rc = NO_ERROR;
8790 QCamera3HeapMemory *capabilityHeap = NULL;
8791 cam_capability_t *cap_ptr = NULL;
8792
8793 if (ops == NULL) {
8794 LOGE("Invalid arguments");
8795 return NULL;
8796 }
8797
8798 capabilityHeap = new QCamera3HeapMemory(1);
8799 if (capabilityHeap == NULL) {
8800 LOGE("creation of capabilityHeap failed");
8801 return NULL;
8802 }
8803
8804 /* Allocate memory for capability buffer */
8805 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8806 if(rc != OK) {
8807         LOGE("No memory for capability");
8808 goto allocate_failed;
8809 }
8810
8811 /* Map memory for capability buffer */
8812 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8813
8814 rc = ops->map_buf(cam_handle,
8815 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8816 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8817 if(rc < 0) {
8818 LOGE("failed to map capability buffer");
8819 rc = FAILED_TRANSACTION;
8820 goto map_failed;
8821 }
8822
8823 /* Query Capability */
8824 rc = ops->query_capability(cam_handle);
8825 if(rc < 0) {
8826 LOGE("failed to query capability");
8827 rc = FAILED_TRANSACTION;
8828 goto query_failed;
8829 }
8830
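    /* query_capability() fills the mapped heap buffer; copy the result into a heap-allocated
     * struct owned by the caller before unmapping and releasing the capability heap. */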
8831 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8832 if (cap_ptr == NULL) {
8833 LOGE("out of memory");
8834 rc = NO_MEMORY;
8835 goto query_failed;
8836 }
8837
8838 memset(cap_ptr, 0, sizeof(cam_capability_t));
8839 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8840
8841 int index;
8842 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8843 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8844 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8845 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8846 }
8847
8848query_failed:
8849 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8850map_failed:
8851 capabilityHeap->deallocate();
8852allocate_failed:
8853 delete capabilityHeap;
8854
8855 if (rc != NO_ERROR) {
8856 return NULL;
8857 } else {
8858 return cap_ptr;
8859 }
8860}
8861
Thierry Strudel3d639192016-09-09 11:52:26 -07008862/*===========================================================================
8863 * FUNCTION : initCapabilities
8864 *
8865 * DESCRIPTION: initialize camera capabilities in static data struct
8866 *
8867 * PARAMETERS :
8868 * @cameraId : camera Id
8869 *
8870 * RETURN : int32_t type of status
8871 * NO_ERROR -- success
8872 *              non-zero failure code
8873 *==========================================================================*/
8874int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8875{
8876 int rc = 0;
8877 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008878 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008879
8880 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8881 if (rc) {
8882 LOGE("camera_open failed. rc = %d", rc);
8883 goto open_failed;
8884 }
8885 if (!cameraHandle) {
8886 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8887 goto open_failed;
8888 }
8889
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008890 handle = get_main_camera_handle(cameraHandle->camera_handle);
8891 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8892 if (gCamCapability[cameraId] == NULL) {
8893 rc = FAILED_TRANSACTION;
8894 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008895 }
8896
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008897 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008898 if (is_dual_camera_by_idx(cameraId)) {
8899 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8900 gCamCapability[cameraId]->aux_cam_cap =
8901 getCapabilities(cameraHandle->ops, handle);
8902 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8903 rc = FAILED_TRANSACTION;
8904 free(gCamCapability[cameraId]);
8905 goto failed_op;
8906 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008907
8908 // Copy the main camera capability to main_cam_cap struct
8909 gCamCapability[cameraId]->main_cam_cap =
8910 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8911 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8912 LOGE("out of memory");
8913 rc = NO_MEMORY;
8914 goto failed_op;
8915 }
8916 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8917 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008918 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008919failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008920 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8921 cameraHandle = NULL;
8922open_failed:
8923 return rc;
8924}
8925
8926/*==========================================================================
8927 * FUNCTION : get3Aversion
8928 *
8929 * DESCRIPTION: get the Q3A S/W version
8930 *
8931 * PARAMETERS :
8932 * @sw_version: Reference of Q3A structure which will hold version info upon
8933 * return
8934 *
8935 * RETURN : None
8936 *
8937 *==========================================================================*/
8938void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8939{
8940 if(gCamCapability[mCameraId])
8941 sw_version = gCamCapability[mCameraId]->q3a_version;
8942 else
8943 LOGE("Capability structure NULL!");
8944}
8945
8946
8947/*===========================================================================
8948 * FUNCTION : initParameters
8949 *
8950 * DESCRIPTION: initialize camera parameters
8951 *
8952 * PARAMETERS :
8953 *
8954 * RETURN : int32_t type of status
8955 * NO_ERROR -- success
8956 *              non-zero failure code
8957 *==========================================================================*/
8958int QCamera3HardwareInterface::initParameters()
8959{
8960 int rc = 0;
8961
8962 //Allocate Set Param Buffer
8963 mParamHeap = new QCamera3HeapMemory(1);
8964 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8965 if(rc != OK) {
8966 rc = NO_MEMORY;
8967 LOGE("Failed to allocate SETPARM Heap memory");
8968 delete mParamHeap;
8969 mParamHeap = NULL;
8970 return rc;
8971 }
8972
8973 //Map memory for parameters buffer
8974 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8975 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8976 mParamHeap->getFd(0),
8977 sizeof(metadata_buffer_t),
8978 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8979 if(rc < 0) {
8980 LOGE("failed to map SETPARM buffer");
8981 rc = FAILED_TRANSACTION;
8982 mParamHeap->deallocate();
8983 delete mParamHeap;
8984 mParamHeap = NULL;
8985 return rc;
8986 }
8987
8988 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8989
8990 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8991 return rc;
8992}
8993
8994/*===========================================================================
8995 * FUNCTION : deinitParameters
8996 *
8997 * DESCRIPTION: de-initialize camera parameters
8998 *
8999 * PARAMETERS :
9000 *
9001 * RETURN : NONE
9002 *==========================================================================*/
9003void QCamera3HardwareInterface::deinitParameters()
9004{
9005 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9006 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9007
9008 mParamHeap->deallocate();
9009 delete mParamHeap;
9010 mParamHeap = NULL;
9011
9012 mParameters = NULL;
9013
9014 free(mPrevParameters);
9015 mPrevParameters = NULL;
9016}
9017
9018/*===========================================================================
9019 * FUNCTION : calcMaxJpegSize
9020 *
9021 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9022 *
9023 * PARAMETERS :
9024 *
9025 * RETURN : max_jpeg_size
9026 *==========================================================================*/
9027size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9028{
9029 size_t max_jpeg_size = 0;
9030 size_t temp_width, temp_height;
9031 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9032 MAX_SIZES_CNT);
9033 for (size_t i = 0; i < count; i++) {
9034 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9035 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9036 if (temp_width * temp_height > max_jpeg_size ) {
9037 max_jpeg_size = temp_width * temp_height;
9038 }
9039 }
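// Worst-case JPEG buffer estimate (a heuristic, not an exact bound): 1.5 bytes per pixel
// of the largest advertised picture size, plus the camera3_jpeg_blob_t transport header
// appended at the end of the buffer. For example, a hypothetical 4000x3000 (12MP) table
// entry would yield roughly 4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t) ~= 18 MB.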
9040 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9041 return max_jpeg_size;
9042}
9043
9044/*===========================================================================
9045 * FUNCTION : getMaxRawSize
9046 *
9047 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9048 *
9049 * PARAMETERS :
9050 *
9051 * RETURN : Largest supported Raw Dimension
9052 *==========================================================================*/
9053cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9054{
9055 int max_width = 0;
9056 cam_dimension_t maxRawSize;
9057
9058 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9059 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9060 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9061 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9062 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9063 }
9064 }
9065 return maxRawSize;
9066}
9067
9068
9069/*===========================================================================
9070 * FUNCTION : calcMaxJpegDim
9071 *
9072 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9073 *
9074 * PARAMETERS :
9075 *
9076 * RETURN : max_jpeg_dim
9077 *==========================================================================*/
9078cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9079{
9080 cam_dimension_t max_jpeg_dim;
9081 cam_dimension_t curr_jpeg_dim;
9082 max_jpeg_dim.width = 0;
9083 max_jpeg_dim.height = 0;
9084 curr_jpeg_dim.width = 0;
9085 curr_jpeg_dim.height = 0;
9086 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9087 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9088 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9089 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9090 max_jpeg_dim.width * max_jpeg_dim.height ) {
9091 max_jpeg_dim.width = curr_jpeg_dim.width;
9092 max_jpeg_dim.height = curr_jpeg_dim.height;
9093 }
9094 }
9095 return max_jpeg_dim;
9096}
9097
9098/*===========================================================================
9099 * FUNCTION : addStreamConfig
9100 *
9101 * DESCRIPTION: adds the stream configuration to the array
9102 *
9103 * PARAMETERS :
9104 * @available_stream_configs : reference to the stream configuration array
9105 * @scalar_format : scalar format
9106 * @dim : configuration dimension
9107 * @config_type : input or output configuration type
9108 *
9109 * RETURN : NONE
9110 *==========================================================================*/
9111void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9112 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9113{
9114 available_stream_configs.add(scalar_format);
9115 available_stream_configs.add(dim.width);
9116 available_stream_configs.add(dim.height);
9117 available_stream_configs.add(config_type);
9118}
9119
9120/*===========================================================================
9121 * FUNCTION : supportBurstCapture
9122 *
9123 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9124 *
9125 * PARAMETERS :
9126 * @cameraId : camera Id
9127 *
9128 * RETURN : true if camera supports BURST_CAPTURE
9129 * false otherwise
9130 *==========================================================================*/
9131bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9132{
9133 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9134 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9135 const int32_t highResWidth = 3264;
9136 const int32_t highResHeight = 2448;
9137
9138 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9139 // Maximum resolution images cannot be captured at >= 10fps
9140 // -> not supporting BURST_CAPTURE
9141 return false;
9142 }
9143
9144 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9145 // Maximum resolution images can be captured at >= 20fps
9146 // --> supporting BURST_CAPTURE
9147 return true;
9148 }
9149
9150 // Find the smallest highRes resolution, or largest resolution if there is none
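// This walk assumes picture_sizes_tbl is sorted in descending order of area: it stops at
// the last entry that is still >= 3264x2448 (~8MP), so highRes ends up indexing the
// smallest "high resolution" size, or index 0 (the largest size) if none qualifies.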
9151 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9152 MAX_SIZES_CNT);
9153 size_t highRes = 0;
9154 while ((highRes + 1 < totalCnt) &&
9155 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9156 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9157 highResWidth * highResHeight)) {
9158 highRes++;
9159 }
9160 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9161 return true;
9162 } else {
9163 return false;
9164 }
9165}
9166
9167/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009168 * FUNCTION : getPDStatIndex
9169 *
9170 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9171 *
9172 * PARAMETERS :
9173 * @caps : camera capabilities
9174 *
9175 * RETURN : int32_t type
9176 * non-negative - on success
9177 * -1 - on failure
9178 *==========================================================================*/
9179int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9180 if (nullptr == caps) {
9181 return -1;
9182 }
9183
9184 uint32_t metaRawCount = caps->meta_raw_channel_count;
9185 int32_t ret = -1;
9186 for (size_t i = 0; i < metaRawCount; i++) {
9187 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9188 ret = i;
9189 break;
9190 }
9191 }
9192
9193 return ret;
9194}
9195
9196/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009197 * FUNCTION : initStaticMetadata
9198 *
9199 * DESCRIPTION: initialize the static metadata
9200 *
9201 * PARAMETERS :
9202 * @cameraId : camera Id
9203 *
9204 * RETURN : int32_t type of status
9205 * 0 -- success
9206 * non-zero failure code
9207 *==========================================================================*/
9208int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9209{
9210 int rc = 0;
9211 CameraMetadata staticInfo;
9212 size_t count = 0;
9213 bool limitedDevice = false;
9214 char prop[PROPERTY_VALUE_MAX];
9215 bool supportBurst = false;
9216
9217 supportBurst = supportBurstCapture(cameraId);
9218
9219 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9220 * guaranteed or if min fps of max resolution is less than 20 fps, its
9221 * advertised as limited device*/
9222 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9223 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9224 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9225 !supportBurst;
9226
9227 uint8_t supportedHwLvl = limitedDevice ?
9228 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009229#ifndef USE_HAL_3_3
9230 // LEVEL_3 - This device will support level 3.
9231 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9232#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009233 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009234#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009235
9236 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9237 &supportedHwLvl, 1);
9238
9239 bool facingBack = false;
9240 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9241 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9242 facingBack = true;
9243 }
9244 /*HAL 3 only*/
9245 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9246 &gCamCapability[cameraId]->min_focus_distance, 1);
9247
9248 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9249 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9250
9251 /*should be using focal lengths but sensor doesn't provide that info now*/
9252 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9253 &gCamCapability[cameraId]->focal_length,
9254 1);
9255
9256 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9257 gCamCapability[cameraId]->apertures,
9258 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9259
9260 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9261 gCamCapability[cameraId]->filter_densities,
9262 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9263
9264
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009265 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9266 size_t mode_count =
9267 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9268 for (size_t i = 0; i < mode_count; i++) {
9269 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9270 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009271 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009272 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009273
9274 int32_t lens_shading_map_size[] = {
9275 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9276 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9277 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9278 lens_shading_map_size,
9279 sizeof(lens_shading_map_size)/sizeof(int32_t));
9280
9281 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9282 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9283
9284 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9285 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9286
9287 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9288 &gCamCapability[cameraId]->max_frame_duration, 1);
9289
9290 camera_metadata_rational baseGainFactor = {
9291 gCamCapability[cameraId]->base_gain_factor.numerator,
9292 gCamCapability[cameraId]->base_gain_factor.denominator};
9293 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9294 &baseGainFactor, 1);
9295
9296 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9297 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9298
9299 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9300 gCamCapability[cameraId]->pixel_array_size.height};
9301 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9302 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9303
9304 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9305 gCamCapability[cameraId]->active_array_size.top,
9306 gCamCapability[cameraId]->active_array_size.width,
9307 gCamCapability[cameraId]->active_array_size.height};
9308 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9309 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9310
9311 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9312 &gCamCapability[cameraId]->white_level, 1);
9313
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009314 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9315 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9316 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009317 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009318 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009319
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009320#ifndef USE_HAL_3_3
9321 bool hasBlackRegions = false;
9322 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9323 LOGW("black_region_count: %d is bounded to %d",
9324 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9325 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9326 }
9327 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9328 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9329 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9330 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9331 }
9332 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9333 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9334 hasBlackRegions = true;
9335 }
9336#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009337 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9338 &gCamCapability[cameraId]->flash_charge_duration, 1);
9339
9340 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9341 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9342
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009343 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9344 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9345 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009346 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9347 &timestampSource, 1);
9348
Thierry Strudel54dc9782017-02-15 12:12:10 -08009349 //update histogram vendor data
9350 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009351 &gCamCapability[cameraId]->histogram_size, 1);
9352
Thierry Strudel54dc9782017-02-15 12:12:10 -08009353 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009354 &gCamCapability[cameraId]->max_histogram_count, 1);
9355
Shuzhen Wang14415f52016-11-16 18:26:18 -08009356 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9357 //so that the app can request fewer bins than the maximum supported.
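//For example, if max_histogram_count were 256 and MIN_CAM_HISTOGRAM_STATS_SIZE were 32,
//the advertised bin counts would be {256, 128, 64, 32}. (Values shown are illustrative;
//the real numbers come from the capability structure.)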
9358 std::vector<int32_t> histBins;
9359 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9360 histBins.push_back(maxHistBins);
9361 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9362 (maxHistBins & 0x1) == 0) {
9363 histBins.push_back(maxHistBins >> 1);
9364 maxHistBins >>= 1;
9365 }
9366 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9367 histBins.data(), histBins.size());
9368
Thierry Strudel3d639192016-09-09 11:52:26 -07009369 int32_t sharpness_map_size[] = {
9370 gCamCapability[cameraId]->sharpness_map_size.width,
9371 gCamCapability[cameraId]->sharpness_map_size.height};
9372
9373 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9374 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9375
9376 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9377 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9378
Emilian Peev0f3c3162017-03-15 12:57:46 +00009379 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9380 if (0 <= indexPD) {
9381 // Advertise PD stats data as part of the Depth capabilities
9382 int32_t depthWidth =
9383 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9384 int32_t depthHeight =
9385 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009386 int32_t depthStride =
9387 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
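// Note: the RAW16 PD buffer holds depthWidth * depthHeight * 2 bytes; the divide-by-16
// below appears to budget one extracted depth sample per 16 bytes of that buffer. This is
// inferred from the formula itself rather than from a documented constant.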
Emilian Peev0f3c3162017-03-15 12:57:46 +00009388 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9389 assert(0 < depthSamplesCount);
9390 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9391 &depthSamplesCount, 1);
9392
9393 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9394 depthHeight,
9395 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9396 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9397 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9398 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9399 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9400
9401 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9402 depthHeight, 33333333,
9403 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9404 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9405 depthMinDuration,
9406 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9407
9408 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9409 depthHeight, 0,
9410 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9411 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9412 depthStallDuration,
9413 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9414
9415 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9416 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009417
9418 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9419 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9420 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009421 }
9422
Thierry Strudel3d639192016-09-09 11:52:26 -07009423 int32_t scalar_formats[] = {
9424 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9425 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9426 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9427 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9428 HAL_PIXEL_FORMAT_RAW10,
9429 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009430 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9431 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9432 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009433
9434 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9435 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9436 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9437 count, MAX_SIZES_CNT, available_processed_sizes);
9438 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9439 available_processed_sizes, count * 2);
9440
9441 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9442 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9443 makeTable(gCamCapability[cameraId]->raw_dim,
9444 count, MAX_SIZES_CNT, available_raw_sizes);
9445 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9446 available_raw_sizes, count * 2);
9447
9448 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9449 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9450 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9451 count, MAX_SIZES_CNT, available_fps_ranges);
9452 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9453 available_fps_ranges, count * 2);
9454
9455 camera_metadata_rational exposureCompensationStep = {
9456 gCamCapability[cameraId]->exp_compensation_step.numerator,
9457 gCamCapability[cameraId]->exp_compensation_step.denominator};
9458 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9459 &exposureCompensationStep, 1);
9460
9461 Vector<uint8_t> availableVstabModes;
9462 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9463 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009464 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009465 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009466 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009467 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009468 count = IS_TYPE_MAX;
9469 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9470 for (size_t i = 0; i < count; i++) {
9471 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9472 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9473 eisSupported = true;
9474 break;
9475 }
9476 }
9477 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009478 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9479 }
9480 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9481 availableVstabModes.array(), availableVstabModes.size());
9482
9483 /*HAL 1 and HAL 3 common*/
9484 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9485 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9486 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009487 // Cap the max zoom to the max preferred value
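// For example, if the zoom ratio table ended at 800 (8x relative to the 100 base step),
// maxZoom would be MIN(8, MAX_PREFERRED_ZOOM_RATIO). The 800 here is illustrative only;
// the real value comes from zoom_ratio_tbl.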
9488 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009489 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9490 &maxZoom, 1);
9491
9492 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9493 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9494
9495 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9496 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9497 max3aRegions[2] = 0; /* AF not supported */
9498 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9499 max3aRegions, 3);
9500
9501 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9502 memset(prop, 0, sizeof(prop));
9503 property_get("persist.camera.facedetect", prop, "1");
9504 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9505 LOGD("Support face detection mode: %d",
9506 supportedFaceDetectMode);
9507
9508 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009509 /* supported face detect mode should be OFF if the max number of faces is 0 */
9510 if (maxFaces <= 0) {
9511 supportedFaceDetectMode = 0;
9512 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009513 Vector<uint8_t> availableFaceDetectModes;
9514 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9515 if (supportedFaceDetectMode == 1) {
9516 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9517 } else if (supportedFaceDetectMode == 2) {
9518 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9519 } else if (supportedFaceDetectMode == 3) {
9520 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9521 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9522 } else {
9523 maxFaces = 0;
9524 }
9525 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9526 availableFaceDetectModes.array(),
9527 availableFaceDetectModes.size());
9528 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9529 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009530 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9531 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9532 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009533
9534 int32_t exposureCompensationRange[] = {
9535 gCamCapability[cameraId]->exposure_compensation_min,
9536 gCamCapability[cameraId]->exposure_compensation_max};
9537 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9538 exposureCompensationRange,
9539 sizeof(exposureCompensationRange)/sizeof(int32_t));
9540
9541 uint8_t lensFacing = (facingBack) ?
9542 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9543 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9544
9545 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9546 available_thumbnail_sizes,
9547 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9548
9549 /*all sizes will be clubbed into this tag*/
9550 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9551 /*android.scaler.availableStreamConfigurations*/
9552 Vector<int32_t> available_stream_configs;
9553 cam_dimension_t active_array_dim;
9554 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9555 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009556
9557 /*advertise the list of supported input dimensions based on the property below.
9558 By default all sizes up to 5MP will be advertised.
9559 Note that the setprop resolution format should be WxH.
9560 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9561 To list all supported sizes, the setprop needs to be set to "0x0" */
9562 cam_dimension_t minInputSize = {2592,1944}; //5MP
9563 memset(prop, 0, sizeof(prop));
9564 property_get("persist.camera.input.minsize", prop, "2592x1944");
9565 if (strlen(prop) > 0) {
9566 char *saveptr = NULL;
9567 char *token = strtok_r(prop, "x", &saveptr);
9568 if (token != NULL) {
9569 minInputSize.width = atoi(token);
9570 }
9571 token = strtok_r(NULL, "x", &saveptr);
9572 if (token != NULL) {
9573 minInputSize.height = atoi(token);
9574 }
9575 }
9576
Thierry Strudel3d639192016-09-09 11:52:26 -07009577 /* Add input/output stream configurations for each scalar format */
9578 for (size_t j = 0; j < scalar_formats_count; j++) {
9579 switch (scalar_formats[j]) {
9580 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9581 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9582 case HAL_PIXEL_FORMAT_RAW10:
9583 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9584 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9585 addStreamConfig(available_stream_configs, scalar_formats[j],
9586 gCamCapability[cameraId]->raw_dim[i],
9587 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9588 }
9589 break;
9590 case HAL_PIXEL_FORMAT_BLOB:
9591 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9592 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9593 addStreamConfig(available_stream_configs, scalar_formats[j],
9594 gCamCapability[cameraId]->picture_sizes_tbl[i],
9595 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9596 }
9597 break;
9598 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9599 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9600 default:
9601 cam_dimension_t largest_picture_size;
9602 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9603 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9604 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9605 addStreamConfig(available_stream_configs, scalar_formats[j],
9606 gCamCapability[cameraId]->picture_sizes_tbl[i],
9607 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009608 /*For the below 2 formats we also support input streams for reprocessing; advertise those*/
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009609 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9610 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009611 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9612 >= minInputSize.width) || (gCamCapability[cameraId]->
9613 picture_sizes_tbl[i].height >= minInputSize.height)) {
9614 addStreamConfig(available_stream_configs, scalar_formats[j],
9615 gCamCapability[cameraId]->picture_sizes_tbl[i],
9616 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9617 }
9618 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009619 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009620
Thierry Strudel3d639192016-09-09 11:52:26 -07009621 break;
9622 }
9623 }
9624
9625 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9626 available_stream_configs.array(), available_stream_configs.size());
9627 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9628 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9629
9630 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9631 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9632
9633 /* android.scaler.availableMinFrameDurations */
9634 Vector<int64_t> available_min_durations;
9635 for (size_t j = 0; j < scalar_formats_count; j++) {
9636 switch (scalar_formats[j]) {
9637 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9638 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9639 case HAL_PIXEL_FORMAT_RAW10:
9640 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9641 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9642 available_min_durations.add(scalar_formats[j]);
9643 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9644 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9645 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9646 }
9647 break;
9648 default:
9649 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9650 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9651 available_min_durations.add(scalar_formats[j]);
9652 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9653 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9654 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9655 }
9656 break;
9657 }
9658 }
9659 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9660 available_min_durations.array(), available_min_durations.size());
9661
9662 Vector<int32_t> available_hfr_configs;
9663 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9664 int32_t fps = 0;
9665 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9666 case CAM_HFR_MODE_60FPS:
9667 fps = 60;
9668 break;
9669 case CAM_HFR_MODE_90FPS:
9670 fps = 90;
9671 break;
9672 case CAM_HFR_MODE_120FPS:
9673 fps = 120;
9674 break;
9675 case CAM_HFR_MODE_150FPS:
9676 fps = 150;
9677 break;
9678 case CAM_HFR_MODE_180FPS:
9679 fps = 180;
9680 break;
9681 case CAM_HFR_MODE_210FPS:
9682 fps = 210;
9683 break;
9684 case CAM_HFR_MODE_240FPS:
9685 fps = 240;
9686 break;
9687 case CAM_HFR_MODE_480FPS:
9688 fps = 480;
9689 break;
9690 case CAM_HFR_MODE_OFF:
9691 case CAM_HFR_MODE_MAX:
9692 default:
9693 break;
9694 }
9695
9696 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9697 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9698 /* For each HFR frame rate, need to advertise one variable fps range
9699 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9700 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9701 * set by the app. When video recording is started, [120, 120] is
9702 * set. This way sensor configuration does not change when recording
9703 * is started */
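/* For example, a hypothetical 1920x1080 entry at 120 FPS with PREVIEW_FPS_FOR_HFR
* of 30 would be advertised as (1920, 1080, 30, 120, 4) and (1920, 1080, 120, 120, 4). */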
9704
9705 /* (width, height, fps_min, fps_max, batch_size_max) */
9706 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9707 j < MAX_SIZES_CNT; j++) {
9708 available_hfr_configs.add(
9709 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9710 available_hfr_configs.add(
9711 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9712 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9713 available_hfr_configs.add(fps);
9714 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9715
9716 /* (width, height, fps_min, fps_max, batch_size_max) */
9717 available_hfr_configs.add(
9718 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9719 available_hfr_configs.add(
9720 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9721 available_hfr_configs.add(fps);
9722 available_hfr_configs.add(fps);
9723 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9724 }
9725 }
9726 }
9727 //Advertise HFR capability only if the property is set
9728 memset(prop, 0, sizeof(prop));
9729 property_get("persist.camera.hal3hfr.enable", prop, "1");
9730 uint8_t hfrEnable = (uint8_t)atoi(prop);
9731
9732 if(hfrEnable && available_hfr_configs.array()) {
9733 staticInfo.update(
9734 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9735 available_hfr_configs.array(), available_hfr_configs.size());
9736 }
9737
9738 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9739 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9740 &max_jpeg_size, 1);
9741
9742 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9743 size_t size = 0;
9744 count = CAM_EFFECT_MODE_MAX;
9745 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9746 for (size_t i = 0; i < count; i++) {
9747 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9748 gCamCapability[cameraId]->supported_effects[i]);
9749 if (NAME_NOT_FOUND != val) {
9750 avail_effects[size] = (uint8_t)val;
9751 size++;
9752 }
9753 }
9754 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9755 avail_effects,
9756 size);
9757
9758 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9759 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9760 size_t supported_scene_modes_cnt = 0;
9761 count = CAM_SCENE_MODE_MAX;
9762 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9763 for (size_t i = 0; i < count; i++) {
9764 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9765 CAM_SCENE_MODE_OFF) {
9766 int val = lookupFwkName(SCENE_MODES_MAP,
9767 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9768 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009769
Thierry Strudel3d639192016-09-09 11:52:26 -07009770 if (NAME_NOT_FOUND != val) {
9771 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9772 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9773 supported_scene_modes_cnt++;
9774 }
9775 }
9776 }
9777 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9778 avail_scene_modes,
9779 supported_scene_modes_cnt);
9780
9781 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9782 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9783 supported_scene_modes_cnt,
9784 CAM_SCENE_MODE_MAX,
9785 scene_mode_overrides,
9786 supported_indexes,
9787 cameraId);
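// Each supported scene mode contributes one (AE, AWB, AF) override triple, which is why
// supported_scene_modes_cnt * 3 entries are published below.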
9788
9789 if (supported_scene_modes_cnt == 0) {
9790 supported_scene_modes_cnt = 1;
9791 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9792 }
9793
9794 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9795 scene_mode_overrides, supported_scene_modes_cnt * 3);
9796
9797 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9798 ANDROID_CONTROL_MODE_AUTO,
9799 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9800 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9801 available_control_modes,
9802 3);
9803
9804 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9805 size = 0;
9806 count = CAM_ANTIBANDING_MODE_MAX;
9807 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9808 for (size_t i = 0; i < count; i++) {
9809 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9810 gCamCapability[cameraId]->supported_antibandings[i]);
9811 if (NAME_NOT_FOUND != val) {
9812 avail_antibanding_modes[size] = (uint8_t)val;
9813 size++;
9814 }
9815
9816 }
9817 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9818 avail_antibanding_modes,
9819 size);
9820
9821 uint8_t avail_abberation_modes[] = {
9822 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9823 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9824 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9825 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9826 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9827 if (0 == count) {
9828 // If no aberration correction modes are available for a device, advertise only the OFF mode
9829 size = 1;
9830 } else {
9831 // If count is not zero then at least one of the FAST or HIGH_QUALITY modes is supported.
9832 // So, advertise all 3 modes if at least one mode is supported, as per the
9833 // new M requirement
9834 size = 3;
9835 }
9836 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9837 avail_abberation_modes,
9838 size);
9839
9840 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9841 size = 0;
9842 count = CAM_FOCUS_MODE_MAX;
9843 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9844 for (size_t i = 0; i < count; i++) {
9845 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9846 gCamCapability[cameraId]->supported_focus_modes[i]);
9847 if (NAME_NOT_FOUND != val) {
9848 avail_af_modes[size] = (uint8_t)val;
9849 size++;
9850 }
9851 }
9852 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9853 avail_af_modes,
9854 size);
9855
9856 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9857 size = 0;
9858 count = CAM_WB_MODE_MAX;
9859 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9860 for (size_t i = 0; i < count; i++) {
9861 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9862 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9863 gCamCapability[cameraId]->supported_white_balances[i]);
9864 if (NAME_NOT_FOUND != val) {
9865 avail_awb_modes[size] = (uint8_t)val;
9866 size++;
9867 }
9868 }
9869 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9870 avail_awb_modes,
9871 size);
9872
9873 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9874 count = CAM_FLASH_FIRING_LEVEL_MAX;
9875 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9876 count);
9877 for (size_t i = 0; i < count; i++) {
9878 available_flash_levels[i] =
9879 gCamCapability[cameraId]->supported_firing_levels[i];
9880 }
9881 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9882 available_flash_levels, count);
9883
9884 uint8_t flashAvailable;
9885 if (gCamCapability[cameraId]->flash_available)
9886 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9887 else
9888 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9889 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9890 &flashAvailable, 1);
9891
9892 Vector<uint8_t> avail_ae_modes;
9893 count = CAM_AE_MODE_MAX;
9894 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9895 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009896 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9897 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9898 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9899 }
9900 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009901 }
9902 if (flashAvailable) {
9903 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9904 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9905 }
9906 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9907 avail_ae_modes.array(),
9908 avail_ae_modes.size());
9909
9910 int32_t sensitivity_range[2];
9911 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9912 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9913 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9914 sensitivity_range,
9915 sizeof(sensitivity_range) / sizeof(int32_t));
9916
9917 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9918 &gCamCapability[cameraId]->max_analog_sensitivity,
9919 1);
9920
9921 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9922 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9923 &sensor_orientation,
9924 1);
9925
9926 int32_t max_output_streams[] = {
9927 MAX_STALLING_STREAMS,
9928 MAX_PROCESSED_STREAMS,
9929 MAX_RAW_STREAMS};
9930 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9931 max_output_streams,
9932 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9933
9934 uint8_t avail_leds = 0;
9935 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9936 &avail_leds, 0);
9937
9938 uint8_t focus_dist_calibrated;
9939 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9940 gCamCapability[cameraId]->focus_dist_calibrated);
9941 if (NAME_NOT_FOUND != val) {
9942 focus_dist_calibrated = (uint8_t)val;
9943 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9944 &focus_dist_calibrated, 1);
9945 }
9946
9947 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9948 size = 0;
9949 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9950 MAX_TEST_PATTERN_CNT);
9951 for (size_t i = 0; i < count; i++) {
9952 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9953 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9954 if (NAME_NOT_FOUND != testpatternMode) {
9955 avail_testpattern_modes[size] = testpatternMode;
9956 size++;
9957 }
9958 }
9959 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9960 avail_testpattern_modes,
9961 size);
9962
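// Maximum pipeline depth reported to the framework: the in-flight request budget plus the
// fixed EMPTY_PIPELINE_DELAY and FRAME_SKIP_DELAY defined at the top of this file.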
9963 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9964 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9965 &max_pipeline_depth,
9966 1);
9967
9968 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9969 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9970 &partial_result_count,
9971 1);
9972
9973 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9974 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9975
9976 Vector<uint8_t> available_capabilities;
9977 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9978 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9979 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9980 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9981 if (supportBurst) {
9982 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9983 }
9984 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9985 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9986 if (hfrEnable && available_hfr_configs.array()) {
9987 available_capabilities.add(
9988 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9989 }
9990
9991 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9992 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9993 }
9994 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9995 available_capabilities.array(),
9996 available_capabilities.size());
9997
9998 //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9999 //Assumption is that all Bayer cameras support MANUAL_SENSOR.
10000 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10001 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10002
10003 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10004 &aeLockAvailable, 1);
10005
10006 //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
10007 //BURST_CAPTURE. Assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
10008 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10009 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10010
10011 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10012 &awbLockAvailable, 1);
10013
10014 int32_t max_input_streams = 1;
10015 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10016 &max_input_streams,
10017 1);
10018
10019 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
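/* i.e. an IMPLEMENTATION_DEFINED input can be reprocessed into BLOB or YCbCr_420_888,
* and a YCbCr_420_888 input can likewise be reprocessed into BLOB or YCbCr_420_888. */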
10020 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10021 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10022 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10023 HAL_PIXEL_FORMAT_YCbCr_420_888};
10024 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10025 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
10026
10027 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10028 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10029 &max_latency,
10030 1);
10031
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010032#ifndef USE_HAL_3_3
10033 int32_t isp_sensitivity_range[2];
10034 isp_sensitivity_range[0] =
10035 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10036 isp_sensitivity_range[1] =
10037 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10038 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10039 isp_sensitivity_range,
10040 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10041#endif
10042
Thierry Strudel3d639192016-09-09 11:52:26 -070010043 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10044 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10045 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10046 available_hot_pixel_modes,
10047 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10048
10049 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10050 ANDROID_SHADING_MODE_FAST,
10051 ANDROID_SHADING_MODE_HIGH_QUALITY};
10052 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10053 available_shading_modes,
10054 3);
10055
10056 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10057 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10058 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10059 available_lens_shading_map_modes,
10060 2);
10061
10062 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10063 ANDROID_EDGE_MODE_FAST,
10064 ANDROID_EDGE_MODE_HIGH_QUALITY,
10065 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10066 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10067 available_edge_modes,
10068 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10069
10070 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10071 ANDROID_NOISE_REDUCTION_MODE_FAST,
10072 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10073 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10074 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10075 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10076 available_noise_red_modes,
10077 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10078
10079 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10080 ANDROID_TONEMAP_MODE_FAST,
10081 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10082 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10083 available_tonemap_modes,
10084 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10085
10086 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10087 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10088 available_hot_pixel_map_modes,
10089 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10090
10091 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10092 gCamCapability[cameraId]->reference_illuminant1);
10093 if (NAME_NOT_FOUND != val) {
10094 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10095 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10096 }
10097
10098 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10099 gCamCapability[cameraId]->reference_illuminant2);
10100 if (NAME_NOT_FOUND != val) {
10101 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10102 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10103 }
10104
10105 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10106 (void *)gCamCapability[cameraId]->forward_matrix1,
10107 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10108
10109 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10110 (void *)gCamCapability[cameraId]->forward_matrix2,
10111 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10112
10113 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10114 (void *)gCamCapability[cameraId]->color_transform1,
10115 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10116
10117 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10118 (void *)gCamCapability[cameraId]->color_transform2,
10119 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10120
10121 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10122 (void *)gCamCapability[cameraId]->calibration_transform1,
10123 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10124
10125 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10126 (void *)gCamCapability[cameraId]->calibration_transform2,
10127 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10128
10129 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10130 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10131 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10132 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10133 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10134 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10135 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10136 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10137 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10138 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10139 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10140 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10141 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10142 ANDROID_JPEG_GPS_COORDINATES,
10143 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10144 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10145 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10146 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10147 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10148 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10149 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10150 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10151 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10152 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010153#ifndef USE_HAL_3_3
10154 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10155#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010156 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010157 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010158 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10159 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010160 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010161 /* DevCamDebug metadata request_keys_basic */
10162 DEVCAMDEBUG_META_ENABLE,
10163 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010164 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010165 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010166 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010167 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010168 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010169 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010170
10171 size_t request_keys_cnt =
10172 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10173 Vector<int32_t> available_request_keys;
10174 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10175 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10176 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10177 }
10178
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010179 if (gExposeEnableZslKey) {
Chien-Yu Chen3b630e52017-06-02 15:39:47 -070010180 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || cameraId == 0) {
10181 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10182 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010183 }
10184
Thierry Strudel3d639192016-09-09 11:52:26 -070010185 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10186 available_request_keys.array(), available_request_keys.size());
10187
10188 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10189 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10190 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10191 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10192 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10193 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10194 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10195 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10196 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10197 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10198 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10199 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10200 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10201 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10202 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10203 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10204 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010205 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010206 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10207 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10208 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010209 ANDROID_STATISTICS_FACE_SCORES,
10210#ifndef USE_HAL_3_3
10211 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10212#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010213 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010214 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010215 // DevCamDebug metadata result_keys_basic
10216 DEVCAMDEBUG_META_ENABLE,
10217 // DevCamDebug metadata result_keys AF
10218 DEVCAMDEBUG_AF_LENS_POSITION,
10219 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10220 DEVCAMDEBUG_AF_TOF_DISTANCE,
10221 DEVCAMDEBUG_AF_LUMA,
10222 DEVCAMDEBUG_AF_HAF_STATE,
10223 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10224 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10225 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10226 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10227 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10228 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10229 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10230 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10231 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10232 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10233 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10234 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10235 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10236 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10237 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10238 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10239 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10240 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10241 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10242 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10243 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10244 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10245 // DevCamDebug metadata result_keys AEC
10246 DEVCAMDEBUG_AEC_TARGET_LUMA,
10247 DEVCAMDEBUG_AEC_COMP_LUMA,
10248 DEVCAMDEBUG_AEC_AVG_LUMA,
10249 DEVCAMDEBUG_AEC_CUR_LUMA,
10250 DEVCAMDEBUG_AEC_LINECOUNT,
10251 DEVCAMDEBUG_AEC_REAL_GAIN,
10252 DEVCAMDEBUG_AEC_EXP_INDEX,
10253 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010254 // DevCamDebug metadata result_keys zzHDR
10255 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10256 DEVCAMDEBUG_AEC_L_LINECOUNT,
10257 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10258 DEVCAMDEBUG_AEC_S_LINECOUNT,
10259 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10260 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10261 // DevCamDebug metadata result_keys ADRC
10262 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10263 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10264 DEVCAMDEBUG_AEC_GTM_RATIO,
10265 DEVCAMDEBUG_AEC_LTM_RATIO,
10266 DEVCAMDEBUG_AEC_LA_RATIO,
10267 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010268 // DevCamDebug metadata result_keys AEC MOTION
10269 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10270 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10271 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010272 // DevCamDebug metadata result_keys AWB
10273 DEVCAMDEBUG_AWB_R_GAIN,
10274 DEVCAMDEBUG_AWB_G_GAIN,
10275 DEVCAMDEBUG_AWB_B_GAIN,
10276 DEVCAMDEBUG_AWB_CCT,
10277 DEVCAMDEBUG_AWB_DECISION,
10278 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010279 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10280 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10281 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010282 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Shuzhen Wangc89c77e2017-08-07 15:50:12 -070010283 NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010284 };
10285
Thierry Strudel3d639192016-09-09 11:52:26 -070010286 size_t result_keys_cnt =
10287 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10288
10289 Vector<int32_t> available_result_keys;
10290 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10291 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10292 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10293 }
10294 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10295 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10296 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10297 }
10298 if (supportedFaceDetectMode == 1) {
10299 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10300 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10301 } else if ((supportedFaceDetectMode == 2) ||
10302 (supportedFaceDetectMode == 3)) {
10303 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10304 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10305 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010306#ifndef USE_HAL_3_3
10307 if (hasBlackRegions) {
10308 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10309 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10310 }
10311#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010312
10313 if (gExposeEnableZslKey) {
10314 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10315 }
10316
Thierry Strudel3d639192016-09-09 11:52:26 -070010317 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10318 available_result_keys.array(), available_result_keys.size());
10319
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010320 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010321 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10322 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10323 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10324 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10325 ANDROID_SCALER_CROPPING_TYPE,
10326 ANDROID_SYNC_MAX_LATENCY,
10327 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10328 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10329 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10330 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10331 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10332 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10333 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10334 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10335 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10336 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10337 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10338 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10339 ANDROID_LENS_FACING,
10340 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10341 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10342 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10343 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10344 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10345 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10346 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10347 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10348 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10349 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10350 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10351 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10352 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10353 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10354 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10355 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10356 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10357 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10358 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10359 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010360 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010361 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10362 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10363 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10364 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10365 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10366 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10367 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10368 ANDROID_CONTROL_AVAILABLE_MODES,
10369 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10370 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10371 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10372 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010373 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10374#ifndef USE_HAL_3_3
10375 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10376 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10377#endif
10378 };
10379
10380 Vector<int32_t> available_characteristics_keys;
10381 available_characteristics_keys.appendArray(characteristics_keys_basic,
10382 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10383#ifndef USE_HAL_3_3
10384 if (hasBlackRegions) {
10385 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10386 }
10387#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010388
10389 if (0 <= indexPD) {
10390 int32_t depthKeys[] = {
10391 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10392 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10393 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10394 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10395 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10396 };
10397 available_characteristics_keys.appendArray(depthKeys,
10398 sizeof(depthKeys) / sizeof(depthKeys[0]));
10399 }
10400
Thierry Strudel3d639192016-09-09 11:52:26 -070010401 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010402 available_characteristics_keys.array(),
10403 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010404
10405 /*available stall durations depend on the hw + sw and will be different for different devices */
10406 /*have to add for raw after implementation*/
10407 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10408 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10409
10410 Vector<int64_t> available_stall_durations;
10411 for (uint32_t j = 0; j < stall_formats_count; j++) {
10412 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10413 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10414 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10415 available_stall_durations.add(stall_formats[j]);
10416 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10417 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10418 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10419 }
10420 } else {
10421 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10422 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10423 available_stall_durations.add(stall_formats[j]);
10424 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10425 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10426 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10427 }
10428 }
10429 }
10430 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10431 available_stall_durations.array(),
10432 available_stall_durations.size());
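    /*
     * Layout note (illustrative, hypothetical sizes): available_stall_durations is a
     * flat list of (format, width, height, duration_ns) 4-tuples, one per stalling
     * configuration. A single 12MP JPEG entry with a 300ms stall would look like
     *     { HAL_PIXEL_FORMAT_BLOB, 4000, 3000, 300000000 }
     * and the framework parses ANDROID_SCALER_AVAILABLE_STALL_DURATIONS in groups of
     * four int64 values.
     */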
10433
10434 //QCAMERA3_OPAQUE_RAW
10435 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10436 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10437 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10438 case LEGACY_RAW:
10439 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10440 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10441 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10442 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10443 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10444 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10445 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10446 break;
10447 case MIPI_RAW:
10448 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10449 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10450 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10451 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10452 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10453 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10454 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10455 break;
10456 default:
10457 LOGE("unknown opaque_raw_format %d",
10458 gCamCapability[cameraId]->opaque_raw_fmt);
10459 break;
10460 }
10461 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10462
10463 Vector<int32_t> strides;
10464 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10465 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10466 cam_stream_buf_plane_info_t buf_planes;
10467 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10468 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10469 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10470 &gCamCapability[cameraId]->padding_info, &buf_planes);
10471 strides.add(buf_planes.plane_info.mp[0].stride);
10472 }
10473 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10474 strides.size());
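    /*
     * Layout note (sketch only): QCAMERA3_OPAQUE_RAW_STRIDES is a flat list of
     * (width, height, stride) triplets, one per supported RAW dimension, e.g. for a
     * hypothetical 4208x3120 mode
     *     { 4208, 3120, <stride reported by mm_stream_calc_offset_raw()> }
     * where the stride depends on the opaque RAW packing and padding, so no fixed
     * value is assumed here.
     */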
10475
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010476 //TBD: remove the following line once backend advertises zzHDR in feature mask
10477 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010478 //Video HDR default
10479 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10480 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010481 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010482 int32_t vhdr_mode[] = {
10483 QCAMERA3_VIDEO_HDR_MODE_OFF,
10484 QCAMERA3_VIDEO_HDR_MODE_ON};
10485
10486 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10487 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10488 vhdr_mode, vhdr_mode_count);
10489 }
10490
Thierry Strudel3d639192016-09-09 11:52:26 -070010491 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10492 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10493 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10494
10495 uint8_t isMonoOnly =
10496 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10497 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10498 &isMonoOnly, 1);
10499
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010500#ifndef USE_HAL_3_3
10501 Vector<int32_t> opaque_size;
10502 for (size_t j = 0; j < scalar_formats_count; j++) {
10503 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10504 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10505 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10506 cam_stream_buf_plane_info_t buf_planes;
10507
10508 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10509 &gCamCapability[cameraId]->padding_info, &buf_planes);
10510
10511 if (rc == 0) {
10512 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10513 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10514 opaque_size.add(buf_planes.plane_info.frame_len);
10515                 } else {
10516 LOGE("raw frame calculation failed!");
10517 }
10518 }
10519 }
10520 }
10521
10522 if ((opaque_size.size() > 0) &&
10523 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10524 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10525 else
10526         LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10527#endif
10528
Thierry Strudel04e026f2016-10-10 11:27:36 -070010529 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10530 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10531 size = 0;
10532 count = CAM_IR_MODE_MAX;
10533 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10534 for (size_t i = 0; i < count; i++) {
10535 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10536 gCamCapability[cameraId]->supported_ir_modes[i]);
10537 if (NAME_NOT_FOUND != val) {
10538 avail_ir_modes[size] = (int32_t)val;
10539 size++;
10540 }
10541 }
10542 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10543 avail_ir_modes, size);
10544 }
10545
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010546 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10547 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10548 size = 0;
10549 count = CAM_AEC_CONVERGENCE_MAX;
10550 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10551 for (size_t i = 0; i < count; i++) {
10552 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10553 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10554 if (NAME_NOT_FOUND != val) {
10555 available_instant_aec_modes[size] = (int32_t)val;
10556 size++;
10557 }
10558 }
10559 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10560 available_instant_aec_modes, size);
10561 }
10562
Thierry Strudel54dc9782017-02-15 12:12:10 -080010563 int32_t sharpness_range[] = {
10564 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10565 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10566 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10567
10568 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10569 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10570 size = 0;
10571 count = CAM_BINNING_CORRECTION_MODE_MAX;
10572 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10573 for (size_t i = 0; i < count; i++) {
10574 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10575 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10576 gCamCapability[cameraId]->supported_binning_modes[i]);
10577 if (NAME_NOT_FOUND != val) {
10578 avail_binning_modes[size] = (int32_t)val;
10579 size++;
10580 }
10581 }
10582 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10583 avail_binning_modes, size);
10584 }
10585
10586 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10587 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10588 size = 0;
10589 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10590 for (size_t i = 0; i < count; i++) {
10591 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10592 gCamCapability[cameraId]->supported_aec_modes[i]);
10593 if (NAME_NOT_FOUND != val)
10594 available_aec_modes[size++] = val;
10595 }
10596 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10597 available_aec_modes, size);
10598 }
10599
10600 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10601 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10602 size = 0;
10603 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10604 for (size_t i = 0; i < count; i++) {
10605 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10606 gCamCapability[cameraId]->supported_iso_modes[i]);
10607 if (NAME_NOT_FOUND != val)
10608 available_iso_modes[size++] = val;
10609 }
10610 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10611 available_iso_modes, size);
10612 }
10613
10614 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010615 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010616 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10617 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10618 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10619
10620 int32_t available_saturation_range[4];
10621 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10622 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10623 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10624 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10625 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10626 available_saturation_range, 4);
10627
10628 uint8_t is_hdr_values[2];
10629 is_hdr_values[0] = 0;
10630 is_hdr_values[1] = 1;
10631 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10632 is_hdr_values, 2);
10633
10634 float is_hdr_confidence_range[2];
10635 is_hdr_confidence_range[0] = 0.0;
10636 is_hdr_confidence_range[1] = 1.0;
10637 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10638 is_hdr_confidence_range, 2);
10639
Emilian Peev0a972ef2017-03-16 10:25:53 +000010640 size_t eepromLength = strnlen(
10641 reinterpret_cast<const char *>(
10642 gCamCapability[cameraId]->eeprom_version_info),
10643 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10644 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010645 char easelInfo[] = ",E:N";
10646 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10647 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10648 eepromLength += sizeof(easelInfo);
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010649 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
10650 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E:Y" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010651 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010652 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010653 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10654 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10655 }
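    /*
     * Example (hypothetical eeprom content): if eeprom_version_info holds "V1.2" and
     * Easel is present, the value published under
     * NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO becomes "V1.2,E:Y"; otherwise the
     * ",E:N" suffix is kept. Note that sizeof(easelInfo) includes the trailing NUL,
     * so eepromLength grows by strlen(",E:N") + 1.
     */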
10656
Thierry Strudel3d639192016-09-09 11:52:26 -070010657 gStaticMetadata[cameraId] = staticInfo.release();
10658 return rc;
10659}
10660
10661/*===========================================================================
10662 * FUNCTION : makeTable
10663 *
10664 * DESCRIPTION: make a table of sizes
10665 *
10666 * PARAMETERS :
10667 *
10668 *
10669 *==========================================================================*/
10670void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10671 size_t max_size, int32_t *sizeTable)
10672{
10673 size_t j = 0;
10674 if (size > max_size) {
10675 size = max_size;
10676 }
10677 for (size_t i = 0; i < size; i++) {
10678 sizeTable[j] = dimTable[i].width;
10679 sizeTable[j+1] = dimTable[i].height;
10680 j+=2;
10681 }
10682}
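/*
 * Example (hypothetical dimensions): makeTable() flattens {width, height} pairs, so
 * a dimTable of {4000x3000, 1920x1080} yields
 *     int32_t sizeTable[] = { 4000, 3000, 1920, 1080 };
 * makeFPSTable() below follows the same pattern with {min_fps, max_fps} pairs.
 */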
10683
10684/*===========================================================================
10685 * FUNCTION : makeFPSTable
10686 *
10687 * DESCRIPTION: make a table of fps ranges
10688 *
10689 * PARAMETERS :
10690 *
10691 *==========================================================================*/
10692void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10693 size_t max_size, int32_t *fpsRangesTable)
10694{
10695 size_t j = 0;
10696 if (size > max_size) {
10697 size = max_size;
10698 }
10699 for (size_t i = 0; i < size; i++) {
10700 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10701 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10702 j+=2;
10703 }
10704}
10705
10706/*===========================================================================
10707 * FUNCTION : makeOverridesList
10708 *
10709 * DESCRIPTION: make a list of scene mode overrides
10710 *
10711 * PARAMETERS :
10712 *
10713 *
10714 *==========================================================================*/
10715void QCamera3HardwareInterface::makeOverridesList(
10716 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10717 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10718{
10719     /* The daemon will give a list of overrides for all scene modes.
10720        However, we should send the framework only the overrides for the
10721        scene modes it supports. */
10722 size_t j = 0;
10723 if (size > max_size) {
10724 size = max_size;
10725 }
10726 size_t focus_count = CAM_FOCUS_MODE_MAX;
10727 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10728 focus_count);
10729 for (size_t i = 0; i < size; i++) {
10730 bool supt = false;
10731 size_t index = supported_indexes[i];
10732 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10733 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10734 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10735 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10736 overridesTable[index].awb_mode);
10737 if (NAME_NOT_FOUND != val) {
10738 overridesList[j+1] = (uint8_t)val;
10739 }
10740 uint8_t focus_override = overridesTable[index].af_mode;
10741 for (size_t k = 0; k < focus_count; k++) {
10742 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10743 supt = true;
10744 break;
10745 }
10746 }
10747 if (supt) {
10748 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10749 focus_override);
10750 if (NAME_NOT_FOUND != val) {
10751 overridesList[j+2] = (uint8_t)val;
10752 }
10753 } else {
10754 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10755 }
10756 j+=3;
10757 }
10758}
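/*
 * Example (hypothetical modes): makeOverridesList() emits one {ae, awb, af} triplet
 * per framework-visible scene mode, matching the layout expected by
 * ANDROID_CONTROL_SCENE_MODE_OVERRIDES. With flash available, an AUTO white balance
 * override and a supported continuous-picture AF override, one triplet would be
 *     { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,
 *       ANDROID_CONTROL_AWB_MODE_AUTO,
 *       ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE }
 */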
10759
10760/*===========================================================================
10761 * FUNCTION : filterJpegSizes
10762 *
10763  * DESCRIPTION: Returns the supported JPEG sizes, restricted to sizes that are
10764  *              no smaller than the active array size divided by downscale_factor
10765 *
10766 * PARAMETERS :
10767 *
10768 * RETURN : length of jpegSizes array
10769 *==========================================================================*/
10770
10771size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10772 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10773 uint8_t downscale_factor)
10774{
10775 if (0 == downscale_factor) {
10776 downscale_factor = 1;
10777 }
10778
10779 int32_t min_width = active_array_size.width / downscale_factor;
10780 int32_t min_height = active_array_size.height / downscale_factor;
10781 size_t jpegSizesCnt = 0;
10782 if (processedSizesCnt > maxCount) {
10783 processedSizesCnt = maxCount;
10784 }
10785 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10786 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10787 jpegSizes[jpegSizesCnt] = processedSizes[i];
10788 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10789 jpegSizesCnt += 2;
10790 }
10791 }
10792 return jpegSizesCnt;
10793}
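/*
 * Worked example (hypothetical sizes): with a 4000x3000 active array and
 * downscale_factor = 2, only sizes of at least 2000x1500 pass the filter, so a
 * processedSizes list of {4000,3000, 1920,1080, 2592,1944} is reduced to
 *     { 4000, 3000, 2592, 1944 }
 * and the function returns 4, the number of int32 entries written to jpegSizes.
 */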
10794
10795/*===========================================================================
10796 * FUNCTION : computeNoiseModelEntryS
10797 *
10798 * DESCRIPTION: function to map a given sensitivity to the S noise
10799 * model parameters in the DNG noise model.
10800 *
10801 * PARAMETERS : sens : the sensor sensitivity
10802 *
10803  * RETURN     : S (sensor amplification) noise
10804 *
10805 *==========================================================================*/
10806double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10807 double s = gCamCapability[mCameraId]->gradient_S * sens +
10808 gCamCapability[mCameraId]->offset_S;
10809 return ((s < 0.0) ? 0.0 : s);
10810}
10811
10812/*===========================================================================
10813 * FUNCTION : computeNoiseModelEntryO
10814 *
10815 * DESCRIPTION: function to map a given sensitivity to the O noise
10816 * model parameters in the DNG noise model.
10817 *
10818 * PARAMETERS : sens : the sensor sensitivity
10819 *
10820  * RETURN     : O (sensor readout) noise
10821 *
10822 *==========================================================================*/
10823double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10824 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10825 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10826 1.0 : (1.0 * sens / max_analog_sens);
10827 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10828 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10829 return ((o < 0.0) ? 0.0 : o);
10830}
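/*
 * Worked example (assumed calibration values): the DNG noise model used by
 * ANDROID_SENSOR_NOISE_PROFILE is N(x) = sqrt(S * x + O) for a normalized pixel
 * value x. Assuming gradient_S = 3.74e-06 and offset_S = 3.23e-08, a sensitivity of
 * 400 gives
 *     S = 3.74e-06 * 400 + 3.23e-08 ~ 1.50e-03
 * while O grows roughly quadratically with sensitivity, and additionally with the
 * squared digital gain once max_analog_sensitivity is exceeded. The calibration
 * constants above are illustrative, not values read from a real sensor.
 */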
10831
10832/*===========================================================================
10833 * FUNCTION : getSensorSensitivity
10834 *
10835 * DESCRIPTION: convert iso_mode to an integer value
10836 *
10837 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10838 *
10839 ** RETURN : sensitivity supported by sensor
10840 *
10841 *==========================================================================*/
10842int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10843{
10844 int32_t sensitivity;
10845
10846 switch (iso_mode) {
10847 case CAM_ISO_MODE_100:
10848 sensitivity = 100;
10849 break;
10850 case CAM_ISO_MODE_200:
10851 sensitivity = 200;
10852 break;
10853 case CAM_ISO_MODE_400:
10854 sensitivity = 400;
10855 break;
10856 case CAM_ISO_MODE_800:
10857 sensitivity = 800;
10858 break;
10859 case CAM_ISO_MODE_1600:
10860 sensitivity = 1600;
10861 break;
10862 default:
10863 sensitivity = -1;
10864 break;
10865 }
10866 return sensitivity;
10867}
10868
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010869int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010870 if (gEaselManagerClient == nullptr) {
10871 gEaselManagerClient = EaselManagerClient::create();
10872 if (gEaselManagerClient == nullptr) {
10873 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10874 return -ENODEV;
10875 }
10876 }
10877
10878 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010879 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10880 // to connect to Easel.
10881 bool doNotpowerOnEasel =
10882 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10883
10884 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010885 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10886 return OK;
10887 }
10888
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010889 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010890 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010891 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010892 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010893 return res;
10894 }
10895
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010896 EaselManagerClientOpened = true;
10897
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010898 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010899 if (res != OK) {
10900 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10901 }
10902
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010903 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010904 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010905
10906 // Expose enableZsl key only when HDR+ mode is enabled.
10907 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010908 }
10909
10910 return OK;
10911}
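/*
 * Usage sketch (standard Android system properties, not a HAL API): the toggles read
 * in initHdrPlusClientLocked() can be flipped on an engineering build, e.g.
 *     adb shell setprop persist.camera.hdrplus.enable 1     # run HDR+ (not bypass-only)
 *     adb shell setprop persist.camera.hdrplus.profiling 1  # enable HDR+ profiling
 *     adb shell setprop camera.hdrplus.donotpoweroneasel 1  # skip powering on Easel
 * The properties are re-read the next time this function runs (e.g. after the camera
 * provider process restarts).
 */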
10912
Thierry Strudel3d639192016-09-09 11:52:26 -070010913/*===========================================================================
10914 * FUNCTION : getCamInfo
10915 *
10916 * DESCRIPTION: query camera capabilities
10917 *
10918 * PARAMETERS :
10919 * @cameraId : camera Id
10920 * @info : camera info struct to be filled in with camera capabilities
10921 *
10922 * RETURN : int type of status
10923 * NO_ERROR -- success
10924 * none-zero failure code
10925  *              non-zero failure code
10926int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10927 struct camera_info *info)
10928{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010929 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010930 int rc = 0;
10931
10932 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010933
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010934 {
10935 Mutex::Autolock l(gHdrPlusClientLock);
10936 rc = initHdrPlusClientLocked();
10937 if (rc != OK) {
10938 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10939 pthread_mutex_unlock(&gCamLock);
10940 return rc;
10941 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010942 }
10943
Thierry Strudel3d639192016-09-09 11:52:26 -070010944 if (NULL == gCamCapability[cameraId]) {
10945 rc = initCapabilities(cameraId);
10946 if (rc < 0) {
10947 pthread_mutex_unlock(&gCamLock);
10948 return rc;
10949 }
10950 }
10951
10952 if (NULL == gStaticMetadata[cameraId]) {
10953 rc = initStaticMetadata(cameraId);
10954 if (rc < 0) {
10955 pthread_mutex_unlock(&gCamLock);
10956 return rc;
10957 }
10958 }
10959
10960 switch(gCamCapability[cameraId]->position) {
10961 case CAM_POSITION_BACK:
10962 case CAM_POSITION_BACK_AUX:
10963 info->facing = CAMERA_FACING_BACK;
10964 break;
10965
10966 case CAM_POSITION_FRONT:
10967 case CAM_POSITION_FRONT_AUX:
10968 info->facing = CAMERA_FACING_FRONT;
10969 break;
10970
10971 default:
10972 LOGE("Unknown position type %d for camera id:%d",
10973 gCamCapability[cameraId]->position, cameraId);
10974 rc = -1;
10975 break;
10976 }
10977
10978
10979 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010980#ifndef USE_HAL_3_3
10981 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10982#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010983 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010984#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010985 info->static_camera_characteristics = gStaticMetadata[cameraId];
10986
10987 //For now assume both cameras can operate independently.
10988 info->conflicting_devices = NULL;
10989 info->conflicting_devices_length = 0;
10990
10991 //resource cost is 100 * MIN(1.0, m/M),
10992 //where m is throughput requirement with maximum stream configuration
10993 //and M is CPP maximum throughput.
10994 float max_fps = 0.0;
10995 for (uint32_t i = 0;
10996 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10997 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10998 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10999 }
11000 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11001 gCamCapability[cameraId]->active_array_size.width *
11002 gCamCapability[cameraId]->active_array_size.height * max_fps /
11003 gCamCapability[cameraId]->max_pixel_bandwidth;
11004 info->resource_cost = 100 * MIN(1.0, ratio);
11005 LOGI("camera %d resource cost is %d", cameraId,
11006 info->resource_cost);
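    /*
     * Worked example (hypothetical capability values): with a 4000x3000 active array,
     * max_fps = 30, MAX_PROCESSED_STREAMS = 3 and max_pixel_bandwidth = 1.2e9,
     *     ratio = 3 * 4000 * 3000 * 30 / 1.2e9 = 0.9
     * so resource_cost = 100 * MIN(1.0, 0.9) = 90.
     */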
11007
11008 pthread_mutex_unlock(&gCamLock);
11009 return rc;
11010}
11011
11012/*===========================================================================
11013 * FUNCTION : translateCapabilityToMetadata
11014 *
11015 * DESCRIPTION: translate the capability into camera_metadata_t
11016 *
11017 * PARAMETERS : type of the request
11018 *
11019 *
11020 * RETURN : success: camera_metadata_t*
11021 * failure: NULL
11022 *
11023 *==========================================================================*/
11024camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11025{
11026 if (mDefaultMetadata[type] != NULL) {
11027 return mDefaultMetadata[type];
11028 }
11029 //first time we are handling this request
11030 //fill up the metadata structure using the wrapper class
11031 CameraMetadata settings;
11032 //translate from cam_capability_t to camera_metadata_tag_t
11033 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11034 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11035 int32_t defaultRequestID = 0;
11036 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11037
11038 /* OIS disable */
11039 char ois_prop[PROPERTY_VALUE_MAX];
11040 memset(ois_prop, 0, sizeof(ois_prop));
11041 property_get("persist.camera.ois.disable", ois_prop, "0");
11042 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11043
11044 /* Force video to use OIS */
11045 char videoOisProp[PROPERTY_VALUE_MAX];
11046 memset(videoOisProp, 0, sizeof(videoOisProp));
11047 property_get("persist.camera.ois.video", videoOisProp, "1");
11048 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011049
11050 // Hybrid AE enable/disable
11051 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11052 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11053 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
11054 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
11055
Thierry Strudel3d639192016-09-09 11:52:26 -070011056 uint8_t controlIntent = 0;
11057 uint8_t focusMode;
11058 uint8_t vsMode;
11059 uint8_t optStabMode;
11060 uint8_t cacMode;
11061 uint8_t edge_mode;
11062 uint8_t noise_red_mode;
11063 uint8_t tonemap_mode;
11064 bool highQualityModeEntryAvailable = FALSE;
11065 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011066 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011067 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11068 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011069 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011070 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011071 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011072
Thierry Strudel3d639192016-09-09 11:52:26 -070011073 switch (type) {
11074 case CAMERA3_TEMPLATE_PREVIEW:
11075 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11076 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11077 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11078 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11079 edge_mode = ANDROID_EDGE_MODE_FAST;
11080 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11081 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11082 break;
11083 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11084 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11085 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11086 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11087 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11088 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11089 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11090 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11091 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11092 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11093 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11094 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11095 highQualityModeEntryAvailable = TRUE;
11096 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11097 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11098 fastModeEntryAvailable = TRUE;
11099 }
11100 }
11101 if (highQualityModeEntryAvailable) {
11102 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11103 } else if (fastModeEntryAvailable) {
11104 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11105 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011106 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11107 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11108 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011109 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011110 break;
11111 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11112 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11113 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11114 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011115 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11116 edge_mode = ANDROID_EDGE_MODE_FAST;
11117 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11118 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11119 if (forceVideoOis)
11120 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11121 break;
11122 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11123 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11124 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11125 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011126 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11127 edge_mode = ANDROID_EDGE_MODE_FAST;
11128 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11129 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11130 if (forceVideoOis)
11131 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11132 break;
11133 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11134 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11135 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11136 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11137 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11138 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11139 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11140 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11141 break;
11142 case CAMERA3_TEMPLATE_MANUAL:
11143 edge_mode = ANDROID_EDGE_MODE_FAST;
11144 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11145 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11146 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11147 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11148 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11149 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11150 break;
11151 default:
11152 edge_mode = ANDROID_EDGE_MODE_FAST;
11153 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11154 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11155 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11156 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11157 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11158 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11159 break;
11160 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011161     // Set CAC to OFF if the underlying device doesn't support it
11162 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11163 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11164 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011165 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11166 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11167 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11168 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11169 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11170 }
11171 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011172 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011173 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011174
11175 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11176 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11177 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11178 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11179 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11180 || ois_disable)
11181 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11182 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011183 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011184
11185 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11186 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11187
11188 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11189 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11190
11191 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11192 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11193
11194 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11195 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11196
11197 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11198 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11199
11200 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11201 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11202
11203 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11204 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11205
11206 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11207 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11208
11209 /*flash*/
11210 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11211 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11212
11213 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11214 settings.update(ANDROID_FLASH_FIRING_POWER,
11215 &flashFiringLevel, 1);
11216
11217 /* lens */
11218 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11219 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11220
11221 if (gCamCapability[mCameraId]->filter_densities_count) {
11222 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11223 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11224 gCamCapability[mCameraId]->filter_densities_count);
11225 }
11226
11227 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11228 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11229
Thierry Strudel3d639192016-09-09 11:52:26 -070011230 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11231 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11232
11233 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11234 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11235
11236 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11237 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11238
11239 /* face detection (default to OFF) */
11240 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11241 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11242
Thierry Strudel54dc9782017-02-15 12:12:10 -080011243 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11244 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011245
11246 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11247 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11248
11249 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11250 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11251
Thierry Strudel3d639192016-09-09 11:52:26 -070011252
11253 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11254 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11255
11256 /* Exposure time(Update the Min Exposure Time)*/
11257 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11258 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11259
11260 /* frame duration */
11261 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11262 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11263
11264 /* sensitivity */
11265 static const int32_t default_sensitivity = 100;
11266 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011267#ifndef USE_HAL_3_3
11268 static const int32_t default_isp_sensitivity =
11269 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11270 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11271#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011272
11273 /*edge mode*/
11274 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11275
11276 /*noise reduction mode*/
11277 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11278
11279 /*color correction mode*/
11280 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11281 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11282
11283 /*transform matrix mode*/
11284 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11285
11286 int32_t scaler_crop_region[4];
11287 scaler_crop_region[0] = 0;
11288 scaler_crop_region[1] = 0;
11289 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11290 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11291 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11292
11293 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11294 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11295
11296 /*focus distance*/
11297 float focus_distance = 0.0;
11298 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11299
11300 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011301 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011302 float max_range = 0.0;
11303 float max_fixed_fps = 0.0;
11304 int32_t fps_range[2] = {0, 0};
11305 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11306 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011307 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11308 TEMPLATE_MAX_PREVIEW_FPS) {
11309 continue;
11310 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011311 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11312 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11313 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11314 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11315 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11316 if (range > max_range) {
11317 fps_range[0] =
11318 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11319 fps_range[1] =
11320 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11321 max_range = range;
11322 }
11323 } else {
11324 if (range < 0.01 && max_fixed_fps <
11325 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11326 fps_range[0] =
11327 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11328 fps_range[1] =
11329 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11330 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11331 }
11332 }
11333 }
11334 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
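    /*
     * Example (hypothetical fps table): given ranges {[15,30], [30,30], [7,60]} and
     * TEMPLATE_MAX_PREVIEW_FPS assumed to be 30, the loop above skips [7,60]; the
     * preview/still/ZSL templates pick the widest remaining range ([15,30]) while the
     * video templates pick the highest fixed range ([30,30]).
     */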
11335
11336 /*precapture trigger*/
11337 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11338 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11339
11340 /*af trigger*/
11341 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11342 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11343
11344 /* ae & af regions */
11345 int32_t active_region[] = {
11346 gCamCapability[mCameraId]->active_array_size.left,
11347 gCamCapability[mCameraId]->active_array_size.top,
11348 gCamCapability[mCameraId]->active_array_size.left +
11349 gCamCapability[mCameraId]->active_array_size.width,
11350 gCamCapability[mCameraId]->active_array_size.top +
11351 gCamCapability[mCameraId]->active_array_size.height,
11352 0};
11353 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11354 sizeof(active_region) / sizeof(active_region[0]));
11355 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11356 sizeof(active_region) / sizeof(active_region[0]));
11357
11358 /* black level lock */
11359 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11360 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11361
Thierry Strudel3d639192016-09-09 11:52:26 -070011362 //special defaults for manual template
11363 if (type == CAMERA3_TEMPLATE_MANUAL) {
11364 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11365 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11366
11367 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11368 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11369
11370 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11371 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11372
11373 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11374 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11375
11376 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11377 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11378
11379 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11380 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11381 }
11382
11383
11384 /* TNR
11385      * We use this location to decide for which templates TNR is enabled.
11386      * TNR is enabled if either the preview or the video stream requires it.
11387      * This is not to be confused with per-stream linking; that decision is
11388      * still made per session and is handled as part of stream configuration.
11389 */
11390 uint8_t tnr_enable = 0;
11391
11392 if (m_bTnrPreview || m_bTnrVideo) {
11393
11394 switch (type) {
11395 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11396 tnr_enable = 1;
11397 break;
11398
11399 default:
11400 tnr_enable = 0;
11401 break;
11402 }
11403
11404 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11405 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11406 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11407
11408 LOGD("TNR:%d with process plate %d for template:%d",
11409 tnr_enable, tnr_process_type, type);
11410 }
11411
11412 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011413 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011414 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11415
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011416 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011417 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11418
Shuzhen Wang920ea402017-05-03 08:49:39 -070011419 uint8_t related_camera_id = mCameraId;
11420 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011421
11422 /* CDS default */
11423 char prop[PROPERTY_VALUE_MAX];
11424 memset(prop, 0, sizeof(prop));
11425 property_get("persist.camera.CDS", prop, "Auto");
11426 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11427 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11428 if (CAM_CDS_MODE_MAX == cds_mode) {
11429 cds_mode = CAM_CDS_MODE_AUTO;
11430 }
11431
11432 /* Disabling CDS in templates which have TNR enabled*/
11433     /* Disabling CDS in templates which have TNR enabled */
11434 cds_mode = CAM_CDS_MODE_OFF;
11435
11436 int32_t mode = cds_mode;
11437 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011438
Thierry Strudel269c81a2016-10-12 12:13:59 -070011439 /* Manual Convergence AEC Speed is disabled by default*/
11440 float default_aec_speed = 0;
11441 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11442
11443 /* Manual Convergence AWB Speed is disabled by default*/
11444 float default_awb_speed = 0;
11445 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11446
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011447 // Set instant AEC to normal convergence by default
11448 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11449 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11450
Shuzhen Wang19463d72016-03-08 11:09:52 -080011451 /* hybrid ae */
11452 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11453
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011454 if (gExposeEnableZslKey) {
11455 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11456 }
11457
Thierry Strudel3d639192016-09-09 11:52:26 -070011458 mDefaultMetadata[type] = settings.release();
11459
11460 return mDefaultMetadata[type];
11461}
11462
11463/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011464 * FUNCTION : getExpectedFrameDuration
11465 *
11466 * DESCRIPTION: Extract the maximum frame duration from either exposure or frame
11467 * duration
11468 *
11469 * PARAMETERS :
11470 * @request : request settings
11471 * @frameDuration : The maximum frame duration in nanoseconds
11472 *
11473 * RETURN : None
11474 *==========================================================================*/
11475void QCamera3HardwareInterface::getExpectedFrameDuration(
11476 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11477 if (nullptr == frameDuration) {
11478 return;
11479 }
11480
11481 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11482 find_camera_metadata_ro_entry(request,
11483 ANDROID_SENSOR_EXPOSURE_TIME,
11484 &e);
11485 if (e.count > 0) {
11486 *frameDuration = e.data.i64[0];
11487 }
11488 find_camera_metadata_ro_entry(request,
11489 ANDROID_SENSOR_FRAME_DURATION,
11490 &e);
11491 if (e.count > 0) {
11492 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11493 }
11494}
11495
11496/*===========================================================================
11497 * FUNCTION : calculateMaxExpectedDuration
11498 *
11499 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11500 * current camera settings.
11501 *
11502 * PARAMETERS :
11503 * @request : request settings
11504 *
11505 * RETURN : Expected frame duration in nanoseconds.
11506 *==========================================================================*/
11507nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11508 const camera_metadata_t *request) {
11509 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11510 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11511 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11512 if (e.count == 0) {
11513 return maxExpectedDuration;
11514 }
11515
11516 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11517 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11518 }
11519
11520 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11521 return maxExpectedDuration;
11522 }
11523
11524 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11525 if (e.count == 0) {
11526 return maxExpectedDuration;
11527 }
11528
11529 switch (e.data.u8[0]) {
11530 case ANDROID_CONTROL_AE_MODE_OFF:
11531 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11532 break;
11533 default:
11534 find_camera_metadata_ro_entry(request,
11535 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11536 &e);
11537 if (e.count > 1) {
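                // ANDROID_CONTROL_AE_TARGET_FPS_RANGE is an int32 [min, max] pair; the
                // longest expected frame duration corresponds to the minimum FPS.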
11538                maxExpectedDuration = 1e9 / e.data.i32[0];
11539 }
11540 break;
11541 }
11542
11543 return maxExpectedDuration;
11544}
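/* Illustrative example for the function above (a sketch): with ANDROID_CONTROL_MODE set to
 * AUTO, an AE mode other than OFF, and an AE target FPS range of [15, 30], the default
 * branch returns roughly 1e9 / 15 ns (~66.7 ms), the longest frame duration AE may pick
 * within the advertised range. */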
11545
11546/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011547 * FUNCTION : setFrameParameters
11548 *
11549 * DESCRIPTION: set parameters per frame as requested in the metadata from
11550 * framework
11551 *
11552 * PARAMETERS :
11553 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011554 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011555 * @blob_request: Whether this request is a blob request or not
11556 *
11557 * RETURN : success: NO_ERROR
11558 * failure:
11559 *==========================================================================*/
11560int QCamera3HardwareInterface::setFrameParameters(
11561 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011562 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011563 int blob_request,
11564 uint32_t snapshotStreamId)
11565{
11566 /*translate from camera_metadata_t type to parm_type_t*/
11567 int rc = 0;
11568 int32_t hal_version = CAM_HAL_V3;
11569
11570 clear_metadata_buffer(mParameters);
11571 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11572 LOGE("Failed to set hal version in the parameters");
11573 return BAD_VALUE;
11574 }
11575
11576 /*we need to update the frame number in the parameters*/
11577 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11578 request->frame_number)) {
11579 LOGE("Failed to set the frame number in the parameters");
11580 return BAD_VALUE;
11581 }
11582
11583 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011584 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011585 LOGE("Failed to set stream type mask in the parameters");
11586 return BAD_VALUE;
11587 }
11588
11589 if (mUpdateDebugLevel) {
11590 uint32_t dummyDebugLevel = 0;
11591        /* The value of dummyDebugLevel is irrelevant. On
11592         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the backend re-reads the debug property */
11593 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11594 dummyDebugLevel)) {
11595 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11596 return BAD_VALUE;
11597 }
11598 mUpdateDebugLevel = false;
11599 }
11600
11601 if(request->settings != NULL){
Emilian Peev30522a12017-08-03 14:36:33 +010011602 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011603 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11604 if (blob_request)
11605 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11606 }
11607
11608 return rc;
11609}
11610
11611/*===========================================================================
11612 * FUNCTION : setReprocParameters
11613 *
11614 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11615 * return it.
11616 *
11617 * PARAMETERS :
11618 * @request : request that needs to be serviced
11619 *
11620 * RETURN : success: NO_ERROR
11621 * failure:
11622 *==========================================================================*/
11623int32_t QCamera3HardwareInterface::setReprocParameters(
11624 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11625 uint32_t snapshotStreamId)
11626{
11627 /*translate from camera_metadata_t type to parm_type_t*/
11628 int rc = 0;
11629
11630 if (NULL == request->settings){
11631 LOGE("Reprocess settings cannot be NULL");
11632 return BAD_VALUE;
11633 }
11634
11635 if (NULL == reprocParam) {
11636 LOGE("Invalid reprocessing metadata buffer");
11637 return BAD_VALUE;
11638 }
11639 clear_metadata_buffer(reprocParam);
11640
11641 /*we need to update the frame number in the parameters*/
11642 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11643 request->frame_number)) {
11644 LOGE("Failed to set the frame number in the parameters");
11645 return BAD_VALUE;
11646 }
11647
11648 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11649 if (rc < 0) {
11650 LOGE("Failed to translate reproc request");
11651 return rc;
11652 }
11653
11654 CameraMetadata frame_settings;
11655 frame_settings = request->settings;
11656 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11657 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
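        // Both vendor crop tags are flattened int32 arrays laid out as
        // [left, top, width, height], as the element-wise copies below assume.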
11658 int32_t *crop_count =
11659 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11660 int32_t *crop_data =
11661 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11662 int32_t *roi_map =
11663 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11664 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11665 cam_crop_data_t crop_meta;
11666 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11667 crop_meta.num_of_streams = 1;
11668 crop_meta.crop_info[0].crop.left = crop_data[0];
11669 crop_meta.crop_info[0].crop.top = crop_data[1];
11670 crop_meta.crop_info[0].crop.width = crop_data[2];
11671 crop_meta.crop_info[0].crop.height = crop_data[3];
11672
11673 crop_meta.crop_info[0].roi_map.left =
11674 roi_map[0];
11675 crop_meta.crop_info[0].roi_map.top =
11676 roi_map[1];
11677 crop_meta.crop_info[0].roi_map.width =
11678 roi_map[2];
11679 crop_meta.crop_info[0].roi_map.height =
11680 roi_map[3];
11681
11682 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11683 rc = BAD_VALUE;
11684 }
11685        LOGD("Found reprocess crop data for stream %p: (%d, %d) %dx%d",
11686 request->input_buffer->stream,
11687 crop_meta.crop_info[0].crop.left,
11688 crop_meta.crop_info[0].crop.top,
11689 crop_meta.crop_info[0].crop.width,
11690 crop_meta.crop_info[0].crop.height);
11691        LOGD("Found reprocess roi map data for stream %p: (%d, %d) %dx%d",
11692 request->input_buffer->stream,
11693 crop_meta.crop_info[0].roi_map.left,
11694 crop_meta.crop_info[0].roi_map.top,
11695 crop_meta.crop_info[0].roi_map.width,
11696 crop_meta.crop_info[0].roi_map.height);
11697 } else {
11698 LOGE("Invalid reprocess crop count %d!", *crop_count);
11699 }
11700 } else {
11701 LOGE("No crop data from matching output stream");
11702 }
11703
11704 /* These settings are not needed for regular requests so handle them specially for
11705 reprocess requests; information needed for EXIF tags */
11706 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11707 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11708 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11709 if (NAME_NOT_FOUND != val) {
11710 uint32_t flashMode = (uint32_t)val;
11711 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11712 rc = BAD_VALUE;
11713 }
11714 } else {
11715 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11716 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11717 }
11718 } else {
11719 LOGH("No flash mode in reprocess settings");
11720 }
11721
11722 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11723 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11724 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11725 rc = BAD_VALUE;
11726 }
11727 } else {
11728 LOGH("No flash state in reprocess settings");
11729 }
11730
11731 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11732 uint8_t *reprocessFlags =
11733 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11734 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11735 *reprocessFlags)) {
11736 rc = BAD_VALUE;
11737 }
11738 }
11739
Thierry Strudel54dc9782017-02-15 12:12:10 -080011740 // Add exif debug data to internal metadata
11741 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11742 mm_jpeg_debug_exif_params_t *debug_params =
11743 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11744 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11745 // AE
11746 if (debug_params->ae_debug_params_valid == TRUE) {
11747 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11748 debug_params->ae_debug_params);
11749 }
11750 // AWB
11751 if (debug_params->awb_debug_params_valid == TRUE) {
11752 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11753 debug_params->awb_debug_params);
11754 }
11755 // AF
11756 if (debug_params->af_debug_params_valid == TRUE) {
11757 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11758 debug_params->af_debug_params);
11759 }
11760 // ASD
11761 if (debug_params->asd_debug_params_valid == TRUE) {
11762 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11763 debug_params->asd_debug_params);
11764 }
11765 // Stats
11766 if (debug_params->stats_debug_params_valid == TRUE) {
11767 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11768 debug_params->stats_debug_params);
11769 }
11770 // BE Stats
11771 if (debug_params->bestats_debug_params_valid == TRUE) {
11772 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11773 debug_params->bestats_debug_params);
11774 }
11775 // BHIST
11776 if (debug_params->bhist_debug_params_valid == TRUE) {
11777 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11778 debug_params->bhist_debug_params);
11779 }
11780 // 3A Tuning
11781 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11782 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11783 debug_params->q3a_tuning_debug_params);
11784 }
11785 }
11786
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011787 // Add metadata which reprocess needs
11788 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11789 cam_reprocess_info_t *repro_info =
11790 (cam_reprocess_info_t *)frame_settings.find
11791 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011792 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011793 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011794 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011795 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011796 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011797 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011798 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011799 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011800 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011801 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011802 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011803 repro_info->pipeline_flip);
11804 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11805 repro_info->af_roi);
11806 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11807 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011808    /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11809       CAM_INTF_PARM_ROTATION has already been added by
11810       translateToHalMetadata and the HAL must keep that new rotation
11811       metadata. Otherwise, the old rotation info saved in the vendor tag
11812       is used */
11813 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11814 CAM_INTF_PARM_ROTATION, reprocParam) {
11815 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11816 } else {
11817 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011818 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011819 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011820 }
11821
11822    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11823       to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11824       roi.width and roi.height become the final JPEG size.
11825       For now, the HAL only checks this for reprocess requests */
11826 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11827 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11828 uint8_t *enable =
11829 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11830 if (*enable == TRUE) {
11831 int32_t *crop_data =
11832 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11833 cam_stream_crop_info_t crop_meta;
11834 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11835 crop_meta.stream_id = 0;
11836 crop_meta.crop.left = crop_data[0];
11837 crop_meta.crop.top = crop_data[1];
11838 crop_meta.crop.width = crop_data[2];
11839 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011840 // The JPEG crop roi should match cpp output size
11841 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11842 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11843 crop_meta.roi_map.left = 0;
11844 crop_meta.roi_map.top = 0;
11845 crop_meta.roi_map.width = cpp_crop->crop.width;
11846 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011847 }
11848 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11849 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011850 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011851 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011852 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11853 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011854 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011855 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11856
11857 // Add JPEG scale information
11858 cam_dimension_t scale_dim;
11859 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11860 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11861 int32_t *roi =
11862 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11863 scale_dim.width = roi[2];
11864 scale_dim.height = roi[3];
11865 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11866 scale_dim);
11867 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11868 scale_dim.width, scale_dim.height, mCameraId);
11869 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011870 }
11871 }
11872
11873 return rc;
11874}
11875
11876/*===========================================================================
11877 * FUNCTION : saveRequestSettings
11878 *
11879 * DESCRIPTION: Add any settings that might have changed to the request settings
11880 * and save the settings to be applied on the frame
11881 *
11882 * PARAMETERS :
11883 * @jpegMetadata : the extracted and/or modified jpeg metadata
11884 * @request : request with initial settings
11885 *
11886 * RETURN :
11887 * camera_metadata_t* : pointer to the saved request settings
11888 *==========================================================================*/
11889camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11890 const CameraMetadata &jpegMetadata,
11891 camera3_capture_request_t *request)
11892{
11893 camera_metadata_t *resultMetadata;
11894 CameraMetadata camMetadata;
11895 camMetadata = request->settings;
11896
11897 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11898 int32_t thumbnail_size[2];
11899 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11900 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11901 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11902 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11903 }
11904
11905 if (request->input_buffer != NULL) {
11906 uint8_t reprocessFlags = 1;
11907 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11908 (uint8_t*)&reprocessFlags,
11909 sizeof(reprocessFlags));
11910 }
11911
11912 resultMetadata = camMetadata.release();
11913 return resultMetadata;
11914}
11915
11916/*===========================================================================
11917 * FUNCTION : setHalFpsRange
11918 *
11919 * DESCRIPTION: set FPS range parameter
11920 *
11921 *
11922 * PARAMETERS :
11923 * @settings : Metadata from framework
11924 * @hal_metadata: Metadata buffer
11925 *
11926 *
11927 * RETURN : success: NO_ERROR
11928 * failure:
11929 *==========================================================================*/
11930int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11931 metadata_buffer_t *hal_metadata)
11932{
11933 int32_t rc = NO_ERROR;
11934 cam_fps_range_t fps_range;
11935 fps_range.min_fps = (float)
11936 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11937 fps_range.max_fps = (float)
11938 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11939 fps_range.video_min_fps = fps_range.min_fps;
11940 fps_range.video_max_fps = fps_range.max_fps;
11941
11942 LOGD("aeTargetFpsRange fps: [%f %f]",
11943 fps_range.min_fps, fps_range.max_fps);
11944 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11945 * follows:
11946 * ---------------------------------------------------------------|
11947 * Video stream is absent in configure_streams |
11948 * (Camcorder preview before the first video record |
11949 * ---------------------------------------------------------------|
11950 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11951 * | | | vid_min/max_fps|
11952 * ---------------------------------------------------------------|
11953 * NO | [ 30, 240] | 240 | [240, 240] |
11954 * |-------------|-------------|----------------|
11955 * | [240, 240] | 240 | [240, 240] |
11956 * ---------------------------------------------------------------|
11957 * Video stream is present in configure_streams |
11958 * ---------------------------------------------------------------|
11959 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11960 * | | | vid_min/max_fps|
11961 * ---------------------------------------------------------------|
11962 * NO | [ 30, 240] | 240 | [240, 240] |
11963 * (camcorder prev |-------------|-------------|----------------|
11964 * after video rec | [240, 240] | 240 | [240, 240] |
11965 * is stopped) | | | |
11966 * ---------------------------------------------------------------|
11967 * YES | [ 30, 240] | 240 | [240, 240] |
11968 * |-------------|-------------|----------------|
11969 * | [240, 240] | 240 | [240, 240] |
11970 * ---------------------------------------------------------------|
11971 * When Video stream is absent in configure_streams,
11972 * preview fps = sensor_fps / batchsize
11973 * Eg: for 240fps at batchSize 4, preview = 60fps
11974 * for 120fps at batchSize 4, preview = 30fps
11975 *
11976 * When video stream is present in configure_streams, preview fps is as per
11977 * the ratio of preview buffers to video buffers requested in process
11978 * capture request
11979 */
11980 mBatchSize = 0;
11981 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11982 fps_range.min_fps = fps_range.video_max_fps;
11983 fps_range.video_min_fps = fps_range.video_max_fps;
11984 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11985 fps_range.max_fps);
11986 if (NAME_NOT_FOUND != val) {
11987 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11988 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11989 return BAD_VALUE;
11990 }
11991
11992 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11993 /* If batchmode is currently in progress and the fps changes,
11994 * set the flag to restart the sensor */
11995 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11996 (mHFRVideoFps != fps_range.max_fps)) {
11997 mNeedSensorRestart = true;
11998 }
11999 mHFRVideoFps = fps_range.max_fps;
12000 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12001 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12002 mBatchSize = MAX_HFR_BATCH_SIZE;
12003 }
12004 }
12005 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12006
12007 }
12008 } else {
12009 /* HFR mode is session param in backend/ISP. This should be reset when
12010 * in non-HFR mode */
12011 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12012 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12013 return BAD_VALUE;
12014 }
12015 }
12016 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12017 return BAD_VALUE;
12018 }
12019 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12020 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12021 return rc;
12022}
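/* Illustrative example for the HFR handling above (a sketch, assuming PREVIEW_FPS_FOR_HFR is
 * 30, MAX_HFR_BATCH_SIZE is at least 4, and 120 fps qualifies for batch mode via
 * MIN_FPS_FOR_BATCH_MODE; all three are defined elsewhere): a constrained high-speed request
 * with aeTargetFpsRange [30, 120] pins the sensor range to [120, 120] and sets mBatchSize to
 * 4, matching the "120fps at batchSize 4, preview = 30fps" case in the table above. */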
12023
12024/*===========================================================================
12025 * FUNCTION : translateToHalMetadata
12026 *
12027 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12028 *
12029 *
12030 * PARAMETERS :
12031 * @request : request sent from framework
12032 *
12033 *
12034 * RETURN : success: NO_ERROR
12035 * failure:
12036 *==========================================================================*/
12037int QCamera3HardwareInterface::translateToHalMetadata
12038 (const camera3_capture_request_t *request,
12039 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012040 uint32_t snapshotStreamId) {
12041 if (request == nullptr || hal_metadata == nullptr) {
12042 return BAD_VALUE;
12043 }
12044
12045 int64_t minFrameDuration = getMinFrameDuration(request);
12046
12047 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12048 minFrameDuration);
12049}
12050
12051int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12052 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12053 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12054
Thierry Strudel3d639192016-09-09 11:52:26 -070012055 int rc = 0;
12056 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012057 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012058
12059 /* Do not change the order of the following list unless you know what you are
12060 * doing.
12061 * The order is laid out in such a way that parameters in the front of the table
12062 * may be used to override the parameters later in the table. Examples are:
12063 * 1. META_MODE should precede AEC/AWB/AF MODE
12064     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12065 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12066     * 4. Any mode should precede its corresponding settings
12067 */
12068 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12069 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12070 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12071 rc = BAD_VALUE;
12072 }
12073 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12074 if (rc != NO_ERROR) {
12075 LOGE("extractSceneMode failed");
12076 }
12077 }
12078
12079 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12080 uint8_t fwk_aeMode =
12081 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12082 uint8_t aeMode;
12083 int32_t redeye;
12084
12085 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12086 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012087 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12088 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012089 } else {
12090 aeMode = CAM_AE_MODE_ON;
12091 }
12092 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12093 redeye = 1;
12094 } else {
12095 redeye = 0;
12096 }
12097
12098 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12099 fwk_aeMode);
12100 if (NAME_NOT_FOUND != val) {
12101 int32_t flashMode = (int32_t)val;
12102 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12103 }
12104
12105 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12106 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12107 rc = BAD_VALUE;
12108 }
12109 }
12110
12111 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12112 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12113 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12114 fwk_whiteLevel);
12115 if (NAME_NOT_FOUND != val) {
12116 uint8_t whiteLevel = (uint8_t)val;
12117 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12118 rc = BAD_VALUE;
12119 }
12120 }
12121 }
12122
12123 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12124 uint8_t fwk_cacMode =
12125 frame_settings.find(
12126 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12127 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12128 fwk_cacMode);
12129 if (NAME_NOT_FOUND != val) {
12130 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12131 bool entryAvailable = FALSE;
12132 // Check whether Frameworks set CAC mode is supported in device or not
12133 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12134 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12135 entryAvailable = TRUE;
12136 break;
12137 }
12138 }
12139 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12140            // If the entry is not found, set a device-supported mode instead of the framework's mode, i.e.,
12141            // Only HW ISP CAC + no SW CAC : advertise all 3, with HIGH doing the same as FAST in the ISP
12142            // No HW ISP CAC + only SW CAC : advertise all 3, with FAST doing the same as OFF
12143 if (entryAvailable == FALSE) {
12144 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12145 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12146 } else {
12147 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12148                    // HIGH_QUALITY is not supported, so set FAST: the spec says the underlying
12149                    // device implementation may be the same for both modes.
12150 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12151 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12152                    // FAST is not supported, so neither HIGH nor FAST can be set; choose OFF
12153                    // to avoid the fps drop caused by high-quality processing
12154 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12155 } else {
12156 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12157 }
12158 }
12159 }
12160 LOGD("Final cacMode is %d", cacMode);
12161 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12162 rc = BAD_VALUE;
12163 }
12164 } else {
12165 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12166 }
12167 }
12168
Jason Lee84ae9972017-02-24 13:24:24 -080012169 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012170 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012171 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012172 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012173 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12174 fwk_focusMode);
12175 if (NAME_NOT_FOUND != val) {
12176 uint8_t focusMode = (uint8_t)val;
12177 LOGD("set focus mode %d", focusMode);
12178 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12179 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12180 rc = BAD_VALUE;
12181 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012182 }
12183 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012184 } else {
12185 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12186 LOGE("Focus forced to infinity %d", focusMode);
12187 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12188 rc = BAD_VALUE;
12189 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012190 }
12191
Jason Lee84ae9972017-02-24 13:24:24 -080012192 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12193 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012194 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12195 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12196 focalDistance)) {
12197 rc = BAD_VALUE;
12198 }
12199 }
12200
12201 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12202 uint8_t fwk_antibandingMode =
12203 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12204 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12205 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12206 if (NAME_NOT_FOUND != val) {
12207 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012208 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
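                // For AUTO, pick the 50 Hz or 60 Hz variant depending on whether the device
                // is currently believed to be in a 60 Hz power-line region (m60HzZone).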
12209 if (m60HzZone) {
12210 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12211 } else {
12212 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12213 }
12214 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012215 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12216 hal_antibandingMode)) {
12217 rc = BAD_VALUE;
12218 }
12219 }
12220 }
12221
12222 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12223 int32_t expCompensation = frame_settings.find(
12224 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12225 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12226 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12227 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12228 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012229 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012230 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12231 expCompensation)) {
12232 rc = BAD_VALUE;
12233 }
12234 }
12235
12236 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12237 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12238 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12239 rc = BAD_VALUE;
12240 }
12241 }
12242 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12243 rc = setHalFpsRange(frame_settings, hal_metadata);
12244 if (rc != NO_ERROR) {
12245 LOGE("setHalFpsRange failed");
12246 }
12247 }
12248
12249 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12250 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12251 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12252 rc = BAD_VALUE;
12253 }
12254 }
12255
12256 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12257 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12258 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12259 fwk_effectMode);
12260 if (NAME_NOT_FOUND != val) {
12261 uint8_t effectMode = (uint8_t)val;
12262 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12263 rc = BAD_VALUE;
12264 }
12265 }
12266 }
12267
12268 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12269 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12270 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12271 colorCorrectMode)) {
12272 rc = BAD_VALUE;
12273 }
12274 }
12275
12276 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12277 cam_color_correct_gains_t colorCorrectGains;
12278 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12279 colorCorrectGains.gains[i] =
12280 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12281 }
12282 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12283 colorCorrectGains)) {
12284 rc = BAD_VALUE;
12285 }
12286 }
12287
12288 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12289 cam_color_correct_matrix_t colorCorrectTransform;
12290 cam_rational_type_t transform_elem;
12291 size_t num = 0;
12292 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12293 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12294 transform_elem.numerator =
12295 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12296 transform_elem.denominator =
12297 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12298 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12299 num++;
12300 }
12301 }
12302 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12303 colorCorrectTransform)) {
12304 rc = BAD_VALUE;
12305 }
12306 }
12307
12308 cam_trigger_t aecTrigger;
12309 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12310 aecTrigger.trigger_id = -1;
12311 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12312 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12313 aecTrigger.trigger =
12314 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12315 aecTrigger.trigger_id =
12316 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12317 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12318 aecTrigger)) {
12319 rc = BAD_VALUE;
12320 }
12321 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12322 aecTrigger.trigger, aecTrigger.trigger_id);
12323 }
12324
12325 /*af_trigger must come with a trigger id*/
12326 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12327 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12328 cam_trigger_t af_trigger;
12329 af_trigger.trigger =
12330 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12331 af_trigger.trigger_id =
12332 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12333 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12334 rc = BAD_VALUE;
12335 }
12336 LOGD("AfTrigger: %d AfTriggerID: %d",
12337 af_trigger.trigger, af_trigger.trigger_id);
12338 }
12339
12340 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12341 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12342 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12343 rc = BAD_VALUE;
12344 }
12345 }
12346 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12347 cam_edge_application_t edge_application;
12348 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012349
Thierry Strudel3d639192016-09-09 11:52:26 -070012350 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12351 edge_application.sharpness = 0;
12352 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012353 edge_application.sharpness =
12354 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12355 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12356 int32_t sharpness =
12357 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12358 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12359 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12360 LOGD("Setting edge mode sharpness %d", sharpness);
12361 edge_application.sharpness = sharpness;
12362 }
12363 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012364 }
12365 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12366 rc = BAD_VALUE;
12367 }
12368 }
12369
12370 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12371 int32_t respectFlashMode = 1;
12372 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12373 uint8_t fwk_aeMode =
12374 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012375 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12376 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12377 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012378 respectFlashMode = 0;
12379 LOGH("AE Mode controls flash, ignore android.flash.mode");
12380 }
12381 }
12382 if (respectFlashMode) {
12383 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12384 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12385 LOGH("flash mode after mapping %d", val);
12386 // To check: CAM_INTF_META_FLASH_MODE usage
12387 if (NAME_NOT_FOUND != val) {
12388 uint8_t flashMode = (uint8_t)val;
12389 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12390 rc = BAD_VALUE;
12391 }
12392 }
12393 }
12394 }
12395
12396 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12397 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12398 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12399 rc = BAD_VALUE;
12400 }
12401 }
12402
12403 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12404 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12405 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12406 flashFiringTime)) {
12407 rc = BAD_VALUE;
12408 }
12409 }
12410
12411 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12412 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12413 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12414 hotPixelMode)) {
12415 rc = BAD_VALUE;
12416 }
12417 }
12418
12419 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12420 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12421 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12422 lensAperture)) {
12423 rc = BAD_VALUE;
12424 }
12425 }
12426
12427 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12428 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12429 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12430 filterDensity)) {
12431 rc = BAD_VALUE;
12432 }
12433 }
12434
12435 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12436 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12437 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12438 focalLength)) {
12439 rc = BAD_VALUE;
12440 }
12441 }
12442
12443 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12444 uint8_t optStabMode =
12445 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12446 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12447 optStabMode)) {
12448 rc = BAD_VALUE;
12449 }
12450 }
12451
12452 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12453 uint8_t videoStabMode =
12454 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12455 LOGD("videoStabMode from APP = %d", videoStabMode);
12456 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12457 videoStabMode)) {
12458 rc = BAD_VALUE;
12459 }
12460 }
12461
12462
12463 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12464 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12465 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12466 noiseRedMode)) {
12467 rc = BAD_VALUE;
12468 }
12469 }
12470
12471 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12472 float reprocessEffectiveExposureFactor =
12473 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12474 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12475 reprocessEffectiveExposureFactor)) {
12476 rc = BAD_VALUE;
12477 }
12478 }
12479
12480 cam_crop_region_t scalerCropRegion;
12481 bool scalerCropSet = false;
12482 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12483 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12484 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12485 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12486 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12487
12488 // Map coordinate system from active array to sensor output.
12489 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12490 scalerCropRegion.width, scalerCropRegion.height);
12491
12492 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12493 scalerCropRegion)) {
12494 rc = BAD_VALUE;
12495 }
12496 scalerCropSet = true;
12497 }
12498
12499 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12500 int64_t sensorExpTime =
12501 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12502 LOGD("setting sensorExpTime %lld", sensorExpTime);
12503 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12504 sensorExpTime)) {
12505 rc = BAD_VALUE;
12506 }
12507 }
12508
12509 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12510 int64_t sensorFrameDuration =
12511 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012512 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12513 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12514 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12515 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12516 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12517 sensorFrameDuration)) {
12518 rc = BAD_VALUE;
12519 }
12520 }
12521
12522 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12523 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12524 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12525 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12526 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12527 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12528 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12529 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12530 sensorSensitivity)) {
12531 rc = BAD_VALUE;
12532 }
12533 }
12534
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012535#ifndef USE_HAL_3_3
12536 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12537 int32_t ispSensitivity =
12538 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12539 if (ispSensitivity <
12540 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12541 ispSensitivity =
12542 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12543 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12544 }
12545 if (ispSensitivity >
12546 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12547 ispSensitivity =
12548 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12549 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12550 }
12551 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12552 ispSensitivity)) {
12553 rc = BAD_VALUE;
12554 }
12555 }
12556#endif
12557
Thierry Strudel3d639192016-09-09 11:52:26 -070012558 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12559 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12560 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12561 rc = BAD_VALUE;
12562 }
12563 }
12564
12565 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12566 uint8_t fwk_facedetectMode =
12567 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12568
12569 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12570 fwk_facedetectMode);
12571
12572 if (NAME_NOT_FOUND != val) {
12573 uint8_t facedetectMode = (uint8_t)val;
12574 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12575 facedetectMode)) {
12576 rc = BAD_VALUE;
12577 }
12578 }
12579 }
12580
Thierry Strudel54dc9782017-02-15 12:12:10 -080012581 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012582 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012583 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012584 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12585 histogramMode)) {
12586 rc = BAD_VALUE;
12587 }
12588 }
12589
12590 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12591 uint8_t sharpnessMapMode =
12592 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12593 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12594 sharpnessMapMode)) {
12595 rc = BAD_VALUE;
12596 }
12597 }
12598
12599 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12600 uint8_t tonemapMode =
12601 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12602 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12603 rc = BAD_VALUE;
12604 }
12605 }
12606 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12607 /*All tonemap channels will have the same number of points*/
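    /* Each curve arrives as interleaved (Pin, Pout) float pairs, which is why the point
     * count is entry.count / 2 and the inner loop below copies two floats per point. */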
12608 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12609 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12610 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12611 cam_rgb_tonemap_curves tonemapCurves;
12612 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12613 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12614 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12615 tonemapCurves.tonemap_points_cnt,
12616 CAM_MAX_TONEMAP_CURVE_SIZE);
12617 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12618 }
12619
12620 /* ch0 = G*/
12621 size_t point = 0;
12622 cam_tonemap_curve_t tonemapCurveGreen;
12623 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12624 for (size_t j = 0; j < 2; j++) {
12625 tonemapCurveGreen.tonemap_points[i][j] =
12626 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12627 point++;
12628 }
12629 }
12630 tonemapCurves.curves[0] = tonemapCurveGreen;
12631
12632 /* ch 1 = B */
12633 point = 0;
12634 cam_tonemap_curve_t tonemapCurveBlue;
12635 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12636 for (size_t j = 0; j < 2; j++) {
12637 tonemapCurveBlue.tonemap_points[i][j] =
12638 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12639 point++;
12640 }
12641 }
12642 tonemapCurves.curves[1] = tonemapCurveBlue;
12643
12644 /* ch 2 = R */
12645 point = 0;
12646 cam_tonemap_curve_t tonemapCurveRed;
12647 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12648 for (size_t j = 0; j < 2; j++) {
12649 tonemapCurveRed.tonemap_points[i][j] =
12650 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12651 point++;
12652 }
12653 }
12654 tonemapCurves.curves[2] = tonemapCurveRed;
12655
12656 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12657 tonemapCurves)) {
12658 rc = BAD_VALUE;
12659 }
12660 }
12661
12662 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12663 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12664 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12665 captureIntent)) {
12666 rc = BAD_VALUE;
12667 }
12668 }
12669
12670 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12671 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12672 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12673 blackLevelLock)) {
12674 rc = BAD_VALUE;
12675 }
12676 }
12677
12678 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12679 uint8_t lensShadingMapMode =
12680 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12681 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12682 lensShadingMapMode)) {
12683 rc = BAD_VALUE;
12684 }
12685 }
12686
12687 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12688 cam_area_t roi;
12689 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012690 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012691
12692 // Map coordinate system from active array to sensor output.
12693 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12694 roi.rect.height);
12695
12696 if (scalerCropSet) {
12697 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12698 }
12699 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12700 rc = BAD_VALUE;
12701 }
12702 }
12703
12704 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12705 cam_area_t roi;
12706 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012707 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012708
12709 // Map coordinate system from active array to sensor output.
12710 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12711 roi.rect.height);
12712
12713 if (scalerCropSet) {
12714 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12715 }
12716 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12717 rc = BAD_VALUE;
12718 }
12719 }
12720
12721 // CDS for non-HFR non-video mode
12722 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12723 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12724 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12725 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12726 LOGE("Invalid CDS mode %d!", *fwk_cds);
12727 } else {
12728 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12729 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12730 rc = BAD_VALUE;
12731 }
12732 }
12733 }
12734
Thierry Strudel04e026f2016-10-10 11:27:36 -070012735 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012736 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012737 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012738 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12739 }
12740 if (m_bVideoHdrEnabled)
12741 vhdr = CAM_VIDEO_HDR_MODE_ON;
12742
Thierry Strudel54dc9782017-02-15 12:12:10 -080012743 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12744
12745 if(vhdr != curr_hdr_state)
12746 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12747
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012748 rc = setVideoHdrMode(mParameters, vhdr);
12749 if (rc != NO_ERROR) {
12750        LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012751 }
12752
12753 //IR
12754 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12755 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12756 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012757 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12758 uint8_t isIRon = 0;
12759
12760        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012761 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12762 LOGE("Invalid IR mode %d!", fwk_ir);
12763 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012764 if(isIRon != curr_ir_state )
12765 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12766
Thierry Strudel04e026f2016-10-10 11:27:36 -070012767 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12768 CAM_INTF_META_IR_MODE, fwk_ir)) {
12769 rc = BAD_VALUE;
12770 }
12771 }
12772 }
12773
Thierry Strudel54dc9782017-02-15 12:12:10 -080012774 //Binning Correction Mode
12775 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12776 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12777 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12778 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12779 || (0 > fwk_binning_correction)) {
12780 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12781 } else {
12782 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12783 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12784 rc = BAD_VALUE;
12785 }
12786 }
12787 }
12788
Thierry Strudel269c81a2016-10-12 12:13:59 -070012789 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12790 float aec_speed;
12791 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12792 LOGD("AEC Speed :%f", aec_speed);
12793 if ( aec_speed < 0 ) {
12794            LOGE("Invalid AEC convergence speed %f!", aec_speed);
12795 } else {
12796 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12797 aec_speed)) {
12798 rc = BAD_VALUE;
12799 }
12800 }
12801 }
12802
12803 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12804 float awb_speed;
12805 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12806 LOGD("AWB Speed :%f", awb_speed);
12807 if ( awb_speed < 0 ) {
12808            LOGE("Invalid AWB convergence speed %f!", awb_speed);
12809 } else {
12810 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12811 awb_speed)) {
12812 rc = BAD_VALUE;
12813 }
12814 }
12815 }
12816
Thierry Strudel3d639192016-09-09 11:52:26 -070012817 // TNR
12818 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12819 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12820 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012821 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012822 cam_denoise_param_t tnr;
12823 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12824 tnr.process_plates =
12825 (cam_denoise_process_type_t)frame_settings.find(
12826 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12827 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012828
12829 if(b_TnrRequested != curr_tnr_state)
12830 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12831
Thierry Strudel3d639192016-09-09 11:52:26 -070012832 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12833 rc = BAD_VALUE;
12834 }
12835 }
12836
Thierry Strudel54dc9782017-02-15 12:12:10 -080012837 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012838 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012839 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012840 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12841 *exposure_metering_mode)) {
12842 rc = BAD_VALUE;
12843 }
12844 }
12845
Thierry Strudel3d639192016-09-09 11:52:26 -070012846 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12847 int32_t fwk_testPatternMode =
12848 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12849 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12850 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12851
12852 if (NAME_NOT_FOUND != testPatternMode) {
12853 cam_test_pattern_data_t testPatternData;
12854 memset(&testPatternData, 0, sizeof(testPatternData));
12855 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12856 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12857 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12858 int32_t *fwk_testPatternData =
12859 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12860 testPatternData.r = fwk_testPatternData[0];
12861 testPatternData.b = fwk_testPatternData[3];
12862 switch (gCamCapability[mCameraId]->color_arrangement) {
12863 case CAM_FILTER_ARRANGEMENT_RGGB:
12864 case CAM_FILTER_ARRANGEMENT_GRBG:
12865 testPatternData.gr = fwk_testPatternData[1];
12866 testPatternData.gb = fwk_testPatternData[2];
12867 break;
12868 case CAM_FILTER_ARRANGEMENT_GBRG:
12869 case CAM_FILTER_ARRANGEMENT_BGGR:
12870 testPatternData.gr = fwk_testPatternData[2];
12871 testPatternData.gb = fwk_testPatternData[1];
12872 break;
12873 default:
12874 LOGE("color arrangement %d is not supported",
12875 gCamCapability[mCameraId]->color_arrangement);
12876 break;
12877 }
12878 }
12879 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12880 testPatternData)) {
12881 rc = BAD_VALUE;
12882 }
12883 } else {
12884 LOGE("Invalid framework sensor test pattern mode %d",
12885 fwk_testPatternMode);
12886 }
12887 }
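// Worked example for the mapping above (sample values are hypothetical): with
// ANDROID_SENSOR_TEST_PATTERN_MODE = SOLID_COLOR and ANDROID_SENSOR_TEST_PATTERN_DATA =
// {1023, 512, 256, 64}, the HAL always takes r = data[0] = 1023 and b = data[3] = 64,
// while the two green samples follow the CFA: RGGB/GRBG sensors use gr = data[1] = 512
// and gb = data[2] = 256, and GBRG/BGGR sensors swap them, as in the switch above.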
12888
12889 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12890 size_t count = 0;
12891 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12892 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12893 gps_coords.data.d, gps_coords.count, count);
12894 if (gps_coords.count != count) {
12895 rc = BAD_VALUE;
12896 }
12897 }
12898
12899 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12900 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12901 size_t count = 0;
12902 const char *gps_methods_src = (const char *)
12903 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12904 memset(gps_methods, '\0', sizeof(gps_methods));
12905 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12906 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12907 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12908 if (GPS_PROCESSING_METHOD_SIZE != count) {
12909 rc = BAD_VALUE;
12910 }
12911 }
12912
12913 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12914 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12915 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12916 gps_timestamp)) {
12917 rc = BAD_VALUE;
12918 }
12919 }
12920
12921 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12922 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12923 cam_rotation_info_t rotation_info;
12924 if (orientation == 0) {
12925 rotation_info.rotation = ROTATE_0;
12926 } else if (orientation == 90) {
12927 rotation_info.rotation = ROTATE_90;
12928 } else if (orientation == 180) {
12929 rotation_info.rotation = ROTATE_180;
12930 } else if (orientation == 270) {
12931 rotation_info.rotation = ROTATE_270;
12932 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012933 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012934 rotation_info.streamId = snapshotStreamId;
12935 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12936 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12937 rc = BAD_VALUE;
12938 }
12939 }
12940
12941 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12942 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12943 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12944 rc = BAD_VALUE;
12945 }
12946 }
12947
12948 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12949 uint32_t thumb_quality = (uint32_t)
12950 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12951 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12952 thumb_quality)) {
12953 rc = BAD_VALUE;
12954 }
12955 }
12956
12957 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12958 cam_dimension_t dim;
12959 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12960 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12961 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12962 rc = BAD_VALUE;
12963 }
12964 }
12965
12966 // Internal metadata
12967 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12968 size_t count = 0;
12969 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12970 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12971 privatedata.data.i32, privatedata.count, count);
12972 if (privatedata.count != count) {
12973 rc = BAD_VALUE;
12974 }
12975 }
12976
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012977 // ISO/Exposure Priority
12978 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12979 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12980 cam_priority_mode_t mode =
12981 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12982 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12983 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12984 use_iso_exp_pty.previewOnly = FALSE;
12985 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12986 use_iso_exp_pty.value = *ptr;
12987
12988 if(CAM_ISO_PRIORITY == mode) {
12989 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12990 use_iso_exp_pty)) {
12991 rc = BAD_VALUE;
12992 }
12993 }
12994 else {
12995 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12996 use_iso_exp_pty)) {
12997 rc = BAD_VALUE;
12998 }
12999 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080013000
13001 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13002 rc = BAD_VALUE;
13003 }
13004 }
13005 } else {
13006 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13007 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013008 }
13009 }
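// Usage sketch (framework/vendor-tag side, not part of this function): to request
// ISO-priority capture a client sets QCAMERA3_SELECT_PRIORITY to CAM_ISO_PRIORITY and
// puts the desired ISO into the int64 QCAMERA3_USE_ISO_EXP_PRIORITY tag; with
// CAM_EXP_PRIORITY the same tag carries the exposure time instead. A valid priority
// request also forces CAM_INTF_PARM_ZSL_MODE to 1 above, while requests without these
// tags reset it to 0.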
13010
13011 // Saturation
13012 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13013 int32_t* use_saturation =
13014 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13015 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13016 rc = BAD_VALUE;
13017 }
13018 }
13019
Thierry Strudel3d639192016-09-09 11:52:26 -070013020 // EV step
13021 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13022 gCamCapability[mCameraId]->exp_compensation_step)) {
13023 rc = BAD_VALUE;
13024 }
13025
13026 // CDS info
13027 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13028 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13029 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13030
13031 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13032 CAM_INTF_META_CDS_DATA, *cdsData)) {
13033 rc = BAD_VALUE;
13034 }
13035 }
13036
Shuzhen Wang19463d72016-03-08 11:09:52 -080013037 // Hybrid AE
13038 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13039 uint8_t *hybrid_ae = (uint8_t *)
13040 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
13041
13042 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13043 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13044 rc = BAD_VALUE;
13045 }
13046 }
13047
Shuzhen Wang14415f52016-11-16 18:26:18 -080013048 // Histogram
13049 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13050 uint8_t histogramMode =
13051 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13052 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13053 histogramMode)) {
13054 rc = BAD_VALUE;
13055 }
13056 }
13057
13058 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13059 int32_t histogramBins =
13060 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13061 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13062 histogramBins)) {
13063 rc = BAD_VALUE;
13064 }
13065 }
13066
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013067 // Tracking AF
13068 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13069 uint8_t trackingAfTrigger =
13070 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13071 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13072 trackingAfTrigger)) {
13073 rc = BAD_VALUE;
13074 }
13075 }
13076
Thierry Strudel3d639192016-09-09 11:52:26 -070013077 return rc;
13078}
13079
13080/*===========================================================================
13081 * FUNCTION : captureResultCb
13082 *
13083 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13084 *
13085 * PARAMETERS :
13086 * @metadata: metadata frame information from mm-camera-interface
13087 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13088 * @userdata: opaque pointer to the QCamera3HardwareInterface instance
13089 *
13090 * RETURN : NONE
13091 *==========================================================================*/
13092void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13093 camera3_stream_buffer_t *buffer,
13094 uint32_t frame_number, bool isInputBuffer, void *userdata)
13095{
13096 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13097 if (hw == NULL) {
13098 LOGE("Invalid hw %p", hw);
13099 return;
13100 }
13101
13102 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13103 return;
13104}
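// Note on the callback pattern used here and in setBufferErrorStatus() below: channels
// are handed a plain function pointer plus an opaque userdata (`this`), e.g. when
// addOfflineReprocChannel() constructs a QCamera3ReprocessChannel, so these static
// trampolines only recover the instance from userdata and forward to the member
// function of the same name.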
13105
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013106/*===========================================================================
13107 * FUNCTION : setBufferErrorStatus
13108 *
13109 * DESCRIPTION: Callback handler for channels to report any buffer errors
13110 *
13111 * PARAMETERS :
13112 * @ch : channel reporting the buffer error
13113 * @frame_number : frame number for which the buffer error is reported
13114 * @buffer_status : buffer error status
13115 * @userdata: opaque pointer to the QCamera3HardwareInterface instance
13116 *
13117 * RETURN : NONE
13118 *==========================================================================*/
13119void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13120 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13121{
13122 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13123 if (hw == NULL) {
13124 LOGE("Invalid hw %p", hw);
13125 return;
13126 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013127
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013128 hw->setBufferErrorStatus(ch, frame_number, err);
13129 return;
13130}
13131
13132void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13133 uint32_t frameNumber, camera3_buffer_status_t err)
13134{
13135 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13136 pthread_mutex_lock(&mMutex);
13137
13138 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13139 if (req.frame_number != frameNumber)
13140 continue;
13141 for (auto& k : req.mPendingBufferList) {
13142 if(k.stream->priv == ch) {
13143 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13144 }
13145 }
13146 }
13147
13148 pthread_mutex_unlock(&mMutex);
13149 return;
13150}
Thierry Strudel3d639192016-09-09 11:52:26 -070013151/*===========================================================================
13152 * FUNCTION : initialize
13153 *
13154 * DESCRIPTION: Pass framework callback pointers to HAL
13155 *
13156 * PARAMETERS :
13157 * @device : camera3 device handle
13158 * @callback_ops : framework callbacks (process_capture_result, notify)
13159 * RETURN : Success : 0
13160 * Failure: -ENODEV
13161 *==========================================================================*/
13162
13163int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13164 const camera3_callback_ops_t *callback_ops)
13165{
13166 LOGD("E");
13167 QCamera3HardwareInterface *hw =
13168 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13169 if (!hw) {
13170 LOGE("NULL camera device");
13171 return -ENODEV;
13172 }
13173
13174 int rc = hw->initialize(callback_ops);
13175 LOGD("X");
13176 return rc;
13177}
13178
13179/*===========================================================================
13180 * FUNCTION : configure_streams
13181 *
13182 * DESCRIPTION:
13183 *
13184 * PARAMETERS :
13185 *
13186 *
13187 * RETURN : Success: 0
13188 * Failure: -EINVAL (if stream configuration is invalid)
13189 * -ENODEV (fatal error)
13190 *==========================================================================*/
13191
13192int QCamera3HardwareInterface::configure_streams(
13193 const struct camera3_device *device,
13194 camera3_stream_configuration_t *stream_list)
13195{
13196 LOGD("E");
13197 QCamera3HardwareInterface *hw =
13198 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13199 if (!hw) {
13200 LOGE("NULL camera device");
13201 return -ENODEV;
13202 }
13203 int rc = hw->configureStreams(stream_list);
13204 LOGD("X");
13205 return rc;
13206}
13207
13208/*===========================================================================
13209 * FUNCTION : construct_default_request_settings
13210 *
13211 * DESCRIPTION: Configure a settings buffer to meet the required use case
13212 *
13213 * PARAMETERS :
13214 * @device : camera3 device handle
13215 * @type : capture template type (e.g. CAMERA3_TEMPLATE_PREVIEW)
13216 * RETURN : Success: Return valid metadata
13217 * Failure: Return NULL
13218 *==========================================================================*/
13219const camera_metadata_t* QCamera3HardwareInterface::
13220 construct_default_request_settings(const struct camera3_device *device,
13221 int type)
13222{
13223
13224 LOGD("E");
13225 camera_metadata_t* fwk_metadata = NULL;
13226 QCamera3HardwareInterface *hw =
13227 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13228 if (!hw) {
13229 LOGE("NULL camera device");
13230 return NULL;
13231 }
13232
13233 fwk_metadata = hw->translateCapabilityToMetadata(type);
13234
13235 LOGD("X");
13236 return fwk_metadata;
13237}
13238
13239/*===========================================================================
13240 * FUNCTION : process_capture_request
13241 *
13242 * DESCRIPTION: Hand one framework capture request to the HAL for processing
13243 *
13244 * PARAMETERS :
13245 * @device : camera3 device handle
13246 * @request : capture request with settings and output buffers
13247 * RETURN : 0 on success; negative error code (-EINVAL/-ENODEV) on failure
13248 *==========================================================================*/
13249int QCamera3HardwareInterface::process_capture_request(
13250 const struct camera3_device *device,
13251 camera3_capture_request_t *request)
13252{
13253 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013254 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013255 QCamera3HardwareInterface *hw =
13256 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13257 if (!hw) {
13258 LOGE("NULL camera device");
13259 return -EINVAL;
13260 }
13261
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013262 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013263 LOGD("X");
13264 return rc;
13265}
13266
13267/*===========================================================================
13268 * FUNCTION : dump
13269 *
13270 * DESCRIPTION: Dump HAL state to the given file descriptor ("adb shell dumpsys media.camera")
13271 *
13272 * PARAMETERS :
13273 * @device : camera3 device handle
13274 * @fd : file descriptor to write the dump to
13275 * RETURN : None
13276 *==========================================================================*/
13277
13278void QCamera3HardwareInterface::dump(
13279 const struct camera3_device *device, int fd)
13280{
13281 /* Log level property is read when "adb shell dumpsys media.camera" is
13282 called so that the log level can be controlled without restarting
13283 the media server */
13284 getLogLevel();
13285
13286 LOGD("E");
13287 QCamera3HardwareInterface *hw =
13288 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13289 if (!hw) {
13290 LOGE("NULL camera device");
13291 return;
13292 }
13293
13294 hw->dump(fd);
13295 LOGD("X");
13296 return;
13297}
13298
13299/*===========================================================================
13300 * FUNCTION : flush
13301 *
13302 * DESCRIPTION:
13303 *
13304 * PARAMETERS :
13305 *
13306 *
13307 * RETURN :
13308 *==========================================================================*/
13309
13310int QCamera3HardwareInterface::flush(
13311 const struct camera3_device *device)
13312{
13313 int rc;
13314 LOGD("E");
13315 QCamera3HardwareInterface *hw =
13316 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13317 if (!hw) {
13318 LOGE("NULL camera device");
13319 return -EINVAL;
13320 }
13321
13322 pthread_mutex_lock(&hw->mMutex);
13323 // Validate current state
13324 switch (hw->mState) {
13325 case STARTED:
13326 /* valid state */
13327 break;
13328
13329 case ERROR:
13330 pthread_mutex_unlock(&hw->mMutex);
13331 hw->handleCameraDeviceError();
13332 return -ENODEV;
13333
13334 default:
13335 LOGI("Flush returned during state %d", hw->mState);
13336 pthread_mutex_unlock(&hw->mMutex);
13337 return 0;
13338 }
13339 pthread_mutex_unlock(&hw->mMutex);
13340
13341 rc = hw->flush(true /* restart channels */ );
13342 LOGD("X");
13343 return rc;
13344}
13345
13346/*===========================================================================
13347 * FUNCTION : close_camera_device
13348 *
13349 * DESCRIPTION:
13350 *
13351 * PARAMETERS :
13352 *
13353 *
13354 * RETURN :
13355 *==========================================================================*/
13356int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13357{
13358 int ret = NO_ERROR;
13359 QCamera3HardwareInterface *hw =
13360 reinterpret_cast<QCamera3HardwareInterface *>(
13361 reinterpret_cast<camera3_device_t *>(device)->priv);
13362 if (!hw) {
13363 LOGE("NULL camera device");
13364 return BAD_VALUE;
13365 }
13366
13367 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13368 delete hw;
13369 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013370 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013371 return ret;
13372}
13373
13374/*===========================================================================
13375 * FUNCTION : getWaveletDenoiseProcessPlate
13376 *
13377 * DESCRIPTION: query wavelet denoise process plate
13378 *
13379 * PARAMETERS : None
13380 *
13381 * RETURN : WNR process plate value
13382 *==========================================================================*/
13383cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13384{
13385 char prop[PROPERTY_VALUE_MAX];
13386 memset(prop, 0, sizeof(prop));
13387 property_get("persist.denoise.process.plates", prop, "0");
13388 int processPlate = atoi(prop);
13389 switch(processPlate) {
13390 case 0:
13391 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13392 case 1:
13393 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13394 case 2:
13395 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13396 case 3:
13397 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13398 default:
13399 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13400 }
13401}
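// Debugging sketch: the WNR process plate can be overridden from the shell before the
// camera is opened, e.g. "adb shell setprop persist.denoise.process.plates 1" to select
// CAM_WAVELET_DENOISE_CBCR_ONLY; any value outside 0-3 falls back to
// CAM_WAVELET_DENOISE_STREAMLINE_YCBCR. The same scheme applies to
// persist.tnr.process.plates in getTemporalDenoiseProcessPlate() below.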
13402
13403
13404/*===========================================================================
13405 * FUNCTION : getTemporalDenoiseProcessPlate
13406 *
13407 * DESCRIPTION: query temporal denoise process plate
13408 *
13409 * PARAMETERS : None
13410 *
13411 * RETURN : TNR process plate value
13412 *==========================================================================*/
13413cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13414{
13415 char prop[PROPERTY_VALUE_MAX];
13416 memset(prop, 0, sizeof(prop));
13417 property_get("persist.tnr.process.plates", prop, "0");
13418 int processPlate = atoi(prop);
13419 switch(processPlate) {
13420 case 0:
13421 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13422 case 1:
13423 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13424 case 2:
13425 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13426 case 3:
13427 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13428 default:
13429 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13430 }
13431}
13432
13433
13434/*===========================================================================
13435 * FUNCTION : extractSceneMode
13436 *
13437 * DESCRIPTION: Extract scene mode from frameworks set metadata
13438 *
13439 * PARAMETERS :
13440 * @frame_settings: CameraMetadata reference
13441 * @metaMode: ANDROID_CONTROL_MODE
13442 * @hal_metadata: hal metadata structure
13443 *
13444 * RETURN : NO_ERROR on success, error code on failure
13445 *==========================================================================*/
13446int32_t QCamera3HardwareInterface::extractSceneMode(
13447 const CameraMetadata &frame_settings, uint8_t metaMode,
13448 metadata_buffer_t *hal_metadata)
13449{
13450 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013451 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13452
13453 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13454 LOGD("Ignoring control mode OFF_KEEP_STATE");
13455 return NO_ERROR;
13456 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013457
13458 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13459 camera_metadata_ro_entry entry =
13460 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13461 if (0 == entry.count)
13462 return rc;
13463
13464 uint8_t fwk_sceneMode = entry.data.u8[0];
13465
13466 int val = lookupHalName(SCENE_MODES_MAP,
13467 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13468 fwk_sceneMode);
13469 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013470 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013471 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013472 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013473 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013474
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013475 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13476 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13477 }
13478
13479 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13480 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013481 cam_hdr_param_t hdr_params;
13482 hdr_params.hdr_enable = 1;
13483 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13484 hdr_params.hdr_need_1x = false;
13485 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13486 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13487 rc = BAD_VALUE;
13488 }
13489 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013490
Thierry Strudel3d639192016-09-09 11:52:26 -070013491 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13492 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13493 rc = BAD_VALUE;
13494 }
13495 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013496
13497 if (mForceHdrSnapshot) {
13498 cam_hdr_param_t hdr_params;
13499 hdr_params.hdr_enable = 1;
13500 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13501 hdr_params.hdr_need_1x = false;
13502 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13503 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13504 rc = BAD_VALUE;
13505 }
13506 }
13507
Thierry Strudel3d639192016-09-09 11:52:26 -070013508 return rc;
13509}
13510
13511/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013512 * FUNCTION : setVideoHdrMode
13513 *
13514 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13515 *
13516 * PARAMETERS :
13517 * @hal_metadata: hal metadata structure
13518 * @vhdr: requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE vendor tag value)
13519 *
13520 * RETURN : NO_ERROR on success, BAD_VALUE for an invalid mode
13521 *==========================================================================*/
13522int32_t QCamera3HardwareInterface::setVideoHdrMode(
13523 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13524{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013525 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13526 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13527 }
13528
13529 LOGE("Invalid Video HDR mode %d!", vhdr);
13530 return BAD_VALUE;
13531}
13532
13533/*===========================================================================
13534 * FUNCTION : setSensorHDR
13535 *
13536 * DESCRIPTION: Enable/disable sensor HDR.
13537 *
13538 * PARAMETERS :
13539 * @hal_metadata: hal metadata structure
13540 * @enable: boolean whether to enable/disable sensor HDR
13541 * @isVideoHdrEnable: true when invoked for video HDR (m_bSensorHDREnabled is left unchanged)
13542 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13543 *==========================================================================*/
13544int32_t QCamera3HardwareInterface::setSensorHDR(
13545 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13546{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013547 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013548 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13549
13550 if (enable) {
13551 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13552 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13553 #ifdef _LE_CAMERA_
13554 //Default to staggered HDR for IOT
13555 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13556 #else
13557 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13558 #endif
13559 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13560 }
13561
13562 bool isSupported = false;
13563 switch (sensor_hdr) {
13564 case CAM_SENSOR_HDR_IN_SENSOR:
13565 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13566 CAM_QCOM_FEATURE_SENSOR_HDR) {
13567 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013568 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013569 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013570 break;
13571 case CAM_SENSOR_HDR_ZIGZAG:
13572 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13573 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13574 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013575 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013576 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013577 break;
13578 case CAM_SENSOR_HDR_STAGGERED:
13579 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13580 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13581 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013582 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013583 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013584 break;
13585 case CAM_SENSOR_HDR_OFF:
13586 isSupported = true;
13587 LOGD("Turning off sensor HDR");
13588 break;
13589 default:
13590 LOGE("HDR mode %d not supported", sensor_hdr);
13591 rc = BAD_VALUE;
13592 break;
13593 }
13594
13595 if(isSupported) {
13596 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13597 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13598 rc = BAD_VALUE;
13599 } else {
13600 if(!isVideoHdrEnable)
13601 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013602 }
13603 }
13604 return rc;
13605}
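// Tuning sketch: when HDR is enabled, the sensor HDR flavor is taken from the
// persist.camera.sensor.hdr property, e.g. "adb shell setprop persist.camera.sensor.hdr 3"
// to request staggered HDR (3 is assumed to map to CAM_SENSOR_HDR_STAGGERED, as the
// _LE_CAMERA_ default above suggests). The mode is only sent to the backend when the
// matching CAM_QCOM_FEATURE_* capability bit is advertised; unrecognized values set
// rc = BAD_VALUE.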
13606
13607/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013608 * FUNCTION : needRotationReprocess
13609 *
13610 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13611 *
13612 * PARAMETERS : none
13613 *
13614 * RETURN : true: needed
13615 * false: no need
13616 *==========================================================================*/
13617bool QCamera3HardwareInterface::needRotationReprocess()
13618{
13619 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13620 // current rotation is not zero, and pp has the capability to process rotation
13621 LOGH("need do reprocess for rotation");
13622 return true;
13623 }
13624
13625 return false;
13626}
13627
13628/*===========================================================================
13629 * FUNCTION : needReprocess
13630 *
13631 * DESCRIPTION: if reprocess is needed
13632 *
13633 * PARAMETERS : @postprocess_mask: feature mask already applied by the source channel
13634 *
13635 * RETURN : true: needed
13636 * false: no need
13637 *==========================================================================*/
13638bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13639{
13640 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13641 // TODO: add for ZSL HDR later
13642 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13643 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13644 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13645 return true;
13646 } else {
13647 LOGH("already post processed frame");
13648 return false;
13649 }
13650 }
13651 return needRotationReprocess();
13652}
13653
13654/*===========================================================================
13655 * FUNCTION : needJpegExifRotation
13656 *
13657 * DESCRIPTION: if rotation from jpeg is needed
13658 *
13659 * PARAMETERS : none
13660 *
13661 * RETURN : true: needed
13662 * false: no need
13663 *==========================================================================*/
13664bool QCamera3HardwareInterface::needJpegExifRotation()
13665{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013666 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013667 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13668 LOGD("Need use Jpeg EXIF Rotation");
13669 return true;
13670 }
13671 return false;
13672}
13673
13674/*===========================================================================
13675 * FUNCTION : addOfflineReprocChannel
13676 *
13677 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13678 * coming from input channel
13679 *
13680 * PARAMETERS :
13681 * @config : reprocess configuration
13682 * @inputChHandle : pointer to the input (source) channel
13683 *
13684 *
13685 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13686 *==========================================================================*/
13687QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13688 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13689{
13690 int32_t rc = NO_ERROR;
13691 QCamera3ReprocessChannel *pChannel = NULL;
13692
13693 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013694 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13695 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013696 if (NULL == pChannel) {
13697 LOGE("no mem for reprocess channel");
13698 return NULL;
13699 }
13700
13701 rc = pChannel->initialize(IS_TYPE_NONE);
13702 if (rc != NO_ERROR) {
13703 LOGE("init reprocess channel failed, ret = %d", rc);
13704 delete pChannel;
13705 return NULL;
13706 }
13707
13708 // pp feature config
13709 cam_pp_feature_config_t pp_config;
13710 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13711
13712 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13713 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13714 & CAM_QCOM_FEATURE_DSDN) {
13715 //Use CPP CDS incase h/w supports it.
13716 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13717 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13718 }
13719 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13720 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13721 }
13722
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013723 if (config.hdr_param.hdr_enable) {
13724 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13725 pp_config.hdr_param = config.hdr_param;
13726 }
13727
13728 if (mForceHdrSnapshot) {
13729 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13730 pp_config.hdr_param.hdr_enable = 1;
13731 pp_config.hdr_param.hdr_need_1x = 0;
13732 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13733 }
13734
Thierry Strudel3d639192016-09-09 11:52:26 -070013735 rc = pChannel->addReprocStreamsFromSource(pp_config,
13736 config,
13737 IS_TYPE_NONE,
13738 mMetadataChannel);
13739
13740 if (rc != NO_ERROR) {
13741 delete pChannel;
13742 return NULL;
13743 }
13744 return pChannel;
13745}
13746
13747/*===========================================================================
13748 * FUNCTION : getMobicatMask
13749 *
13750 * DESCRIPTION: returns mobicat mask
13751 *
13752 * PARAMETERS : none
13753 *
13754 * RETURN : mobicat mask
13755 *
13756 *==========================================================================*/
13757uint8_t QCamera3HardwareInterface::getMobicatMask()
13758{
13759 return m_MobicatMask;
13760}
13761
13762/*===========================================================================
13763 * FUNCTION : setMobicat
13764 *
13765 * DESCRIPTION: set Mobicat on/off.
13766 *
13767 * PARAMETERS :
13768 * @params : none
13769 *
13770 * RETURN : int32_t type of status
13771 * NO_ERROR -- success
13772 * none-zero failure code
13773 *==========================================================================*/
13774int32_t QCamera3HardwareInterface::setMobicat()
13775{
Thierry Strudel3d639192016-09-09 11:52:26 -070013776 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013777
Shuzhen Wangb57ec912017-07-31 13:24:27 -070013778 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070013779 tune_cmd_t tune_cmd;
13780 tune_cmd.type = SET_RELOAD_CHROMATIX;
13781 tune_cmd.module = MODULE_ALL;
13782 tune_cmd.value = TRUE;
13783 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13784 CAM_INTF_PARM_SET_VFE_COMMAND,
13785 tune_cmd);
13786
13787 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13788 CAM_INTF_PARM_SET_PP_COMMAND,
13789 tune_cmd);
13790 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013791
13792 return ret;
13793}
13794
13795/*===========================================================================
13796* FUNCTION : getLogLevel
13797*
13798* DESCRIPTION: Reads the log level property into a variable
13799*
13800* PARAMETERS :
13801* None
13802*
13803* RETURN :
13804* None
13805*==========================================================================*/
13806void QCamera3HardwareInterface::getLogLevel()
13807{
13808 char prop[PROPERTY_VALUE_MAX];
13809 uint32_t globalLogLevel = 0;
13810
13811 property_get("persist.camera.hal.debug", prop, "0");
13812 int val = atoi(prop);
13813 if (0 <= val) {
13814 gCamHal3LogLevel = (uint32_t)val;
13815 }
13816
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013817 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013818 gKpiDebugLevel = atoi(prop);
13819
13820 property_get("persist.camera.global.debug", prop, "0");
13821 val = atoi(prop);
13822 if (0 <= val) {
13823 globalLogLevel = (uint32_t)val;
13824 }
13825
13826 /* Highest log level among hal.logs and global.logs is selected */
13827 if (gCamHal3LogLevel < globalLogLevel)
13828 gCamHal3LogLevel = globalLogLevel;
13829
13830 return;
13831}
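// Example (property values are arbitrary): with persist.camera.hal.debug=2 and
// persist.camera.global.debug=4 the effective gCamHal3LogLevel becomes 4, since the
// higher of the two wins. Because dump() re-reads these properties, the level can be
// raised at runtime with e.g.
//   adb shell setprop persist.camera.global.debug 4
//   adb shell dumpsys media.camera
// without restarting the camera process.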
13832
13833/*===========================================================================
13834 * FUNCTION : validateStreamRotations
13835 *
13836 * DESCRIPTION: Check if the rotations requested are supported
13837 *
13838 * PARAMETERS :
13839 * @stream_list : streams to be configured
13840 *
13841 * RETURN : NO_ERROR on success
13842 * -EINVAL on failure
13843 *
13844 *==========================================================================*/
13845int QCamera3HardwareInterface::validateStreamRotations(
13846 camera3_stream_configuration_t *streamList)
13847{
13848 int rc = NO_ERROR;
13849
13850 /*
13851 * Loop through all streams requested in configuration
13852 * Check if unsupported rotations have been requested on any of them
13853 */
13854 for (size_t j = 0; j < streamList->num_streams; j++){
13855 camera3_stream_t *newStream = streamList->streams[j];
13856
Emilian Peev35ceeed2017-06-29 11:58:56 -070013857 switch(newStream->rotation) {
13858 case CAMERA3_STREAM_ROTATION_0:
13859 case CAMERA3_STREAM_ROTATION_90:
13860 case CAMERA3_STREAM_ROTATION_180:
13861 case CAMERA3_STREAM_ROTATION_270:
13862 //Expected values
13863 break;
13864 default:
13865 ALOGE("%s: Error: Unsupported rotation of %d requested for stream "
13866 "type:%d and stream format:%d", __func__,
13867 newStream->rotation, newStream->stream_type,
13868 newStream->format);
13869 return -EINVAL;
13870 }
13871
Thierry Strudel3d639192016-09-09 11:52:26 -070013872 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13873 bool isImplDef = (newStream->format ==
13874 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13875 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13876 isImplDef);
13877
13878 if (isRotated && (!isImplDef || isZsl)) {
13879 LOGE("Error: Unsupported rotation of %d requested for stream "
13880 "type:%d and stream format:%d",
13881 newStream->rotation, newStream->stream_type,
13882 newStream->format);
13883 rc = -EINVAL;
13884 break;
13885 }
13886 }
13887
13888 return rc;
13889}
13890
13891/*===========================================================================
13892* FUNCTION : getFlashInfo
13893*
13894* DESCRIPTION: Retrieve information about whether the device has a flash.
13895*
13896* PARAMETERS :
13897* @cameraId : Camera id to query
13898* @hasFlash : Boolean indicating whether there is a flash device
13899* associated with given camera
13900* @flashNode : If a flash device exists, this will be its device node.
13901*
13902* RETURN :
13903* None
13904*==========================================================================*/
13905void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13906 bool& hasFlash,
13907 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13908{
13909 cam_capability_t* camCapability = gCamCapability[cameraId];
13910 if (NULL == camCapability) {
13911 hasFlash = false;
13912 flashNode[0] = '\0';
13913 } else {
13914 hasFlash = camCapability->flash_available;
13915 strlcpy(flashNode,
13916 (char*)camCapability->flash_dev_name,
13917 QCAMERA_MAX_FILEPATH_LENGTH);
13918 }
13919}
13920
13921/*===========================================================================
13922* FUNCTION : getEepromVersionInfo
13923*
13924* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13925*
13926* PARAMETERS : None
13927*
13928* RETURN : string describing EEPROM version
13929* "\0" if no such info available
13930*==========================================================================*/
13931const char *QCamera3HardwareInterface::getEepromVersionInfo()
13932{
13933 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13934}
13935
13936/*===========================================================================
13937* FUNCTION : getLdafCalib
13938*
13939* DESCRIPTION: Retrieve Laser AF calibration data
13940*
13941* PARAMETERS : None
13942*
13943* RETURN : Two uint32_t describing laser AF calibration data
13944* NULL if none is available.
13945*==========================================================================*/
13946const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13947{
13948 if (mLdafCalibExist) {
13949 return &mLdafCalib[0];
13950 } else {
13951 return NULL;
13952 }
13953}
13954
13955/*===========================================================================
13956 * FUNCTION : dynamicUpdateMetaStreamInfo
13957 *
13958 * DESCRIPTION: This function:
13959 * (1) stops all the channels
13960 * (2) returns error on pending requests and buffers
13961 * (3) sends metastream_info in setparams
13962 * (4) starts all channels
13963 * This is useful when sensor has to be restarted to apply any
13964 * settings such as frame rate from a different sensor mode
13965 *
13966 * PARAMETERS : None
13967 *
13968 * RETURN : NO_ERROR on success
13969 * Error codes on failure
13970 *
13971 *==========================================================================*/
13972int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13973{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013974 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013975 int rc = NO_ERROR;
13976
13977 LOGD("E");
13978
13979 rc = stopAllChannels();
13980 if (rc < 0) {
13981 LOGE("stopAllChannels failed");
13982 return rc;
13983 }
13984
13985 rc = notifyErrorForPendingRequests();
13986 if (rc < 0) {
13987 LOGE("notifyErrorForPendingRequests failed");
13988 return rc;
13989 }
13990
13991 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13992 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x, "
13993 "Format:%d",
13994 mStreamConfigInfo.type[i],
13995 mStreamConfigInfo.stream_sizes[i].width,
13996 mStreamConfigInfo.stream_sizes[i].height,
13997 mStreamConfigInfo.postprocess_mask[i],
13998 mStreamConfigInfo.format[i]);
13999 }
14000
14001 /* Send meta stream info once again so that ISP can start */
14002 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14003 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14004 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14005 mParameters);
14006 if (rc < 0) {
14007 LOGE("set Metastreaminfo failed. Sensor mode does not change");
14008 }
14009
14010 rc = startAllChannels();
14011 if (rc < 0) {
14012 LOGE("startAllChannels failed");
14013 return rc;
14014 }
14015
14016 LOGD("X");
14017 return rc;
14018}
14019
14020/*===========================================================================
14021 * FUNCTION : stopAllChannels
14022 *
14023 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14024 *
14025 * PARAMETERS : None
14026 *
14027 * RETURN : NO_ERROR on success
14028 * Error codes on failure
14029 *
14030 *==========================================================================*/
14031int32_t QCamera3HardwareInterface::stopAllChannels()
14032{
14033 int32_t rc = NO_ERROR;
14034
14035 LOGD("Stopping all channels");
14036 // Stop the Streams/Channels
14037 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14038 it != mStreamInfo.end(); it++) {
14039 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14040 if (channel) {
14041 channel->stop();
14042 }
14043 (*it)->status = INVALID;
14044 }
14045
14046 if (mSupportChannel) {
14047 mSupportChannel->stop();
14048 }
14049 if (mAnalysisChannel) {
14050 mAnalysisChannel->stop();
14051 }
14052 if (mRawDumpChannel) {
14053 mRawDumpChannel->stop();
14054 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014055 if (mHdrPlusRawSrcChannel) {
14056 mHdrPlusRawSrcChannel->stop();
14057 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014058 if (mMetadataChannel) {
14059 /* If content of mStreamInfo is not 0, there is metadata stream */
14060 mMetadataChannel->stop();
14061 }
14062
14063 LOGD("All channels stopped");
14064 return rc;
14065}
14066
14067/*===========================================================================
14068 * FUNCTION : startAllChannels
14069 *
14070 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14071 *
14072 * PARAMETERS : None
14073 *
14074 * RETURN : NO_ERROR on success
14075 * Error codes on failure
14076 *
14077 *==========================================================================*/
14078int32_t QCamera3HardwareInterface::startAllChannels()
14079{
14080 int32_t rc = NO_ERROR;
14081
14082 LOGD("Start all channels ");
14083 // Start the Streams/Channels
14084 if (mMetadataChannel) {
14085 /* If content of mStreamInfo is not 0, there is metadata stream */
14086 rc = mMetadataChannel->start();
14087 if (rc < 0) {
14088 LOGE("META channel start failed");
14089 return rc;
14090 }
14091 }
14092 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14093 it != mStreamInfo.end(); it++) {
14094 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14095 if (channel) {
14096 rc = channel->start();
14097 if (rc < 0) {
14098 LOGE("channel start failed");
14099 return rc;
14100 }
14101 }
14102 }
14103 if (mAnalysisChannel) {
14104 mAnalysisChannel->start();
14105 }
14106 if (mSupportChannel) {
14107 rc = mSupportChannel->start();
14108 if (rc < 0) {
14109 LOGE("Support channel start failed");
14110 return rc;
14111 }
14112 }
14113 if (mRawDumpChannel) {
14114 rc = mRawDumpChannel->start();
14115 if (rc < 0) {
14116 LOGE("RAW dump channel start failed");
14117 return rc;
14118 }
14119 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014120 if (mHdrPlusRawSrcChannel) {
14121 rc = mHdrPlusRawSrcChannel->start();
14122 if (rc < 0) {
14123 LOGE("HDR+ RAW channel start failed");
14124 return rc;
14125 }
14126 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014127
14128 LOGD("All channels started");
14129 return rc;
14130}
14131
14132/*===========================================================================
14133 * FUNCTION : notifyErrorForPendingRequests
14134 *
14135 * DESCRIPTION: This function sends error for all the pending requests/buffers
14136 *
14137 * PARAMETERS : None
14138 *
14139 * RETURN : Error codes
14140 * NO_ERROR on success
14141 *
14142 *==========================================================================*/
14143int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14144{
Emilian Peev7650c122017-01-19 08:24:33 -080014145 notifyErrorFoPendingDepthData(mDepthChannel);
14146
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014147 auto pendingRequest = mPendingRequestsList.begin();
14148 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014149
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014150 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14151 // buffers (for which buffers aren't sent yet).
14152 while (pendingRequest != mPendingRequestsList.end() ||
14153 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14154 if (pendingRequest == mPendingRequestsList.end() ||
14155 pendingBuffer->frame_number < pendingRequest->frame_number) {
14156 // If result metadata for this frame was already sent, notify about a buffer error and return buffers
14157 // with error.
14158 for (auto &info : pendingBuffer->mPendingBufferList) {
14159 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014160 camera3_notify_msg_t notify_msg;
14161 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14162 notify_msg.type = CAMERA3_MSG_ERROR;
14163 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014164 notify_msg.message.error.error_stream = info.stream;
14165 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014166 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014167
14168 camera3_stream_buffer_t buffer = {};
14169 buffer.acquire_fence = -1;
14170 buffer.release_fence = -1;
14171 buffer.buffer = info.buffer;
14172 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14173 buffer.stream = info.stream;
14174 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014175 }
14176
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014177 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14178 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14179 pendingBuffer->frame_number > pendingRequest->frame_number) {
14180 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014181 camera3_notify_msg_t notify_msg;
14182 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14183 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014184 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14185 notify_msg.message.error.error_stream = nullptr;
14186 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014187 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014188
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014189 if (pendingRequest->input_buffer != nullptr) {
14190 camera3_capture_result result = {};
14191 result.frame_number = pendingRequest->frame_number;
14192 result.result = nullptr;
14193 result.input_buffer = pendingRequest->input_buffer;
14194 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014195 }
14196
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014197 mShutterDispatcher.clear(pendingRequest->frame_number);
14198 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14199 } else {
14200 // If both buffers and result metadata weren't sent yet, notify about a request error
14201 // and return buffers with error.
14202 for (auto &info : pendingBuffer->mPendingBufferList) {
14203 camera3_notify_msg_t notify_msg;
14204 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14205 notify_msg.type = CAMERA3_MSG_ERROR;
14206 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14207 notify_msg.message.error.error_stream = info.stream;
14208 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14209 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014210
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014211 camera3_stream_buffer_t buffer = {};
14212 buffer.acquire_fence = -1;
14213 buffer.release_fence = -1;
14214 buffer.buffer = info.buffer;
14215 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14216 buffer.stream = info.stream;
14217 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14218 }
14219
14220 if (pendingRequest->input_buffer != nullptr) {
14221 camera3_capture_result result = {};
14222 result.frame_number = pendingRequest->frame_number;
14223 result.result = nullptr;
14224 result.input_buffer = pendingRequest->input_buffer;
14225 orchestrateResult(&result);
14226 }
14227
14228 mShutterDispatcher.clear(pendingRequest->frame_number);
14229 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14230 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014231 }
14232 }
14233
14234 /* Reset pending frame Drop list and requests list */
14235 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014236 mShutterDispatcher.clear();
14237 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014238 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014239 mExpectedFrameDuration = 0;
14240 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014241 LOGH("Cleared all the pending buffers ");
14242
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014243 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014244}
14245
14246bool QCamera3HardwareInterface::isOnEncoder(
14247 const cam_dimension_t max_viewfinder_size,
14248 uint32_t width, uint32_t height)
14249{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014250 return ((width > (uint32_t)max_viewfinder_size.width) ||
14251 (height > (uint32_t)max_viewfinder_size.height) ||
14252 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14253 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014254}
14255
14256/*===========================================================================
14257 * FUNCTION : setBundleInfo
14258 *
14259 * DESCRIPTION: Set bundle info for all streams that are bundle.
14260 *
14261 * PARAMETERS : None
14262 *
14263 * RETURN : NO_ERROR on success
14264 * Error codes on failure
14265 *==========================================================================*/
14266int32_t QCamera3HardwareInterface::setBundleInfo()
14267{
14268 int32_t rc = NO_ERROR;
14269
14270 if (mChannelHandle) {
14271 cam_bundle_config_t bundleInfo;
14272 memset(&bundleInfo, 0, sizeof(bundleInfo));
14273 rc = mCameraHandle->ops->get_bundle_info(
14274 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14275 if (rc != NO_ERROR) {
14276 LOGE("get_bundle_info failed");
14277 return rc;
14278 }
14279 if (mAnalysisChannel) {
14280 mAnalysisChannel->setBundleInfo(bundleInfo);
14281 }
14282 if (mSupportChannel) {
14283 mSupportChannel->setBundleInfo(bundleInfo);
14284 }
14285 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14286 it != mStreamInfo.end(); it++) {
14287 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14288 channel->setBundleInfo(bundleInfo);
14289 }
14290 if (mRawDumpChannel) {
14291 mRawDumpChannel->setBundleInfo(bundleInfo);
14292 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014293 if (mHdrPlusRawSrcChannel) {
14294 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14295 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014296 }
14297
14298 return rc;
14299}
14300
14301/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014302 * FUNCTION : setInstantAEC
14303 *
14304 * DESCRIPTION: Set Instant AEC related params.
14305 *
14306 * PARAMETERS :
14307 * @meta: CameraMetadata reference
14308 *
14309 * RETURN : NO_ERROR on success
14310 * Error codes on failure
14311 *==========================================================================*/
14312int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14313{
14314 int32_t rc = NO_ERROR;
14315 uint8_t val = 0;
14316 char prop[PROPERTY_VALUE_MAX];
14317
14318 // First try to configure instant AEC from framework metadata
14319 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14320 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14321 }
14322
14323 // If framework did not set this value, try to read from set prop.
14324 if (val == 0) {
14325 memset(prop, 0, sizeof(prop));
14326 property_get("persist.camera.instant.aec", prop, "0");
14327 val = (uint8_t)atoi(prop);
14328 }
14329
14330 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14331 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14332 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14333 mInstantAEC = val;
14334 mInstantAECSettledFrameNumber = 0;
14335 mInstantAecFrameIdxCount = 0;
14336 LOGH("instantAEC value set %d",val);
14337 if (mInstantAEC) {
14338 memset(prop, 0, sizeof(prop));
14339 property_get("persist.camera.ae.instant.bound", prop, "10");
14340 int32_t aec_frame_skip_cnt = atoi(prop);
14341 if (aec_frame_skip_cnt >= 0) {
14342 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14343 } else {
14344 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14345 rc = BAD_VALUE;
14346 }
14347 }
14348 } else {
14349 LOGE("Bad instant aec value set %d", val);
14350 rc = BAD_VALUE;
14351 }
14352 return rc;
14353}
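// Configuration sketch: instant AEC can be requested per-session through the
// QCAMERA3_INSTANT_AEC_MODE vendor tag, or globally with
// "adb shell setprop persist.camera.instant.aec <mode>", where <mode> must lie in
// [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX). While AEC settles, the number
// of display frames treated as not yet converged is bounded by
// persist.camera.ae.instant.bound (default 10).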
14354
14355/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014356 * FUNCTION : get_num_overall_buffers
14357 *
14358 * DESCRIPTION: Return the total number of pending buffers across all requests.
14359 *
14360 * PARAMETERS : None
14361 *
14362 * RETURN : Number of overall pending buffers
14363 *
14364 *==========================================================================*/
14365uint32_t PendingBuffersMap::get_num_overall_buffers()
14366{
14367 uint32_t sum_buffers = 0;
14368 for (auto &req : mPendingBuffersInRequest) {
14369 sum_buffers += req.mPendingBufferList.size();
14370 }
14371 return sum_buffers;
14372}
14373
14374/*===========================================================================
14375 * FUNCTION : removeBuf
14376 *
14377 * DESCRIPTION: Remove a matching buffer from tracker.
14378 *
14379 * PARAMETERS : @buffer: image buffer for the callback
14380 *
14381 * RETURN : None
14382 *
14383 *==========================================================================*/
14384void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14385{
14386 bool buffer_found = false;
14387 for (auto req = mPendingBuffersInRequest.begin();
14388 req != mPendingBuffersInRequest.end(); req++) {
14389 for (auto k = req->mPendingBufferList.begin();
14390 k != req->mPendingBufferList.end(); k++ ) {
14391 if (k->buffer == buffer) {
14392 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14393 req->frame_number, buffer);
14394 k = req->mPendingBufferList.erase(k);
14395 if (req->mPendingBufferList.empty()) {
14396 // Remove this request from Map
14397 req = mPendingBuffersInRequest.erase(req);
14398 }
14399 buffer_found = true;
14400 break;
14401 }
14402 }
14403 if (buffer_found) {
14404 break;
14405 }
14406 }
14407 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14408 get_num_overall_buffers());
14409}
14410
14411/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014412 * FUNCTION : getBufErrStatus
14413 *
14414 * DESCRIPTION: get buffer error status
14415 *
14416 * PARAMETERS : @buffer: buffer handle
14417 *
14418 * RETURN : Error status
14419 *
14420 *==========================================================================*/
14421int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14422{
14423 for (auto& req : mPendingBuffersInRequest) {
14424 for (auto& k : req.mPendingBufferList) {
14425 if (k.buffer == buffer)
14426 return k.bufStatus;
14427 }
14428 }
14429 return CAMERA3_BUFFER_STATUS_OK;
14430}
14431
14432/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014433 * FUNCTION : setPAAFSupport
14434 *
14435 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14436 * feature mask according to stream type and filter
14437 * arrangement
14438 *
14439 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14440 * @stream_type: stream type
14441 * @filter_arrangement: filter arrangement
14442 *
14443 * RETURN : None
14444 *==========================================================================*/
14445void QCamera3HardwareInterface::setPAAFSupport(
14446 cam_feature_mask_t& feature_mask,
14447 cam_stream_type_t stream_type,
14448 cam_color_filter_arrangement_t filter_arrangement)
14449{
Thierry Strudel3d639192016-09-09 11:52:26 -070014450 switch (filter_arrangement) {
14451 case CAM_FILTER_ARRANGEMENT_RGGB:
14452 case CAM_FILTER_ARRANGEMENT_GRBG:
14453 case CAM_FILTER_ARRANGEMENT_GBRG:
14454 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014455 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14456 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014457 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
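            // Skip PAAF if the QTI PPEISCORE (EIS) post-processing feature is
            // already enabled for this stream.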
Thierry Strudel2896d122017-02-23 19:18:03 -080014458 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14459 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014460 }
14461 break;
14462 case CAM_FILTER_ARRANGEMENT_Y:
14463 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14464 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14465 }
14466 break;
14467 default:
14468 break;
14469 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014470 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14471 feature_mask, stream_type, filter_arrangement);
14472
14473
Thierry Strudel3d639192016-09-09 11:52:26 -070014474}
14475
14476/*===========================================================================
14477* FUNCTION : getSensorMountAngle
14478*
14479* DESCRIPTION: Retrieve sensor mount angle
14480*
14481* PARAMETERS : None
14482*
14483* RETURN : sensor mount angle in uint32_t
14484*==========================================================================*/
14485uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14486{
14487 return gCamCapability[mCameraId]->sensor_mount_angle;
14488}
14489
14490/*===========================================================================
14491* FUNCTION : getRelatedCalibrationData
14492*
14493* DESCRIPTION: Retrieve related system calibration data
14494*
14495* PARAMETERS : None
14496*
14497* RETURN : Pointer of related system calibration data
14498*==========================================================================*/
14499const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14500{
14501 return (const cam_related_system_calibration_data_t *)
14502 &(gCamCapability[mCameraId]->related_cam_calibration);
14503}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014504
14505/*===========================================================================
14506 * FUNCTION : is60HzZone
14507 *
 * DESCRIPTION: Whether the phone is in a zone with 60Hz mains electricity frequency
14509 *
14510 * PARAMETERS : None
14511 *
14512 * RETURN : True if in 60Hz zone, False otherwise
14513 *==========================================================================*/
14514bool QCamera3HardwareInterface::is60HzZone()
14515{
14516 time_t t = time(NULL);
14517 struct tm lt;
14518
14519 struct tm* r = localtime_r(&t, &lt);
14520
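    // Rough heuristic based on the UTC offset of the local time zone: offsets
    // at or below -2 hours or at or above +8 hours are treated as 60Hz zones.
    // If the local time cannot be determined, default to reporting 60Hz.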
14521 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14522 return true;
14523 else
14524 return false;
14525}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014526
14527/*===========================================================================
14528 * FUNCTION : adjustBlackLevelForCFA
14529 *
 * DESCRIPTION: Adjust the black level pattern given in RGGB order to the
 *              order of the sensor's Bayer CFA (Color Filter Array).
14532 *
14533 * PARAMETERS : @input: black level pattern in the order of RGGB
14534 * @output: black level pattern in the order of CFA
14535 * @color_arrangement: CFA color arrangement
14536 *
14537 * RETURN : None
14538 *==========================================================================*/
14539template<typename T>
14540void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14541 T input[BLACK_LEVEL_PATTERN_CNT],
14542 T output[BLACK_LEVEL_PATTERN_CNT],
14543 cam_color_filter_arrangement_t color_arrangement)
14544{
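    // Example: with input = {R, Gr, Gb, B} and a GRBG sensor, the output
    // becomes {Gr, R, B, Gb} to match the CFA order.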
14545 switch (color_arrangement) {
14546 case CAM_FILTER_ARRANGEMENT_GRBG:
14547 output[0] = input[1];
14548 output[1] = input[0];
14549 output[2] = input[3];
14550 output[3] = input[2];
14551 break;
14552 case CAM_FILTER_ARRANGEMENT_GBRG:
14553 output[0] = input[2];
14554 output[1] = input[3];
14555 output[2] = input[0];
14556 output[3] = input[1];
14557 break;
14558 case CAM_FILTER_ARRANGEMENT_BGGR:
14559 output[0] = input[3];
14560 output[1] = input[2];
14561 output[2] = input[1];
14562 output[3] = input[0];
14563 break;
14564 case CAM_FILTER_ARRANGEMENT_RGGB:
14565 output[0] = input[0];
14566 output[1] = input[1];
14567 output[2] = input[2];
14568 output[3] = input[3];
14569 break;
14570 default:
14571 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14572 break;
14573 }
14574}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014575
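/*===========================================================================
 * FUNCTION   : updateHdrPlusResultMetadata
 *
 * DESCRIPTION: Copy the JPEG-related settings (GPS tags, orientation, quality
 *              and thumbnail parameters) and the capture intent of the HDR+
 *              still capture request into the HDR+ result metadata, because
 *              the result metadata belongs to a ZSL buffer.
 *
 * PARAMETERS : @resultMetadata: result metadata to update
 *              @settings: settings of the HDR+ still capture request
 *
 * RETURN     : None
 *==========================================================================*/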
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014576void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14577 CameraMetadata &resultMetadata,
14578 std::shared_ptr<metadata_buffer_t> settings)
14579{
14580 if (settings == nullptr) {
14581 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14582 return;
14583 }
14584
14585 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14586 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14587 }
14588
14589 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14590 String8 str((const char *)gps_methods);
14591 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14592 }
14593
14594 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14595 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14596 }
14597
14598 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14599 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14600 }
14601
14602 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14603 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14604 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14605 }
14606
14607 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14608 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14609 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14610 }
14611
14612 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14613 int32_t fwk_thumb_size[2];
14614 fwk_thumb_size[0] = thumb_size->width;
14615 fwk_thumb_size[1] = thumb_size->height;
14616 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14617 }
14618
14619 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14620 uint8_t fwk_intent = intent[0];
14621 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14622 }
14623}
14624
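/*===========================================================================
 * FUNCTION   : trySubmittingHdrPlusRequestLocked
 *
 * DESCRIPTION: Check whether a capture request qualifies for HDR+ (high
 *              quality noise reduction and edge modes, and a single JPEG
 *              output buffer) and, if it does, submit it to the HDR+ service
 *              using a YUV buffer obtained from the pic channel. The "Locked"
 *              suffix indicates the caller is expected to hold the relevant
 *              HAL lock.
 *
 * PARAMETERS : @hdrPlusRequest: pending HDR+ request to fill in on success
 *              @request: capture request from the framework
 *              @metadata: capture request settings
 *
 * RETURN     : true if the request was submitted as an HDR+ request,
 *              false otherwise
 *==========================================================================*/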
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014625bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14626 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14627 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014628{
14629 if (hdrPlusRequest == nullptr) return false;
14630
14631 // Check noise reduction mode is high quality.
14632 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14633 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14634 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
        camera_metadata_ro_entry_t entry = metadata.find(ANDROID_NOISE_REDUCTION_MODE);
        ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
                entry.count > 0 ? entry.data.u8[0] : -1);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014637 return false;
14638 }
14639
14640 // Check edge mode is high quality.
14641 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14642 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14643 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14644 return false;
14645 }
14646
14647 if (request.num_output_buffers != 1 ||
14648 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14649 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014650 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14651 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
                    request.output_buffers[i].stream->width,
                    request.output_buffers[i].stream->height,
                    request.output_buffers[i].stream->format);
14655 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014656 return false;
14657 }
14658
14659 // Get a YUV buffer from pic channel.
14660 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14661 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14662 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14663 if (res != OK) {
14664 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14665 __FUNCTION__, strerror(-res), res);
14666 return false;
14667 }
14668
14669 pbcamera::StreamBuffer buffer;
14670 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014671 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014672 buffer.data = yuvBuffer->buffer;
14673 buffer.dataSize = yuvBuffer->frame_len;
14674
14675 pbcamera::CaptureRequest pbRequest;
14676 pbRequest.id = request.frame_number;
14677 pbRequest.outputBuffers.push_back(buffer);
14678
14679 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014680 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014681 if (res != OK) {
14682 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14683 strerror(-res), res);
14684 return false;
14685 }
14686
14687 hdrPlusRequest->yuvBuffer = yuvBuffer;
14688 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14689
14690 return true;
14691}
14692
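/*===========================================================================
 * FUNCTION   : openHdrPlusClientAsyncLocked
 *
 * DESCRIPTION: Ask the Easel manager client to open an HDR+ client
 *              asynchronously, if one is not already open or being opened.
 *              onOpened() or onOpenFailed() is invoked when the open
 *              completes.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success
 *              Error codes on failure
 *==========================================================================*/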
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014693status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14694{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014695 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14696 return OK;
14697 }
14698
Chien-Yu Chen44abb642017-06-02 18:00:38 -070014699 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014700 if (res != OK) {
14701 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14702 strerror(-res), res);
14703 return res;
14704 }
14705 gHdrPlusClientOpening = true;
14706
14707 return OK;
14708}
14709
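/*===========================================================================
 * FUNCTION   : enableHdrPlusModeLocked
 *
 * DESCRIPTION: Enable HDR+ mode. If the HDR+ client is not open yet, kick off
 *              an asynchronous open and return; HDR+ mode is enabled once the
 *              client is opened. Otherwise, configure the HDR+ streams and
 *              enable ZSL HDR+ mode so Easel starts capturing ZSL raw buffers.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success
 *              Error codes on failure
 *==========================================================================*/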
Chien-Yu Chenee335912017-02-09 17:53:20 -080014710status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14711{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014712 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014713
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014714 if (mHdrPlusModeEnabled) {
14715 return OK;
14716 }
14717
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014718 // Check if gHdrPlusClient is opened or being opened.
14719 if (gHdrPlusClient == nullptr) {
14720 if (gHdrPlusClientOpening) {
14721 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14722 return OK;
14723 }
14724
14725 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014726 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014727 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14728 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014729 return res;
14730 }
14731
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014732 // When opening HDR+ client completes, HDR+ mode will be enabled.
14733 return OK;
14734
Chien-Yu Chenee335912017-02-09 17:53:20 -080014735 }
14736
14737 // Configure stream for HDR+.
14738 res = configureHdrPlusStreamsLocked();
14739 if (res != OK) {
14740 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014741 return res;
14742 }
14743
14744 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14745 res = gHdrPlusClient->setZslHdrPlusMode(true);
14746 if (res != OK) {
14747 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014748 return res;
14749 }
14750
14751 mHdrPlusModeEnabled = true;
14752 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14753
14754 return OK;
14755}
14756
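/*===========================================================================
 * FUNCTION   : disableHdrPlusModeLocked
 *
 * DESCRIPTION: Disable ZSL HDR+ mode and close the HDR+ client so Easel can
 *              enter low power mode.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/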
14757void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14758{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014759 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014760 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014761 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14762 if (res != OK) {
14763 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14764 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014765
14766 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chen44abb642017-06-02 18:00:38 -070014767 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014768 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014769 }
14770
14771 mHdrPlusModeEnabled = false;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014772 gHdrPlusClientOpening = false;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014773 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14774}
14775
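/*===========================================================================
 * FUNCTION   : configureHdrPlusStreamsLocked
 *
 * DESCRIPTION: Configure the HDR+ client streams. The input is either the
 *              HAL's HDR+ RAW source channel or the sensor MIPI sending RAW10
 *              directly to Easel; the YUV output stream maps to the pic
 *              channel.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success
 *              Error codes on failure
 *==========================================================================*/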
14776status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014777{
14778 pbcamera::InputConfiguration inputConfig;
14779 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14780 status_t res = OK;
14781
14782 // Configure HDR+ client streams.
14783 // Get input config.
14784 if (mHdrPlusRawSrcChannel) {
14785 // HDR+ input buffers will be provided by HAL.
14786 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14787 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14788 if (res != OK) {
14789 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14790 __FUNCTION__, strerror(-res), res);
14791 return res;
14792 }
14793
14794 inputConfig.isSensorInput = false;
14795 } else {
14796 // Sensor MIPI will send data to Easel.
14797 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014798 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014799 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14800 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14801 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14802 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14803 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
Yin-Chia Yeheeb10422017-05-23 11:37:46 -070014804 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014805 if (mSensorModeInfo.num_raw_bits != 10) {
14806 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14807 mSensorModeInfo.num_raw_bits);
14808 return BAD_VALUE;
14809 }
14810
14811 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014812 }
14813
14814 // Get output configurations.
14815 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014816 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014817
14818 // Easel may need to output YUV output buffers if mPictureChannel was created.
14819 pbcamera::StreamConfiguration yuvOutputConfig;
14820 if (mPictureChannel != nullptr) {
14821 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14822 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14823 if (res != OK) {
14824 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14825 __FUNCTION__, strerror(-res), res);
14826
14827 return res;
14828 }
14829
14830 outputStreamConfigs.push_back(yuvOutputConfig);
14831 }
14832
14833 // TODO: consider other channels for YUV output buffers.
14834
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014835 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014836 if (res != OK) {
14837 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14838 strerror(-res), res);
14839 return res;
14840 }
14841
14842 return OK;
14843}
14844
Chien-Yu Chen933db802017-07-14 14:31:53 -070014845void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
14846{
14847 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
14848 // Set HAL state to error.
14849 pthread_mutex_lock(&mMutex);
14850 mState = ERROR;
14851 pthread_mutex_unlock(&mMutex);
14852
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070014853 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen933db802017-07-14 14:31:53 -070014854}
14855
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014856void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
14857{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014858 if (client == nullptr) {
14859 ALOGE("%s: Opened client is null.", __FUNCTION__);
14860 return;
14861 }
14862
Chien-Yu Chene96475e2017-04-11 11:53:26 -070014863 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014864 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14865
14866 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014867 if (!gHdrPlusClientOpening) {
14868 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
14869 return;
14870 }
14871
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014872 gHdrPlusClient = std::move(client);
14873 gHdrPlusClientOpening = false;
14874
14875 // Set static metadata.
14876 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14877 if (res != OK) {
14878 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14879 __FUNCTION__, strerror(-res), res);
Chien-Yu Chen44abb642017-06-02 18:00:38 -070014880 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014881 gHdrPlusClient = nullptr;
14882 return;
14883 }
14884
14885 // Enable HDR+ mode.
14886 res = enableHdrPlusModeLocked();
14887 if (res != OK) {
14888 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
14889 }
14890}
14891
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014892void QCamera3HardwareInterface::onOpenFailed(status_t err)
14893{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014894 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14895 Mutex::Autolock l(gHdrPlusClientLock);
14896 gHdrPlusClientOpening = false;
14897}
14898
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014899void QCamera3HardwareInterface::onFatalError()
14900{
14901 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
14902
14903 // Set HAL state to error.
14904 pthread_mutex_lock(&mMutex);
14905 mState = ERROR;
14906 pthread_mutex_unlock(&mMutex);
14907
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070014908 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014909}
14910
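/*===========================================================================
 * FUNCTION   : onCaptureResult
 *
 * DESCRIPTION: HDR+ client callback invoked when an HDR+ capture result is
 *              ready. Updates the result metadata with the original request
 *              settings, optionally dumps the YUV output, returns the YUV
 *              buffer to the pic channel for JPEG encoding, marks the shutter
 *              ready, and sends the result metadata to the framework.
 *
 * PARAMETERS : @result: HDR+ capture result
 *              @resultMetadata: result metadata of the ZSL buffer used for
 *                               this HDR+ capture
 *
 * RETURN     : None
 *==========================================================================*/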
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014911void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014912 const camera_metadata_t &resultMetadata)
14913{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014914 if (result != nullptr) {
14915 if (result->outputBuffers.size() != 1) {
14916 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
14917 result->outputBuffers.size());
14918 return;
14919 }
14920
14921 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14922 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14923 result->outputBuffers[0].streamId);
14924 return;
14925 }
14926
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014927 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014928 HdrPlusPendingRequest pendingRequest;
14929 {
14930 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14931 auto req = mHdrPlusPendingRequests.find(result->requestId);
14932 pendingRequest = req->second;
14933 }
14934
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014935 // Update the result metadata with the settings of the HDR+ still capture request because
14936 // the result metadata belongs to a ZSL buffer.
14937 CameraMetadata metadata;
14938 metadata = &resultMetadata;
14939 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14940 camera_metadata_t* updatedResultMetadata = metadata.release();
14941
14942 QCamera3PicChannel *picChannel =
14943 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14944
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014945 // Check if dumping HDR+ YUV output is enabled.
14946 char prop[PROPERTY_VALUE_MAX];
14947 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14948 bool dumpYuvOutput = atoi(prop);
14949
14950 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014951 // Dump yuv buffer to a ppm file.
14952 pbcamera::StreamConfiguration outputConfig;
14953 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14954 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14955 if (rc == OK) {
14956 char buf[FILENAME_MAX] = {};
14957 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14958 result->requestId, result->outputBuffers[0].streamId,
14959 outputConfig.image.width, outputConfig.image.height);
14960
14961 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14962 } else {
14963 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14964 __FUNCTION__, strerror(-rc), rc);
14965 }
14966 }
14967
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014968 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14969 auto halMetadata = std::make_shared<metadata_buffer_t>();
14970 clear_metadata_buffer(halMetadata.get());
14971
14972 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14973 // encoding.
14974 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14975 halStreamId, /*minFrameDuration*/0);
14976 if (res == OK) {
14977 // Return the buffer to pic channel for encoding.
14978 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14979 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14980 halMetadata);
14981 } else {
14982 // Return the buffer without encoding.
14983 // TODO: This should not happen but we may want to report an error buffer to camera
14984 // service.
14985 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14986 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14987 strerror(-res), res);
14988 }
14989
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014990 // Find the timestamp
14991 camera_metadata_ro_entry_t entry;
14992 res = find_camera_metadata_ro_entry(updatedResultMetadata,
14993 ANDROID_SENSOR_TIMESTAMP, &entry);
14994 if (res != OK) {
14995 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
14996 __FUNCTION__, result->requestId, strerror(-res), res);
14997 } else {
14998 mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
14999 }
15000
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015001 // Send HDR+ metadata to framework.
15002 {
15003 pthread_mutex_lock(&mMutex);
15004
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015005 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
15006 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015007 pthread_mutex_unlock(&mMutex);
15008 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015009
15010 // Remove the HDR+ pending request.
15011 {
15012 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15013 auto req = mHdrPlusPendingRequests.find(result->requestId);
15014 mHdrPlusPendingRequests.erase(req);
15015 }
15016 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015017}
15018
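/*===========================================================================
 * FUNCTION   : onFailedCaptureResult
 *
 * DESCRIPTION: HDR+ client callback invoked when an HDR+ capture request
 *              failed. Returns the YUV buffer to the pic channel, sends
 *              buffer error notifications and an error result for the
 *              pending buffers, and removes the pending request.
 *
 * PARAMETERS : @failedResult: failed HDR+ capture result
 *
 * RETURN     : None
 *==========================================================================*/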
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015019void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
15020{
15021 if (failedResult == nullptr) {
15022 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
15023 return;
15024 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015025
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015026 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015027
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015028 // Remove the pending HDR+ request.
15029 {
15030 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15031 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
15032
15033 // Return the buffer to pic channel.
15034 QCamera3PicChannel *picChannel =
15035 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
15036 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
15037
15038 mHdrPlusPendingRequests.erase(pendingRequest);
15039 }
15040
15041 pthread_mutex_lock(&mMutex);
15042
15043 // Find the pending buffers.
15044 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
15045 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15046 if (pendingBuffers->frame_number == failedResult->requestId) {
15047 break;
15048 }
15049 pendingBuffers++;
15050 }
15051
15052 // Send out buffer errors for the pending buffers.
15053 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15054 std::vector<camera3_stream_buffer_t> streamBuffers;
15055 for (auto &buffer : pendingBuffers->mPendingBufferList) {
15056 // Prepare a stream buffer.
15057 camera3_stream_buffer_t streamBuffer = {};
15058 streamBuffer.stream = buffer.stream;
15059 streamBuffer.buffer = buffer.buffer;
15060 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
15061 streamBuffer.acquire_fence = -1;
15062 streamBuffer.release_fence = -1;
15063
15064 streamBuffers.push_back(streamBuffer);
15065
15066 // Send out error buffer event.
15067 camera3_notify_msg_t notify_msg = {};
15068 notify_msg.type = CAMERA3_MSG_ERROR;
15069 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
15070 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
15071 notify_msg.message.error.error_stream = buffer.stream;
15072
15073 orchestrateNotify(&notify_msg);
15074 }
15075
15076 camera3_capture_result_t result = {};
15077 result.frame_number = pendingBuffers->frame_number;
15078 result.num_output_buffers = streamBuffers.size();
15079 result.output_buffers = &streamBuffers[0];
15080
15081 // Send out result with buffer errors.
15082 orchestrateResult(&result);
15083
15084 // Remove pending buffers.
15085 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
15086 }
15087
15088 // Remove pending request.
15089 auto halRequest = mPendingRequestsList.begin();
15090 while (halRequest != mPendingRequestsList.end()) {
15091 if (halRequest->frame_number == failedResult->requestId) {
15092 mPendingRequestsList.erase(halRequest);
15093 break;
15094 }
15095 halRequest++;
15096 }
15097
15098 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015099}
15100
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015101
15102ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
15103 mParent(parent) {}
15104
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015105void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015106{
15107 std::lock_guard<std::mutex> lock(mLock);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015108
15109 if (isReprocess) {
15110 mReprocessShutters.emplace(frameNumber, Shutter());
15111 } else {
15112 mShutters.emplace(frameNumber, Shutter());
15113 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015114}
15115
15116void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
15117{
15118 std::lock_guard<std::mutex> lock(mLock);
15119
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015120 std::map<uint32_t, Shutter> *shutters = nullptr;
15121
15122 // Find the shutter entry.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015123 auto shutter = mShutters.find(frameNumber);
15124 if (shutter == mShutters.end()) {
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015125 shutter = mReprocessShutters.find(frameNumber);
15126 if (shutter == mReprocessShutters.end()) {
15127 // Shutter was already sent.
15128 return;
15129 }
15130 shutters = &mReprocessShutters;
15131 } else {
15132 shutters = &mShutters;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015133 }
15134
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015135 // Make this frame's shutter ready.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015136 shutter->second.ready = true;
15137 shutter->second.timestamp = timestamp;
15138
    // Iterate through the shutters and send them out in order, stopping at
    // the first one that is not ready yet.
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015140 shutter = shutters->begin();
15141 while (shutter != shutters->end()) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015142 if (!shutter->second.ready) {
15143 // If this shutter is not ready, the following shutters can't be sent.
15144 break;
15145 }
15146
15147 camera3_notify_msg_t msg = {};
15148 msg.type = CAMERA3_MSG_SHUTTER;
15149 msg.message.shutter.frame_number = shutter->first;
15150 msg.message.shutter.timestamp = shutter->second.timestamp;
15151 mParent->orchestrateNotify(&msg);
15152
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015153 shutter = shutters->erase(shutter);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015154 }
15155}
15156
15157void ShutterDispatcher::clear(uint32_t frameNumber)
15158{
15159 std::lock_guard<std::mutex> lock(mLock);
15160 mShutters.erase(frameNumber);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015161 mReprocessShutters.erase(frameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015162}
15163
15164void ShutterDispatcher::clear()
15165{
15166 std::lock_guard<std::mutex> lock(mLock);
15167
15168 // Log errors for stale shutters.
15169 for (auto &shutter : mShutters) {
15170 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
15171 __FUNCTION__, shutter.first, shutter.second.ready,
15172 shutter.second.timestamp);
15173 }
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015174
15175 // Log errors for stale reprocess shutters.
15176 for (auto &shutter : mReprocessShutters) {
15177 ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15178 __FUNCTION__, shutter.first, shutter.second.ready,
15179 shutter.second.timestamp);
15180 }
15181
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015182 mShutters.clear();
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015183 mReprocessShutters.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015184}
15185
15186OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
15187 mParent(parent) {}
15188
15189status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
15190{
15191 std::lock_guard<std::mutex> lock(mLock);
15192 mStreamBuffers.clear();
15193 if (!streamList) {
15194 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15195 return -EINVAL;
15196 }
15197
15198 // Create a "frame-number -> buffer" map for each stream.
15199 for (uint32_t i = 0; i < streamList->num_streams; i++) {
15200 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15201 }
15202
15203 return OK;
15204}
15205
15206status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15207{
15208 std::lock_guard<std::mutex> lock(mLock);
15209
15210 // Find the "frame-number -> buffer" map for the stream.
15211 auto buffers = mStreamBuffers.find(stream);
15212 if (buffers == mStreamBuffers.end()) {
15213 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15214 return -EINVAL;
15215 }
15216
15217 // Create an unready buffer for this frame number.
15218 buffers->second.emplace(frameNumber, Buffer());
15219 return OK;
15220}
15221
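// Mark a buffer ready for a frame number and, for that stream, send all
// consecutively ready buffers to the framework in frame-number order.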
15222void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15223 const camera3_stream_buffer_t &buffer)
15224{
15225 std::lock_guard<std::mutex> lock(mLock);
15226
15227 // Find the frame number -> buffer map for the stream.
15228 auto buffers = mStreamBuffers.find(buffer.stream);
15229 if (buffers == mStreamBuffers.end()) {
15230 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15231 return;
15232 }
15233
    // Find the unready buffer for this frame number and mark it ready.
15235 auto pendingBuffer = buffers->second.find(frameNumber);
15236 if (pendingBuffer == buffers->second.end()) {
15237 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15238 return;
15239 }
15240
15241 pendingBuffer->second.ready = true;
15242 pendingBuffer->second.buffer = buffer;
15243
    // Iterate through the buffers and send out ready buffers in order,
    // stopping at the first one that is not ready yet.
15245 pendingBuffer = buffers->second.begin();
15246 while (pendingBuffer != buffers->second.end()) {
15247 if (!pendingBuffer->second.ready) {
15248 // If this buffer is not ready, the following buffers can't be sent.
15249 break;
15250 }
15251
15252 camera3_capture_result_t result = {};
15253 result.frame_number = pendingBuffer->first;
15254 result.num_output_buffers = 1;
15255 result.output_buffers = &pendingBuffer->second.buffer;
15256
        // Send out the capture result containing this buffer.
15258 mParent->orchestrateResult(&result);
15259
15260 pendingBuffer = buffers->second.erase(pendingBuffer);
15261 }
15262}
15263
15264void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15265{
15266 std::lock_guard<std::mutex> lock(mLock);
15267
15268 // Log errors for stale buffers.
15269 for (auto &buffers : mStreamBuffers) {
15270 for (auto &buffer : buffers.second) {
15271 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15272 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15273 }
15274 buffers.second.clear();
15275 }
15276
15277 if (clearConfiguredStreams) {
15278 mStreamBuffers.clear();
15279 }
15280}
15281
Thierry Strudel3d639192016-09-09 11:52:26 -070015282}; //end namespace qcamera