/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH  3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS       1
#define MAX_STALLING_STREAMS  1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
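// For example, 120 fps and 240 fps HFR configurations qualify for batch mode under
// this threshold, while 60 fps and 90 fps HFR modes do not.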
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold for detecting missing request buffers, in seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
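// Illustrative usage (not from the original source): METADATA_MAP_SIZE yields the
// number of entries in a statically sized map table, e.g.
//     size_t count = METADATA_MAP_SIZE(EFFECT_MODES_MAP); // 9 entries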

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT   0
#define FACE_TOP    1
#define FACE_RIGHT  2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X  0
#define LEFT_EYE_Y  1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X     4
#define MOUTH_Y     5
#define TOTAL_LANDMARK_INDICES 6
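// Illustrative note (an assumption, not taken from this file): the indices above
// let face landmark coordinates be packed into a flat int32_t array, e.g.
//     int32_t landmarks[TOTAL_LANDMARK_INDICES];
//     landmarks[LEFT_EYE_X] = leftEye.x;
//     landmarks[LEFT_EYE_Y] = leftEye.y;
// matching the (leftEyeX, leftEyeY, rightEyeX, rightEyeY, mouthX, mouthY) order of
// ANDROID_STATISTICS_FACE_LANDMARKS.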

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// TODO: Enable HDR+ for front camera after it's supported. b/37100623.
#define ENABLE_HDRPLUS_FOR_FRONT_CAMERA 0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",   CAM_CDS_MODE_ON},
    {"Off",  CAM_CDS_MODE_OFF},
    {"Auto", CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,              CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,             CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,     CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,      CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,         CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT,  CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,         CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,            CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS,      CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS,         CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list matters: when mapping from HAL to Android, the lookup
 * traverses from lower to higher index, so for HAL values that map to multiple
 * Android values, the first match found in the list is the one selected.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};
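// Illustrative consequence of the ordering rule described above (comment added for
// clarity, not in the original source): CAM_AWB_D50 appears for D50, DAYLIGHT and
// FINE_WEATHER, so when mapping from HAL back to Android, CAM_AWB_D50 resolves to
// ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, the first matching entry found.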

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
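// Minimal sketch of how the camera framework is expected to exercise these entry
// points (assumed call order for illustration, not taken from this file):
//     device->ops->initialize(device, callback_ops);
//     device->ops->configure_streams(device, &stream_list);
//     device->ops->process_capture_request(device, &request); // repeated per frame
//     device->common.close(&device->common);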

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
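// Example usage (as seen later in this file): emits a timestamped marker only when
// gEaselProfilingEnabled is set, e.g.
//     logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");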

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            Mutex::Autolock l(gHdrPlusClientLock);
            if (gEaselManagerClient.isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient.suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check that the requested stream dimensions are among those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set up by the framework is always the full active array size,
                 * but it is not clear from the spec whether the framework will
                 * always follow that. We also have logic to override to the full
                 * array size, so keep the check lenient for now.
                 */
1294 }
1295 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1296 MAX_SIZES_CNT);
1297 for (size_t i = 0; i < count; i++) {
1298 if (((int32_t)rotatedWidth ==
1299 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1300 ((int32_t)rotatedHeight ==
1301 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1302 sizeFound = true;
1303 break;
1304 }
1305 }
1306 break;
1307 } /* End of switch(newStream->format) */
1308
1309 /* We error out even if a single stream has unsupported size set */
1310 if (!sizeFound) {
1311 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1312 rotatedWidth, rotatedHeight, newStream->format,
1313 gCamCapability[mCameraId]->active_array_size.width,
1314 gCamCapability[mCameraId]->active_array_size.height);
1315 rc = -EINVAL;
1316 break;
1317 }
1318 } /* End of for each stream */
1319 return rc;
1320}
1321
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001322/*===========================================================================
1323 * FUNCTION : validateUsageFlags
1324 *
1325 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1326 *
1327 * PARAMETERS :
1328 * @stream_list : streams to be configured
1329 *
1330 * RETURN :
1331 * NO_ERROR if the usage flags are supported
1332 * error code if usage flags are not supported
1333 *
1334 *==========================================================================*/
1335int QCamera3HardwareInterface::validateUsageFlags(
1336 const camera3_stream_configuration_t* streamList)
1337{
1338 for (size_t j = 0; j < streamList->num_streams; j++) {
1339 const camera3_stream_t *newStream = streamList->streams[j];
1340
1341 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1342 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1343 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1344 continue;
1345 }
1346
1347 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1348 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1349 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1350 bool forcePreviewUBWC = true;
1351 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1352 forcePreviewUBWC = false;
1353 }
1354 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1355 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
1356 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1357 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
1358 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1359 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);
1360
1361 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1362 // So color spaces will always match.
1363
1364 // Check whether underlying formats of shared streams match.
1365 if (isVideo && isPreview && videoFormat != previewFormat) {
1366 LOGE("Combined video and preview usage flag is not supported");
1367 return -EINVAL;
1368 }
1369 if (isPreview && isZSL && previewFormat != zslFormat) {
1370 LOGE("Combined preview and zsl usage flag is not supported");
1371 return -EINVAL;
1372 }
1373 if (isVideo && isZSL && videoFormat != zslFormat) {
1374 LOGE("Combined video and zsl usage flag is not supported");
1375 return -EINVAL;
1376 }
1377 }
1378 return NO_ERROR;
1379}
1380
1381/*===========================================================================
1382 * FUNCTION : validateUsageFlagsForEis
1383 *
1384 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1385 *
1386 * PARAMETERS :
1387 * @stream_list : streams to be configured
1388 *
1389 * RETURN :
1390 * NO_ERROR if the usage flags are supported
1391 * error code if usage flags are not supported
1392 *
1393 *==========================================================================*/
1394int QCamera3HardwareInterface::validateUsageFlagsForEis(
1395 const camera3_stream_configuration_t* streamList)
1396{
1397 for (size_t j = 0; j < streamList->num_streams; j++) {
1398 const camera3_stream_t *newStream = streamList->streams[j];
1399
1400 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1401 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1402
1403 // Because EIS is "hard-coded" for certain use cases, and the current
1404 // implementation doesn't support shared preview and video on the same
1405 // stream, return failure if EIS is forced on.
1406 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1407 LOGE("Combined video and preview usage flag is not supported due to EIS");
1408 return -EINVAL;
1409 }
1410 }
1411 return NO_ERROR;
1412}
1413
Thierry Strudel3d639192016-09-09 11:52:26 -07001414/*==============================================================================
1415 * FUNCTION : isSupportChannelNeeded
1416 *
1417 * DESCRIPTION: Simple heuristic function to determine if a support channel is needed
1418 *
1419 * PARAMETERS :
1420 * @stream_list : streams to be configured
1421 * @stream_config_info : the config info for streams to be configured
1422 *
1423 * RETURN : Boolean true/false decision
1424 *
1425 *==========================================================================*/
1426bool QCamera3HardwareInterface::isSupportChannelNeeded(
1427 camera3_stream_configuration_t *streamList,
1428 cam_stream_size_info_t stream_config_info)
1429{
1430 uint32_t i;
1431 bool pprocRequested = false;
1432 /* Check for conditions where the PProc pipeline does not have any streams */
1433 for (i = 0; i < stream_config_info.num_streams; i++) {
1434 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1435 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1436 pprocRequested = true;
1437 break;
1438 }
1439 }
1440
1441 if (pprocRequested == false)
1442 return true;
1443
1444 /* Dummy stream needed if only raw or jpeg streams present */
1445 for (i = 0; i < streamList->num_streams; i++) {
1446 switch(streamList->streams[i]->format) {
1447 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1448 case HAL_PIXEL_FORMAT_RAW10:
1449 case HAL_PIXEL_FORMAT_RAW16:
1450 case HAL_PIXEL_FORMAT_BLOB:
1451 break;
1452 default:
1453 return false;
1454 }
1455 }
1456 return true;
1457}
1458
1459/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001460 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001461 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001462 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001463 *
1464 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001465 * @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001466 *
1467 * RETURN : int32_t type of status
1468 * NO_ERROR -- success
1469 * non-zero failure code
1470 *
1471 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001472int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001473{
1474 int32_t rc = NO_ERROR;
1475
1476 cam_dimension_t max_dim = {0, 0};
1477 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1478 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1479 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1480 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1481 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1482 }
1483
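 // The largest width/height across the configured streams is pushed to the
 // backend first (CAM_INTF_PARM_MAX_DIMENSION) so that the sensor mode the
 // backend selects covers every stream; the mode info query below then
 // reflects that selection.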
1484 clear_metadata_buffer(mParameters);
1485
1486 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1487 max_dim);
1488 if (rc != NO_ERROR) {
1489 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1490 return rc;
1491 }
1492
1493 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1494 if (rc != NO_ERROR) {
1495 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1496 return rc;
1497 }
1498
1499 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001500 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001501
1502 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1503 mParameters);
1504 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001505 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001506 return rc;
1507 }
1508
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001509 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001510 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1511 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1512 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1513 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1514 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001515
1516 return rc;
1517}
1518
1519/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001520 * FUNCTION : getCurrentSensorModeInfo
1521 *
1522 * DESCRIPTION: Get sensor mode information that is currently selected.
1523 *
1524 * PARAMETERS :
1525 * @sensorModeInfo : sensor mode information (output)
1526 *
1527 * RETURN : int32_t type of status
1528 * NO_ERROR -- success
1529 * non-zero failure code
1530 *
1531 *==========================================================================*/
1532int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1533{
1534 int32_t rc = NO_ERROR;
1535
1536 clear_metadata_buffer(mParameters);
1537 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1538
1539 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1540 mParameters);
1541 if (rc != NO_ERROR) {
1542 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
1543 return rc;
1544 }
1545
1546 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1547 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1548 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1549 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1550 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1551 sensorModeInfo.num_raw_bits);
1552
1553 return rc;
1554}
1555
1556/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001557 * FUNCTION : addToPPFeatureMask
1558 *
1559 * DESCRIPTION: add additional features to pp feature mask based on
1560 * stream type and usecase
1561 *
1562 * PARAMETERS :
1563 * @stream_format : stream type for feature mask
1564 * @stream_idx : stream idx within postprocess_mask list to change
1565 *
1566 * RETURN : None
1567 *
1568 *==========================================================================*/
1569void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1570 uint32_t stream_idx)
1571{
1572 char feature_mask_value[PROPERTY_VALUE_MAX];
1573 cam_feature_mask_t feature_mask;
1574 int args_converted;
1575 int property_len;
1576
1577 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001578#ifdef _LE_CAMERA_
1579 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1580 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1581 property_len = property_get("persist.camera.hal3.feature",
1582 feature_mask_value, swtnr_feature_mask_value);
1583#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001584 property_len = property_get("persist.camera.hal3.feature",
1585 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001586#endif
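 /* The property may hold either a hex value prefixed with "0x" (e.g.
  * "0x2000") or a plain decimal value (e.g. "8192"); both forms are parsed
  * into the 64-bit feature mask below. */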
Thierry Strudel3d639192016-09-09 11:52:26 -07001587 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1588 (feature_mask_value[1] == 'x')) {
1589 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1590 } else {
1591 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1592 }
1593 if (1 != args_converted) {
1594 feature_mask = 0;
1595 LOGE("Wrong feature mask %s", feature_mask_value);
1596 return;
1597 }
1598
1599 switch (stream_format) {
1600 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1601 /* Add SW TNR or LLVD SeeMore to the pp feature mask only if video hint is enabled */
1602 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1603 mStreamConfigInfo.postprocess_mask[stream_idx]
1604 |= CAM_QTI_FEATURE_SW_TNR;
1605 LOGH("Added SW TNR to pp feature mask");
1606 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1607 mStreamConfigInfo.postprocess_mask[stream_idx]
1608 |= CAM_QCOM_FEATURE_LLVD;
1609 LOGH("Added LLVD SeeMore to pp feature mask");
1610 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001611 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1612 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1613 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1614 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001615 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1616 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1617 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1618 CAM_QTI_FEATURE_BINNING_CORRECTION;
1619 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001620 break;
1621 }
1622 default:
1623 break;
1624 }
1625 LOGD("PP feature mask %llx",
1626 mStreamConfigInfo.postprocess_mask[stream_idx]);
1627}
1628
1629/*==============================================================================
1630 * FUNCTION : updateFpsInPreviewBuffer
1631 *
1632 * DESCRIPTION: update FPS information in preview buffer.
1633 *
1634 * PARAMETERS :
1635 * @metadata : pointer to metadata buffer
1636 * @frame_number: frame_number to look for in pending buffer list
1637 *
1638 * RETURN : None
1639 *
1640 *==========================================================================*/
1641void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1642 uint32_t frame_number)
1643{
1644 // Mark all pending buffers for this particular request
1645 // with corresponding framerate information
1646 for (List<PendingBuffersInRequest>::iterator req =
1647 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1648 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1649 for(List<PendingBufferInfo>::iterator j =
1650 req->mPendingBufferList.begin();
1651 j != req->mPendingBufferList.end(); j++) {
1652 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1653 if ((req->frame_number == frame_number) &&
1654 (channel->getStreamTypeMask() &
1655 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1656 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1657 CAM_INTF_PARM_FPS_RANGE, metadata) {
1658 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1659 struct private_handle_t *priv_handle =
1660 (struct private_handle_t *)(*(j->buffer));
1661 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1662 }
1663 }
1664 }
1665 }
1666}
1667
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001668/*==============================================================================
1669 * FUNCTION : updateTimeStampInPendingBuffers
1670 *
1671 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1672 * of a frame number
1673 *
1674 * PARAMETERS :
1675 * @frameNumber : frame number whose pending buffers will have the timestamp set
1676 * @timestamp : timestamp to be set
1677 *
1678 * RETURN : None
1679 *
1680 *==========================================================================*/
1681void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1682 uint32_t frameNumber, nsecs_t timestamp)
1683{
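 // Stamp the private handle of every pending buffer belonging to this frame
 // with the given timestamp (SET_VT_TIMESTAMP), so downstream consumers of
 // the display metadata see a consistent value.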
1684 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1685 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1686 if (req->frame_number != frameNumber)
1687 continue;
1688
1689 for (auto k = req->mPendingBufferList.begin();
1690 k != req->mPendingBufferList.end(); k++ ) {
1691 struct private_handle_t *priv_handle =
1692 (struct private_handle_t *) (*(k->buffer));
1693 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1694 }
1695 }
1696 return;
1697}
1698
Thierry Strudel3d639192016-09-09 11:52:26 -07001699/*===========================================================================
1700 * FUNCTION : configureStreams
1701 *
1702 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1703 * and output streams.
1704 *
1705 * PARAMETERS :
1706 * @stream_list : streams to be configured
1707 *
1708 * RETURN :
1709 *
1710 *==========================================================================*/
1711int QCamera3HardwareInterface::configureStreams(
1712 camera3_stream_configuration_t *streamList)
1713{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001714 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001715 int rc = 0;
1716
1717 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001718 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001719 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001720 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001721
1722 return rc;
1723}
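/* Illustrative only (not part of this HAL): a sketch of the kind of
 * configuration the framework typically hands to configure_streams() for a
 * preview + JPEG session. The field values are assumptions for the example.
 *
 *   camera3_stream_t preview = {};
 *   preview.stream_type = CAMERA3_STREAM_OUTPUT;
 *   preview.width = 1920;
 *   preview.height = 1080;
 *   preview.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
 *   preview.usage = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER;
 *
 *   camera3_stream_t jpeg = {};
 *   jpeg.stream_type = CAMERA3_STREAM_OUTPUT;
 *   jpeg.width = 4032;
 *   jpeg.height = 3024;
 *   jpeg.format = HAL_PIXEL_FORMAT_BLOB;
 *
 *   camera3_stream_t *streams[] = { &preview, &jpeg };
 *   camera3_stream_configuration_t config = {};
 *   config.num_streams = 2;
 *   config.streams = streams;
 *   config.operation_mode = CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE;
 *   // device->ops->configure_streams(device, &config);
 */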
1724
1725/*===========================================================================
1726 * FUNCTION : configureStreamsPerfLocked
1727 *
1728 * DESCRIPTION: configureStreams while perfLock is held.
1729 *
1730 * PARAMETERS :
1731 * @stream_list : streams to be configured
1732 *
1733 * RETURN : int32_t type of status
1734 * NO_ERROR -- success
1735 * non-zero failure code
1736 *==========================================================================*/
1737int QCamera3HardwareInterface::configureStreamsPerfLocked(
1738 camera3_stream_configuration_t *streamList)
1739{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001740 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001741 int rc = 0;
1742
1743 // Sanity check stream_list
1744 if (streamList == NULL) {
1745 LOGE("NULL stream configuration");
1746 return BAD_VALUE;
1747 }
1748 if (streamList->streams == NULL) {
1749 LOGE("NULL stream list");
1750 return BAD_VALUE;
1751 }
1752
1753 if (streamList->num_streams < 1) {
1754 LOGE("Bad number of streams requested: %d",
1755 streamList->num_streams);
1756 return BAD_VALUE;
1757 }
1758
1759 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1760 LOGE("Maximum number of streams %d exceeded: %d",
1761 MAX_NUM_STREAMS, streamList->num_streams);
1762 return BAD_VALUE;
1763 }
1764
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001765 rc = validateUsageFlags(streamList);
1766 if (rc != NO_ERROR) {
1767 return rc;
1768 }
1769
Thierry Strudel3d639192016-09-09 11:52:26 -07001770 mOpMode = streamList->operation_mode;
1771 LOGD("mOpMode: %d", mOpMode);
1772
1773 /* first invalidate all the steams in the mStreamList
1774 * if they appear again, they will be validated */
1775 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1776 it != mStreamInfo.end(); it++) {
1777 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1778 if (channel) {
1779 channel->stop();
1780 }
1781 (*it)->status = INVALID;
1782 }
1783
1784 if (mRawDumpChannel) {
1785 mRawDumpChannel->stop();
1786 delete mRawDumpChannel;
1787 mRawDumpChannel = NULL;
1788 }
1789
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001790 if (mHdrPlusRawSrcChannel) {
1791 mHdrPlusRawSrcChannel->stop();
1792 delete mHdrPlusRawSrcChannel;
1793 mHdrPlusRawSrcChannel = NULL;
1794 }
1795
Thierry Strudel3d639192016-09-09 11:52:26 -07001796 if (mSupportChannel)
1797 mSupportChannel->stop();
1798
1799 if (mAnalysisChannel) {
1800 mAnalysisChannel->stop();
1801 }
1802 if (mMetadataChannel) {
1803 /* If content of mStreamInfo is not 0, there is metadata stream */
1804 mMetadataChannel->stop();
1805 }
1806 if (mChannelHandle) {
1807 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1808 mChannelHandle);
1809 LOGD("stopping channel %d", mChannelHandle);
1810 }
1811
1812 pthread_mutex_lock(&mMutex);
1813
1814 // Check state
1815 switch (mState) {
1816 case INITIALIZED:
1817 case CONFIGURED:
1818 case STARTED:
1819 /* valid state */
1820 break;
1821 default:
1822 LOGE("Invalid state %d", mState);
1823 pthread_mutex_unlock(&mMutex);
1824 return -ENODEV;
1825 }
1826
1827 /* Check whether we have video stream */
1828 m_bIs4KVideo = false;
1829 m_bIsVideo = false;
1830 m_bEisSupportedSize = false;
1831 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001832 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001833 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001834 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001835 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001836 uint32_t videoWidth = 0U;
1837 uint32_t videoHeight = 0U;
1838 size_t rawStreamCnt = 0;
1839 size_t stallStreamCnt = 0;
1840 size_t processedStreamCnt = 0;
1841 // Number of streams on ISP encoder path
1842 size_t numStreamsOnEncoder = 0;
1843 size_t numYuv888OnEncoder = 0;
1844 bool bYuv888OverrideJpeg = false;
1845 cam_dimension_t largeYuv888Size = {0, 0};
1846 cam_dimension_t maxViewfinderSize = {0, 0};
1847 bool bJpegExceeds4K = false;
1848 bool bJpegOnEncoder = false;
1849 bool bUseCommonFeatureMask = false;
1850 cam_feature_mask_t commonFeatureMask = 0;
1851 bool bSmallJpegSize = false;
1852 uint32_t width_ratio;
1853 uint32_t height_ratio;
1854 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1855 camera3_stream_t *inputStream = NULL;
1856 bool isJpeg = false;
1857 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001858 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001859 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001860
1861 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1862
1863 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001864 uint8_t eis_prop_set;
1865 uint32_t maxEisWidth = 0;
1866 uint32_t maxEisHeight = 0;
1867
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001868 // Initialize all instant AEC related variables
1869 mInstantAEC = false;
1870 mResetInstantAEC = false;
1871 mInstantAECSettledFrameNumber = 0;
1872 mAecSkipDisplayFrameBound = 0;
1873 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001874 mCurrFeatureState = 0;
1875 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001876
Thierry Strudel3d639192016-09-09 11:52:26 -07001877 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1878
1879 size_t count = IS_TYPE_MAX;
1880 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1881 for (size_t i = 0; i < count; i++) {
1882 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001883 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1884 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001885 break;
1886 }
1887 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001888
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001889 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001890 maxEisWidth = MAX_EIS_WIDTH;
1891 maxEisHeight = MAX_EIS_HEIGHT;
1892 }
1893
1894 /* EIS setprop control */
1895 char eis_prop[PROPERTY_VALUE_MAX];
1896 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001897 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001898 eis_prop_set = (uint8_t)atoi(eis_prop);
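 // A non-zero property value (the default is "1") only requests EIS; the
 // sensor must also support it and the session must not be constrained
 // high-speed before m_bEisEnable is actually set below.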
1899
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001900 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001901 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1902
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001903 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1904 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001905
Thierry Strudel3d639192016-09-09 11:52:26 -07001906 /* stream configurations */
1907 for (size_t i = 0; i < streamList->num_streams; i++) {
1908 camera3_stream_t *newStream = streamList->streams[i];
1909 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1910 "height = %d, rotation = %d, usage = 0x%x",
1911 i, newStream->stream_type, newStream->format,
1912 newStream->width, newStream->height, newStream->rotation,
1913 newStream->usage);
1914 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1915 newStream->stream_type == CAMERA3_STREAM_INPUT){
1916 isZsl = true;
1917 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001918 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1919 IS_USAGE_PREVIEW(newStream->usage)) {
1920 isPreview = true;
1921 }
1922
Thierry Strudel3d639192016-09-09 11:52:26 -07001923 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1924 inputStream = newStream;
1925 }
1926
Emilian Peev7650c122017-01-19 08:24:33 -08001927 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1928 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001929 isJpeg = true;
1930 jpegSize.width = newStream->width;
1931 jpegSize.height = newStream->height;
1932 if (newStream->width > VIDEO_4K_WIDTH ||
1933 newStream->height > VIDEO_4K_HEIGHT)
1934 bJpegExceeds4K = true;
1935 }
1936
1937 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1938 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1939 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001940 // In HAL3 we can have multiple different video streams.
1941 // The variables video width and height are used below as
1942 // dimensions of the biggest of them
1943 if (videoWidth < newStream->width ||
1944 videoHeight < newStream->height) {
1945 videoWidth = newStream->width;
1946 videoHeight = newStream->height;
1947 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001948 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1949 (VIDEO_4K_HEIGHT <= newStream->height)) {
1950 m_bIs4KVideo = true;
1951 }
1952 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1953 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001954
Thierry Strudel3d639192016-09-09 11:52:26 -07001955 }
1956 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1957 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1958 switch (newStream->format) {
1959 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001960 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1961 depthPresent = true;
1962 break;
1963 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001964 stallStreamCnt++;
1965 if (isOnEncoder(maxViewfinderSize, newStream->width,
1966 newStream->height)) {
1967 numStreamsOnEncoder++;
1968 bJpegOnEncoder = true;
1969 }
1970 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1971 newStream->width);
1972 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1973 newStream->height);
1974 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1975 "FATAL: max_downscale_factor cannot be zero and so assert");
1976 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1977 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1978 LOGH("Setting small jpeg size flag to true");
1979 bSmallJpegSize = true;
1980 }
1981 break;
1982 case HAL_PIXEL_FORMAT_RAW10:
1983 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1984 case HAL_PIXEL_FORMAT_RAW16:
1985 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001986 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1987 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1988 pdStatCount++;
1989 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001990 break;
1991 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1992 processedStreamCnt++;
1993 if (isOnEncoder(maxViewfinderSize, newStream->width,
1994 newStream->height)) {
1995 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1996 !IS_USAGE_ZSL(newStream->usage)) {
1997 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1998 }
1999 numStreamsOnEncoder++;
2000 }
2001 break;
2002 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2003 processedStreamCnt++;
2004 if (isOnEncoder(maxViewfinderSize, newStream->width,
2005 newStream->height)) {
2006 // If Yuv888 size is not greater than 4K, set feature mask
2007 // to SUPERSET so that it support concurrent request on
2008 // YUV and JPEG.
2009 if (newStream->width <= VIDEO_4K_WIDTH &&
2010 newStream->height <= VIDEO_4K_HEIGHT) {
2011 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2012 }
2013 numStreamsOnEncoder++;
2014 numYuv888OnEncoder++;
2015 largeYuv888Size.width = newStream->width;
2016 largeYuv888Size.height = newStream->height;
2017 }
2018 break;
2019 default:
2020 processedStreamCnt++;
2021 if (isOnEncoder(maxViewfinderSize, newStream->width,
2022 newStream->height)) {
2023 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2024 numStreamsOnEncoder++;
2025 }
2026 break;
2027 }
2028
2029 }
2030 }
2031
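 // Keep EIS only for back-camera video sessions: front (and front-aux)
 // sensors and configurations without a video stream fall back to non-EIS.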
2032 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2033 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2034 !m_bIsVideo) {
2035 m_bEisEnable = false;
2036 }
2037
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002038 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2039 pthread_mutex_unlock(&mMutex);
2040 return -EINVAL;
2041 }
2042
Thierry Strudel54dc9782017-02-15 12:12:10 -08002043 uint8_t forceEnableTnr = 0;
2044 char tnr_prop[PROPERTY_VALUE_MAX];
2045 memset(tnr_prop, 0, sizeof(tnr_prop));
2046 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2047 forceEnableTnr = (uint8_t)atoi(tnr_prop);
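 // Setting debug.camera.tnr.forceenable to a non-zero value (for example via
 // "adb shell setprop debug.camera.tnr.forceenable 1") force-enables TNR even
 // when the per-stream preview/video TNR flags are off.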
2048
Thierry Strudel3d639192016-09-09 11:52:26 -07002049 /* Logic to enable/disable TNR based on specific config size/etc.*/
2050 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002051 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2052 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002053 else if (forceEnableTnr)
2054 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002055
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002056 char videoHdrProp[PROPERTY_VALUE_MAX];
2057 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2058 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2059 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
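 // persist.camera.hdr.video=1 turns on video HDR, but only for sessions that
 // actually contain a video stream and are not in constrained high-speed mode.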
2060
2061 if (hdr_mode_prop == 1 && m_bIsVideo &&
2062 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2063 m_bVideoHdrEnabled = true;
2064 else
2065 m_bVideoHdrEnabled = false;
2066
2067
Thierry Strudel3d639192016-09-09 11:52:26 -07002068 /* Check if num_streams is sane */
2069 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2070 rawStreamCnt > MAX_RAW_STREAMS ||
2071 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2072 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2073 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2074 pthread_mutex_unlock(&mMutex);
2075 return -EINVAL;
2076 }
2077 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002078 if (isZsl && m_bIs4KVideo) {
2079 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002080 pthread_mutex_unlock(&mMutex);
2081 return -EINVAL;
2082 }
2083 /* Check if stream sizes are sane */
2084 if (numStreamsOnEncoder > 2) {
2085 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2086 pthread_mutex_unlock(&mMutex);
2087 return -EINVAL;
2088 } else if (1 < numStreamsOnEncoder){
2089 bUseCommonFeatureMask = true;
2090 LOGH("Multiple streams above max viewfinder size, common mask needed");
2091 }
2092
2093 /* Check if BLOB size is greater than 4k in 4k recording case */
2094 if (m_bIs4KVideo && bJpegExceeds4K) {
2095 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2096 pthread_mutex_unlock(&mMutex);
2097 return -EINVAL;
2098 }
2099
Emilian Peev7650c122017-01-19 08:24:33 -08002100 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2101 depthPresent) {
2102 LOGE("HAL doesn't support depth streams in HFR mode!");
2103 pthread_mutex_unlock(&mMutex);
2104 return -EINVAL;
2105 }
2106
Thierry Strudel3d639192016-09-09 11:52:26 -07002107 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2108 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2109 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2110 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2111 // configurations:
2112 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2113 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2114 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2115 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2116 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2117 __func__);
2118 pthread_mutex_unlock(&mMutex);
2119 return -EINVAL;
2120 }
2121
2122 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2123 // the YUV stream's size is greater or equal to the JPEG size, set common
2124 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2125 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2126 jpegSize.width, jpegSize.height) &&
2127 largeYuv888Size.width > jpegSize.width &&
2128 largeYuv888Size.height > jpegSize.height) {
2129 bYuv888OverrideJpeg = true;
2130 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2131 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2132 }
2133
2134 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2135 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2136 commonFeatureMask);
2137 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2138 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2139
2140 rc = validateStreamDimensions(streamList);
2141 if (rc == NO_ERROR) {
2142 rc = validateStreamRotations(streamList);
2143 }
2144 if (rc != NO_ERROR) {
2145 LOGE("Invalid stream configuration requested!");
2146 pthread_mutex_unlock(&mMutex);
2147 return rc;
2148 }
2149
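 // At most one phase-detection (PD) statistics stream -- RAW16 with the
 // DEPTH dataspace -- is supported, and it cannot be combined with HFR mode.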
Emilian Peev0f3c3162017-03-15 12:57:46 +00002150 if (1 < pdStatCount) {
2151 LOGE("HAL doesn't support multiple PD streams");
2152 pthread_mutex_unlock(&mMutex);
2153 return -EINVAL;
2154 }
2155
2156 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2157 (1 == pdStatCount)) {
2158 LOGE("HAL doesn't support PD streams in HFR mode!");
2159 pthread_mutex_unlock(&mMutex);
2160 return -EINVAL;
2161 }
2162
Thierry Strudel3d639192016-09-09 11:52:26 -07002163 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2164 for (size_t i = 0; i < streamList->num_streams; i++) {
2165 camera3_stream_t *newStream = streamList->streams[i];
2166 LOGH("newStream type = %d, stream format = %d "
2167 "stream size : %d x %d, stream rotation = %d",
2168 newStream->stream_type, newStream->format,
2169 newStream->width, newStream->height, newStream->rotation);
2170 //if the stream is in the mStreamList validate it
2171 bool stream_exists = false;
2172 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2173 it != mStreamInfo.end(); it++) {
2174 if ((*it)->stream == newStream) {
2175 QCamera3ProcessingChannel *channel =
2176 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2177 stream_exists = true;
2178 if (channel)
2179 delete channel;
2180 (*it)->status = VALID;
2181 (*it)->stream->priv = NULL;
2182 (*it)->channel = NULL;
2183 }
2184 }
2185 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2186 //new stream
2187 stream_info_t* stream_info;
2188 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2189 if (!stream_info) {
2190 LOGE("Could not allocate stream info");
2191 rc = -ENOMEM;
2192 pthread_mutex_unlock(&mMutex);
2193 return rc;
2194 }
2195 stream_info->stream = newStream;
2196 stream_info->status = VALID;
2197 stream_info->channel = NULL;
2198 mStreamInfo.push_back(stream_info);
2199 }
2200 /* Covers Opaque ZSL and API1 F/W ZSL */
2201 if (IS_USAGE_ZSL(newStream->usage)
2202 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2203 if (zslStream != NULL) {
2204 LOGE("Multiple input/reprocess streams requested!");
2205 pthread_mutex_unlock(&mMutex);
2206 return BAD_VALUE;
2207 }
2208 zslStream = newStream;
2209 }
2210 /* Covers YUV reprocess */
2211 if (inputStream != NULL) {
2212 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2213 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2214 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2215 && inputStream->width == newStream->width
2216 && inputStream->height == newStream->height) {
2217 if (zslStream != NULL) {
2218 /* This scenario indicates that multiple YUV streams with the same
2219 * size as the input stream have been requested. Since the zsl stream
2220 * handle is used solely for overriding the size of streams which
2221 * share h/w streams, we just make a guess here as to which of the
2222 * streams is the ZSL stream. This will be refactored once we have
2223 * generic logic for streams sharing encoder output.
2224 */
2225 LOGH("Warning, Multiple ip/reprocess streams requested!");
2226 }
2227 zslStream = newStream;
2228 }
2229 }
2230 }
2231
2232 /* If a zsl stream is set, we know that we have configured at least one input or
2233 bidirectional stream */
2234 if (NULL != zslStream) {
2235 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2236 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2237 mInputStreamInfo.format = zslStream->format;
2238 mInputStreamInfo.usage = zslStream->usage;
2239 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2240 mInputStreamInfo.dim.width,
2241 mInputStreamInfo.dim.height,
2242 mInputStreamInfo.format, mInputStreamInfo.usage);
2243 }
2244
2245 cleanAndSortStreamInfo();
2246 if (mMetadataChannel) {
2247 delete mMetadataChannel;
2248 mMetadataChannel = NULL;
2249 }
2250 if (mSupportChannel) {
2251 delete mSupportChannel;
2252 mSupportChannel = NULL;
2253 }
2254
2255 if (mAnalysisChannel) {
2256 delete mAnalysisChannel;
2257 mAnalysisChannel = NULL;
2258 }
2259
2260 if (mDummyBatchChannel) {
2261 delete mDummyBatchChannel;
2262 mDummyBatchChannel = NULL;
2263 }
2264
Emilian Peev7650c122017-01-19 08:24:33 -08002265 if (mDepthChannel) {
2266 mDepthChannel = NULL;
2267 }
2268
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002269 mShutterDispatcher.clear();
2270 mOutputBufferDispatcher.clear();
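 // Clear shutter and output-buffer dispatch state left over from the previous
 // session before new channels are created for this configuration.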
2271
Thierry Strudel2896d122017-02-23 19:18:03 -08002272 char is_type_value[PROPERTY_VALUE_MAX];
2273 property_get("persist.camera.is_type", is_type_value, "4");
2274 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
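 // persist.camera.is_type selects the image stabilization type (the property
 // defaults to "4"); when the value matches IS_TYPE_EIS_3_0, the EIS 3.0
 // video buffer count and PPEISCORE feature are used further below.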
2275
Binhao Line406f062017-05-03 14:39:44 -07002276 char property_value[PROPERTY_VALUE_MAX];
2277 property_get("persist.camera.gzoom.at", property_value, "0");
2278 int goog_zoom_at = atoi(property_value);
2279 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0);
2280 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0);
2281
2282 property_get("persist.camera.gzoom.4k", property_value, "0");
2283 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
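 // persist.camera.gzoom.at is a bitmask: bit 0 enables Google zoom on the
 // video stream, bit 1 on preview streams; persist.camera.gzoom.4k must also
 // be set for it to apply to 4K video.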
2284
Thierry Strudel3d639192016-09-09 11:52:26 -07002285 //Create metadata channel and initialize it
2286 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2287 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2288 gCamCapability[mCameraId]->color_arrangement);
2289 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2290 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002291 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002292 if (mMetadataChannel == NULL) {
2293 LOGE("failed to allocate metadata channel");
2294 rc = -ENOMEM;
2295 pthread_mutex_unlock(&mMutex);
2296 return rc;
2297 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002298 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002299 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2300 if (rc < 0) {
2301 LOGE("metadata channel initialization failed");
2302 delete mMetadataChannel;
2303 mMetadataChannel = NULL;
2304 pthread_mutex_unlock(&mMutex);
2305 return rc;
2306 }
2307
Thierry Strudel2896d122017-02-23 19:18:03 -08002308 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002309 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002310 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002311 // Keep track of preview/video streams indices.
2312 // There could be more than one preview streams, but only one video stream.
2313 int32_t video_stream_idx = -1;
2314 int32_t preview_stream_idx[streamList->num_streams];
2315 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002316 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2317 /* Allocate channel objects for the requested streams */
2318 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002319
Thierry Strudel3d639192016-09-09 11:52:26 -07002320 camera3_stream_t *newStream = streamList->streams[i];
2321 uint32_t stream_usage = newStream->usage;
2322 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2323 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2324 struct camera_info *p_info = NULL;
2325 pthread_mutex_lock(&gCamLock);
2326 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2327 pthread_mutex_unlock(&gCamLock);
2328 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2329 || IS_USAGE_ZSL(newStream->usage)) &&
2330 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002331 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002332 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002333 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2334 if (bUseCommonFeatureMask)
2335 zsl_ppmask = commonFeatureMask;
2336 else
2337 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002338 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002339 if (numStreamsOnEncoder > 0)
2340 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2341 else
2342 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002343 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002344 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002345 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002346 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002347 LOGH("Input stream configured, reprocess config");
2348 } else {
2349 //for non zsl streams find out the format
2350 switch (newStream->format) {
2351 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2352 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002353 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002354 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2355 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2356 /* add additional features to pp feature mask */
2357 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2358 mStreamConfigInfo.num_streams);
2359
2360 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2361 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2362 CAM_STREAM_TYPE_VIDEO;
2363 if (m_bTnrEnabled && m_bTnrVideo) {
2364 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2365 CAM_QCOM_FEATURE_CPP_TNR;
2366 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2367 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2368 ~CAM_QCOM_FEATURE_CDS;
2369 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002370 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2371 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2372 CAM_QTI_FEATURE_PPEISCORE;
2373 }
Binhao Line406f062017-05-03 14:39:44 -07002374 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2375 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2376 CAM_QCOM_FEATURE_GOOG_ZOOM;
2377 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002378 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002379 } else {
2380 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2381 CAM_STREAM_TYPE_PREVIEW;
2382 if (m_bTnrEnabled && m_bTnrPreview) {
2383 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2384 CAM_QCOM_FEATURE_CPP_TNR;
2385 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2386 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2387 ~CAM_QCOM_FEATURE_CDS;
2388 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002389 if(!m_bSwTnrPreview) {
2390 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2391 ~CAM_QTI_FEATURE_SW_TNR;
2392 }
Binhao Line406f062017-05-03 14:39:44 -07002393 if (is_goog_zoom_preview_enabled) {
2394 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2395 CAM_QCOM_FEATURE_GOOG_ZOOM;
2396 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002397 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002398 padding_info.width_padding = mSurfaceStridePadding;
2399 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002400 previewSize.width = (int32_t)newStream->width;
2401 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002402 }
2403 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2404 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2405 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2406 newStream->height;
2407 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2408 newStream->width;
2409 }
2410 }
2411 break;
2412 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002413 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002414 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2415 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2416 if (bUseCommonFeatureMask)
2417 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2418 commonFeatureMask;
2419 else
2420 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2421 CAM_QCOM_FEATURE_NONE;
2422 } else {
2423 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2424 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2425 }
2426 break;
2427 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002428 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002429 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2430 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2431 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2432 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2433 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002434 /* Remove rotation if it is not supported
2435 for 4K LiveVideo snapshot case (online processing) */
2436 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2437 CAM_QCOM_FEATURE_ROTATION)) {
2438 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2439 &= ~CAM_QCOM_FEATURE_ROTATION;
2440 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002441 } else {
2442 if (bUseCommonFeatureMask &&
2443 isOnEncoder(maxViewfinderSize, newStream->width,
2444 newStream->height)) {
2445 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2446 } else {
2447 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2448 }
2449 }
2450 if (isZsl) {
2451 if (zslStream) {
2452 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2453 (int32_t)zslStream->width;
2454 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2455 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002456 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2457 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002458 } else {
2459 LOGE("Error, No ZSL stream identified");
2460 pthread_mutex_unlock(&mMutex);
2461 return -EINVAL;
2462 }
2463 } else if (m_bIs4KVideo) {
2464 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2465 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2466 } else if (bYuv888OverrideJpeg) {
2467 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2468 (int32_t)largeYuv888Size.width;
2469 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2470 (int32_t)largeYuv888Size.height;
2471 }
2472 break;
2473 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2474 case HAL_PIXEL_FORMAT_RAW16:
2475 case HAL_PIXEL_FORMAT_RAW10:
2476 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2477 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2478 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002479 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2480 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2481 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2482 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2483 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2484 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2485 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2486 gCamCapability[mCameraId]->dt[mPDIndex];
2487 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2488 gCamCapability[mCameraId]->vc[mPDIndex];
2489 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002490 break;
2491 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002492 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002493 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2494 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2495 break;
2496 }
2497 }
2498
2499 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2500 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2501 gCamCapability[mCameraId]->color_arrangement);
2502
2503 if (newStream->priv == NULL) {
2504 //New stream, construct channel
2505 switch (newStream->stream_type) {
2506 case CAMERA3_STREAM_INPUT:
2507 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2508 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2509 break;
2510 case CAMERA3_STREAM_BIDIRECTIONAL:
2511 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2512 GRALLOC_USAGE_HW_CAMERA_WRITE;
2513 break;
2514 case CAMERA3_STREAM_OUTPUT:
2515 /* For video encoding stream, set read/write rarely
2516 * flag so that they may be set to un-cached */
2517 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2518 newStream->usage |=
2519 (GRALLOC_USAGE_SW_READ_RARELY |
2520 GRALLOC_USAGE_SW_WRITE_RARELY |
2521 GRALLOC_USAGE_HW_CAMERA_WRITE);
2522 else if (IS_USAGE_ZSL(newStream->usage))
2523 {
2524 LOGD("ZSL usage flag skipping");
2525 }
2526 else if (newStream == zslStream
2527 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2528 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2529 } else
2530 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2531 break;
2532 default:
2533 LOGE("Invalid stream_type %d", newStream->stream_type);
2534 break;
2535 }
2536
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002537 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002538 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2539 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2540 QCamera3ProcessingChannel *channel = NULL;
2541 switch (newStream->format) {
2542 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2543 if ((newStream->usage &
2544 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2545 (streamList->operation_mode ==
2546 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2547 ) {
2548 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2549 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002550 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002551 this,
2552 newStream,
2553 (cam_stream_type_t)
2554 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2555 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2556 mMetadataChannel,
2557 0); //heap buffers are not required for HFR video channel
2558 if (channel == NULL) {
2559 LOGE("allocation of channel failed");
2560 pthread_mutex_unlock(&mMutex);
2561 return -ENOMEM;
2562 }
2563 //channel->getNumBuffers() will return 0 here so use
2564 //MAX_INFLIGHT_HFR_REQUESTS
2565 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2566 newStream->priv = channel;
2567 LOGI("num video buffers in HFR mode: %d",
2568 MAX_INFLIGHT_HFR_REQUESTS);
2569 } else {
2570 /* Copy stream contents in HFR preview only case to create
2571 * dummy batch channel so that sensor streaming is in
2572 * HFR mode */
2573 if (!m_bIsVideo && (streamList->operation_mode ==
2574 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2575 mDummyBatchStream = *newStream;
2576 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002577 int bufferCount = MAX_INFLIGHT_REQUESTS;
2578 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2579 CAM_STREAM_TYPE_VIDEO) {
2580 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2581 bufferCount = MAX_VIDEO_BUFFERS;
2582 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002583 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2584 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002585 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002586 this,
2587 newStream,
2588 (cam_stream_type_t)
2589 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2590 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2591 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002592 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002593 if (channel == NULL) {
2594 LOGE("allocation of channel failed");
2595 pthread_mutex_unlock(&mMutex);
2596 return -ENOMEM;
2597 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002598 /* disable UBWC for preview, though supported,
2599 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002600 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002601 (previewSize.width == (int32_t)videoWidth)&&
2602 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002603 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002604 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002605 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002606 /* When goog_zoom is linked to the preview or video stream,
2607 * disable ubwc to the linked stream */
2608 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2609 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2610 channel->setUBWCEnabled(false);
2611 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002612 newStream->max_buffers = channel->getNumBuffers();
2613 newStream->priv = channel;
2614 }
2615 break;
2616 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2617 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2618 mChannelHandle,
2619 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002620 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002621 this,
2622 newStream,
2623 (cam_stream_type_t)
2624 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2625 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2626 mMetadataChannel);
2627 if (channel == NULL) {
2628 LOGE("allocation of YUV channel failed");
2629 pthread_mutex_unlock(&mMutex);
2630 return -ENOMEM;
2631 }
2632 newStream->max_buffers = channel->getNumBuffers();
2633 newStream->priv = channel;
2634 break;
2635 }
2636 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2637 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002638 case HAL_PIXEL_FORMAT_RAW10: {
2639 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2640 (HAL_DATASPACE_DEPTH != newStream->data_space))
2641 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002642 mRawChannel = new QCamera3RawChannel(
2643 mCameraHandle->camera_handle, mChannelHandle,
2644 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002645 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002646 this, newStream,
2647 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002648 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002649 if (mRawChannel == NULL) {
2650 LOGE("allocation of raw channel failed");
2651 pthread_mutex_unlock(&mMutex);
2652 return -ENOMEM;
2653 }
2654 newStream->max_buffers = mRawChannel->getNumBuffers();
2655 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2656 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002657 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002658 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002659 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2660 mDepthChannel = new QCamera3DepthChannel(
2661 mCameraHandle->camera_handle, mChannelHandle,
2662 mCameraHandle->ops, NULL, NULL, &padding_info,
2663 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2664 mMetadataChannel);
2665 if (NULL == mDepthChannel) {
2666 LOGE("Allocation of depth channel failed");
2667 pthread_mutex_unlock(&mMutex);
2668 return NO_MEMORY;
2669 }
2670 newStream->priv = mDepthChannel;
2671 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2672 } else {
2673 // Cap live snapshot inflight buffers at 1 to mitigate frame drops
2674 // during video snapshot: the more buffers allocated, the more
2675 // frames are dropped.
2676 mPictureChannel = new QCamera3PicChannel(
2677 mCameraHandle->camera_handle, mChannelHandle,
2678 mCameraHandle->ops, captureResultCb,
2679 setBufferErrorStatus, &padding_info, this, newStream,
2680 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2681 m_bIs4KVideo, isZsl, mMetadataChannel,
2682 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2683 if (mPictureChannel == NULL) {
2684 LOGE("allocation of channel failed");
2685 pthread_mutex_unlock(&mMutex);
2686 return -ENOMEM;
2687 }
2688 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2689 newStream->max_buffers = mPictureChannel->getNumBuffers();
2690 mPictureChannel->overrideYuvSize(
2691 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2692 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002693 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002694 break;
2695
2696 default:
2697 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002698 pthread_mutex_unlock(&mMutex);
2699 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002700 }
2701 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2702 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2703 } else {
2704 LOGE("Error, Unknown stream type");
2705 pthread_mutex_unlock(&mMutex);
2706 return -EINVAL;
2707 }
2708
2709 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002710 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2711 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002712 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002713 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002714 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2715 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2716 }
2717 }
2718
2719 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2720 it != mStreamInfo.end(); it++) {
2721 if ((*it)->stream == newStream) {
2722 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2723 break;
2724 }
2725 }
2726 } else {
2727 // Channel already exists for this stream
2728 // Do nothing for now
2729 }
2730 padding_info = gCamCapability[mCameraId]->padding_info;
2731
Emilian Peev7650c122017-01-19 08:24:33 -08002732 /* Do not add entries for the input and depth streams in the metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002733 * since there is no real stream associated with them
2734 */
Emilian Peev7650c122017-01-19 08:24:33 -08002735 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002736 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2737 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002738 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002739 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002740 }
2741
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002742 // Let buffer dispatcher know the configured streams.
2743 mOutputBufferDispatcher.configureStreams(streamList);
2744
Binhao Lincdb362a2017-04-20 13:31:54 -07002745 // By default, preview stream TNR is disabled.
2746 // Enable TNR on the preview stream only if all conditions below are satisfied:
2747 // 1. video resolution <= 1080p.
2748 // 2. preview resolution == video resolution.
2749 // 3. video stream TNR is enabled.
2750 // 4. EIS 2.0 is in use.
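// Illustrative example (not from the original source): a 1920x1080 video
// stream with TNR and EIS 2.0, paired with a 1920x1080 preview, gets
// CAM_QCOM_FEATURE_CPP_TNR added to the preview postprocess mask below,
// and CAM_QCOM_FEATURE_CDS cleared since TNR and CDS are mutually exclusive.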
2751 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2752 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2753 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2754 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2755 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2756 video_stream->width == preview_stream->width &&
2757 video_stream->height == preview_stream->height) {
2758 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2759 CAM_QCOM_FEATURE_CPP_TNR;
2760 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2761 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2762 ~CAM_QCOM_FEATURE_CDS;
2763 }
2764 }
2765
Thierry Strudel2896d122017-02-23 19:18:03 -08002766 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2767 onlyRaw = false;
2768 }
2769
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002770 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002771 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002772 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002773 cam_analysis_info_t analysisInfo;
2774 int32_t ret = NO_ERROR;
2775 ret = mCommon.getAnalysisInfo(
2776 FALSE,
2777 analysisFeatureMask,
2778 &analysisInfo);
2779 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002780 cam_color_filter_arrangement_t analysis_color_arrangement =
2781 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2782 CAM_FILTER_ARRANGEMENT_Y :
2783 gCamCapability[mCameraId]->color_arrangement);
2784 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2785 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002786 cam_dimension_t analysisDim;
2787 analysisDim = mCommon.getMatchingDimension(previewSize,
2788 analysisInfo.analysis_recommended_res);
2789
2790 mAnalysisChannel = new QCamera3SupportChannel(
2791 mCameraHandle->camera_handle,
2792 mChannelHandle,
2793 mCameraHandle->ops,
2794 &analysisInfo.analysis_padding_info,
2795 analysisFeatureMask,
2796 CAM_STREAM_TYPE_ANALYSIS,
2797 &analysisDim,
2798 (analysisInfo.analysis_format
2799 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2800 : CAM_FORMAT_YUV_420_NV21),
2801 analysisInfo.hw_analysis_supported,
2802 gCamCapability[mCameraId]->color_arrangement,
2803 this,
2804 0); // force buffer count to 0
2805 } else {
2806 LOGW("getAnalysisInfo failed, ret = %d", ret);
2807 }
2808 if (!mAnalysisChannel) {
2809 LOGW("Analysis channel cannot be created");
2810 }
2811 }
2812
Thierry Strudel3d639192016-09-09 11:52:26 -07002813 //RAW DUMP channel
2814 if (mEnableRawDump && !isRawStreamRequested) {
2815 cam_dimension_t rawDumpSize;
2816 rawDumpSize = getMaxRawSize(mCameraId);
2817 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2818 setPAAFSupport(rawDumpFeatureMask,
2819 CAM_STREAM_TYPE_RAW,
2820 gCamCapability[mCameraId]->color_arrangement);
2821 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2822 mChannelHandle,
2823 mCameraHandle->ops,
2824 rawDumpSize,
2825 &padding_info,
2826 this, rawDumpFeatureMask);
2827 if (!mRawDumpChannel) {
2828 LOGE("Raw Dump channel cannot be created");
2829 pthread_mutex_unlock(&mMutex);
2830 return -ENOMEM;
2831 }
2832 }
2833
Thierry Strudel3d639192016-09-09 11:52:26 -07002834 if (mAnalysisChannel) {
2835 cam_analysis_info_t analysisInfo;
2836 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2837 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2838 CAM_STREAM_TYPE_ANALYSIS;
2839 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2840 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002841 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002842 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2843 &analysisInfo);
2844 if (rc != NO_ERROR) {
2845 LOGE("getAnalysisInfo failed, ret = %d", rc);
2846 pthread_mutex_unlock(&mMutex);
2847 return rc;
2848 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002849 cam_color_filter_arrangement_t analysis_color_arrangement =
2850 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2851 CAM_FILTER_ARRANGEMENT_Y :
2852 gCamCapability[mCameraId]->color_arrangement);
2853 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2854 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2855 analysis_color_arrangement);
2856
Thierry Strudel3d639192016-09-09 11:52:26 -07002857 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002858 mCommon.getMatchingDimension(previewSize,
2859 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002860 mStreamConfigInfo.num_streams++;
2861 }
2862
Thierry Strudel2896d122017-02-23 19:18:03 -08002863 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002864 cam_analysis_info_t supportInfo;
2865 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2866 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2867 setPAAFSupport(callbackFeatureMask,
2868 CAM_STREAM_TYPE_CALLBACK,
2869 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002870 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002871 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002872 if (ret != NO_ERROR) {
2873 /* Ignore the error for Mono camera
2874 * because the PAAF bit mask is only set
2875 * for CAM_STREAM_TYPE_ANALYSIS stream type
2876 */
2877 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2878 LOGW("getAnalysisInfo failed, ret = %d", ret);
2879 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002880 }
2881 mSupportChannel = new QCamera3SupportChannel(
2882 mCameraHandle->camera_handle,
2883 mChannelHandle,
2884 mCameraHandle->ops,
2885 &gCamCapability[mCameraId]->padding_info,
2886 callbackFeatureMask,
2887 CAM_STREAM_TYPE_CALLBACK,
2888 &QCamera3SupportChannel::kDim,
2889 CAM_FORMAT_YUV_420_NV21,
2890 supportInfo.hw_analysis_supported,
2891 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002892 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002893 if (!mSupportChannel) {
2894 LOGE("dummy channel cannot be created");
2895 pthread_mutex_unlock(&mMutex);
2896 return -ENOMEM;
2897 }
2898 }
2899
2900 if (mSupportChannel) {
2901 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2902 QCamera3SupportChannel::kDim;
2903 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2904 CAM_STREAM_TYPE_CALLBACK;
2905 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2906 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2907 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2908 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2909 gCamCapability[mCameraId]->color_arrangement);
2910 mStreamConfigInfo.num_streams++;
2911 }
2912
2913 if (mRawDumpChannel) {
2914 cam_dimension_t rawSize;
2915 rawSize = getMaxRawSize(mCameraId);
2916 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2917 rawSize;
2918 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2919 CAM_STREAM_TYPE_RAW;
2920 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2921 CAM_QCOM_FEATURE_NONE;
2922 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2923 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2924 gCamCapability[mCameraId]->color_arrangement);
2925 mStreamConfigInfo.num_streams++;
2926 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002927
2928 if (mHdrPlusRawSrcChannel) {
2929 cam_dimension_t rawSize;
2930 rawSize = getMaxRawSize(mCameraId);
2931 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2932 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2933 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2934 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2935 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2936 gCamCapability[mCameraId]->color_arrangement);
2937 mStreamConfigInfo.num_streams++;
2938 }
2939
Thierry Strudel3d639192016-09-09 11:52:26 -07002940 /* In HFR mode, if no video stream is added, create a dummy channel so that
2941 * the ISP can still run in batch mode for the preview-only case. This channel
2942 * is never 'start'ed (no stream-on); it is only 'initialized' */
2943 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2944 !m_bIsVideo) {
2945 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2946 setPAAFSupport(dummyFeatureMask,
2947 CAM_STREAM_TYPE_VIDEO,
2948 gCamCapability[mCameraId]->color_arrangement);
2949 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2950 mChannelHandle,
2951 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002952 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002953 this,
2954 &mDummyBatchStream,
2955 CAM_STREAM_TYPE_VIDEO,
2956 dummyFeatureMask,
2957 mMetadataChannel);
2958 if (NULL == mDummyBatchChannel) {
2959 LOGE("creation of mDummyBatchChannel failed. "
2960 "Preview will use non-HFR sensor mode");
2961 }
2962 }
2963 if (mDummyBatchChannel) {
2964 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2965 mDummyBatchStream.width;
2966 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2967 mDummyBatchStream.height;
2968 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2969 CAM_STREAM_TYPE_VIDEO;
2970 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2971 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2972 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2973 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2974 gCamCapability[mCameraId]->color_arrangement);
2975 mStreamConfigInfo.num_streams++;
2976 }
2977
2978 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2979 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002980 m_bIs4KVideo ? 0 :
2981 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
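// The nested ternary above resolves as: 4K video -> 0; otherwise EIS 3.0
// enabled -> MAX_VIDEO_BUFFERS; otherwise MAX_INFLIGHT_REQUESTS.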
Thierry Strudel3d639192016-09-09 11:52:26 -07002982
2983 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2984 for (pendingRequestIterator i = mPendingRequestsList.begin();
2985 i != mPendingRequestsList.end();) {
2986 i = erasePendingRequest(i);
2987 }
2988 mPendingFrameDropList.clear();
2989 // Initialize/Reset the pending buffers list
2990 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2991 req.mPendingBufferList.clear();
2992 }
2993 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2994
Thierry Strudel3d639192016-09-09 11:52:26 -07002995 mCurJpegMeta.clear();
2996 //Get min frame duration for this streams configuration
2997 deriveMinFrameDuration();
2998
Chien-Yu Chenee335912017-02-09 17:53:20 -08002999 mFirstPreviewIntentSeen = false;
3000
3001 // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003002 {
3003 Mutex::Autolock l(gHdrPlusClientLock);
3004 disableHdrPlusModeLocked();
3005 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003006
Thierry Strudel3d639192016-09-09 11:52:26 -07003007 // Update state
3008 mState = CONFIGURED;
3009
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003010 mFirstMetadataCallback = true;
3011
Thierry Strudel3d639192016-09-09 11:52:26 -07003012 pthread_mutex_unlock(&mMutex);
3013
3014 return rc;
3015}
3016
3017/*===========================================================================
3018 * FUNCTION : validateCaptureRequest
3019 *
3020 * DESCRIPTION: validate a capture request from camera service
3021 *
3022 * PARAMETERS :
3023 * @request : request from framework to process
3024 *
3025 * RETURN :
3026 *
3027 *==========================================================================*/
3028int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003029 camera3_capture_request_t *request,
3030 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003031{
3032 ssize_t idx = 0;
3033 const camera3_stream_buffer_t *b;
3034 CameraMetadata meta;
3035
3036 /* Sanity check the request */
3037 if (request == NULL) {
3038 LOGE("NULL capture request");
3039 return BAD_VALUE;
3040 }
3041
3042 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3043 /*settings cannot be null for the first request*/
3044 return BAD_VALUE;
3045 }
3046
3047 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003048 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3049 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003050 LOGE("%s: Request %d: No output buffers provided!",
3051 __FUNCTION__, frameNumber);
3052 return BAD_VALUE;
3053 }
3054 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3055 LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
3056 request->num_output_buffers, MAX_NUM_STREAMS);
3057 return BAD_VALUE;
3058 }
3059 if (request->input_buffer != NULL) {
3060 b = request->input_buffer;
3061 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3062 LOGE("Request %d: Buffer %ld: Status not OK!",
3063 frameNumber, (long)idx);
3064 return BAD_VALUE;
3065 }
3066 if (b->release_fence != -1) {
3067 LOGE("Request %d: Buffer %ld: Has a release fence!",
3068 frameNumber, (long)idx);
3069 return BAD_VALUE;
3070 }
3071 if (b->buffer == NULL) {
3072 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3073 frameNumber, (long)idx);
3074 return BAD_VALUE;
3075 }
3076 }
3077
3078 // Validate all buffers
3079 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003080 if (b == NULL) {
3081 return BAD_VALUE;
3082 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003083 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003084 QCamera3ProcessingChannel *channel =
3085 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3086 if (channel == NULL) {
3087 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3088 frameNumber, (long)idx);
3089 return BAD_VALUE;
3090 }
3091 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3092 LOGE("Request %d: Buffer %ld: Status not OK!",
3093 frameNumber, (long)idx);
3094 return BAD_VALUE;
3095 }
3096 if (b->release_fence != -1) {
3097 LOGE("Request %d: Buffer %ld: Has a release fence!",
3098 frameNumber, (long)idx);
3099 return BAD_VALUE;
3100 }
3101 if (b->buffer == NULL) {
3102 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3103 frameNumber, (long)idx);
3104 return BAD_VALUE;
3105 }
3106 if (*(b->buffer) == NULL) {
3107 LOGE("Request %d: Buffer %ld: NULL private handle!",
3108 frameNumber, (long)idx);
3109 return BAD_VALUE;
3110 }
3111 idx++;
3112 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003113 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003114 return NO_ERROR;
3115}
3116
3117/*===========================================================================
3118 * FUNCTION : deriveMinFrameDuration
3119 *
3120 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3121 * on currently configured streams.
3122 *
3123 * PARAMETERS : NONE
3124 *
3125 * RETURN : NONE
3126 *
3127 *==========================================================================*/
3128void QCamera3HardwareInterface::deriveMinFrameDuration()
3129{
3130 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3131
3132 maxJpegDim = 0;
3133 maxProcessedDim = 0;
3134 maxRawDim = 0;
3135
3136 // Figure out maximum jpeg, processed, and raw dimensions
3137 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3138 it != mStreamInfo.end(); it++) {
3139
3140 // Input stream doesn't have valid stream_type
3141 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3142 continue;
3143
3144 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3145 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3146 if (dimension > maxJpegDim)
3147 maxJpegDim = dimension;
3148 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3149 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3150 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3151 if (dimension > maxRawDim)
3152 maxRawDim = dimension;
3153 } else {
3154 if (dimension > maxProcessedDim)
3155 maxProcessedDim = dimension;
3156 }
3157 }
3158
3159 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3160 MAX_SIZES_CNT);
3161
3162 //Treat all jpeg dimensions as processed dimensions.
3163 if (maxJpegDim > maxProcessedDim)
3164 maxProcessedDim = maxJpegDim;
3165 //Find the smallest raw dimension that is greater than or equal to the max processed dimension
3166 if (maxProcessedDim > maxRawDim) {
3167 maxRawDim = INT32_MAX;
3168
3169 for (size_t i = 0; i < count; i++) {
3170 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3171 gCamCapability[mCameraId]->raw_dim[i].height;
3172 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3173 maxRawDim = dimension;
3174 }
3175 }
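// Illustrative example (not from the original source): if the largest
// processed/JPEG stream is 8 MP and the sensor exposes 8 MP and 16 MP RAW
// sizes, the loop above selects the 8 MP RAW entry as maxRawDim.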
3176
3177 //Find minimum durations for processed, jpeg, and raw
3178 for (size_t i = 0; i < count; i++) {
3179 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3180 gCamCapability[mCameraId]->raw_dim[i].height) {
3181 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3182 break;
3183 }
3184 }
3185 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3186 for (size_t i = 0; i < count; i++) {
3187 if (maxProcessedDim ==
3188 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3189 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3190 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3191 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3192 break;
3193 }
3194 }
3195}
3196
3197/*===========================================================================
3198 * FUNCTION : getMinFrameDuration
3199 *
3200 * DESCRIPTION: get the minimum frame duration based on the per-stream minimum
3201 * durations and the current request configuration.
3202 *
3203 * PARAMETERS : @request: request sent by the framework
3204 *
3205 * RETURN : min frame duration for a particular request
3206 *
3207 *==========================================================================*/
3208int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3209{
3210 bool hasJpegStream = false;
3211 bool hasRawStream = false;
3212 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3213 const camera3_stream_t *stream = request->output_buffers[i].stream;
3214 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3215 hasJpegStream = true;
3216 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3217 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3218 stream->format == HAL_PIXEL_FORMAT_RAW16)
3219 hasRawStream = true;
3220 }
3221
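// Illustrative example (not from the original source): a request with only
// RAW and preview buffers returns MAX(mMinRawFrameDuration,
// mMinProcessedFrameDuration); adding a JPEG (BLOB) buffer also folds in
// mMinJpegFrameDuration.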
3222 if (!hasJpegStream)
3223 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3224 else
3225 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3226}
3227
3228/*===========================================================================
3229 * FUNCTION : handleBuffersDuringFlushLock
3230 *
3231 * DESCRIPTION: Account for buffers returned from back-end during flush
3232 * This function is executed while mMutex is held by the caller.
3233 *
3234 * PARAMETERS :
3235 * @buffer: image buffer for the callback
3236 *
3237 * RETURN :
3238 *==========================================================================*/
3239void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3240{
3241 bool buffer_found = false;
3242 for (List<PendingBuffersInRequest>::iterator req =
3243 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3244 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3245 for (List<PendingBufferInfo>::iterator i =
3246 req->mPendingBufferList.begin();
3247 i != req->mPendingBufferList.end(); i++) {
3248 if (i->buffer == buffer->buffer) {
3249 mPendingBuffersMap.numPendingBufsAtFlush--;
3250 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3251 buffer->buffer, req->frame_number,
3252 mPendingBuffersMap.numPendingBufsAtFlush);
3253 buffer_found = true;
3254 break;
3255 }
3256 }
3257 if (buffer_found) {
3258 break;
3259 }
3260 }
3261 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3262 //signal the flush()
3263 LOGD("All buffers returned to HAL. Continue flush");
3264 pthread_cond_signal(&mBuffersCond);
3265 }
3266}
3267
Thierry Strudel3d639192016-09-09 11:52:26 -07003268/*===========================================================================
3269 * FUNCTION : handleBatchMetadata
3270 *
3271 * DESCRIPTION: Handles metadata buffer callback in batch mode
3272 *
3273 * PARAMETERS : @metadata_buf: metadata buffer
3274 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3275 * the meta buf in this method
3276 *
3277 * RETURN :
3278 *
3279 *==========================================================================*/
3280void QCamera3HardwareInterface::handleBatchMetadata(
3281 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3282{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003283 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003284
3285 if (NULL == metadata_buf) {
3286 LOGE("metadata_buf is NULL");
3287 return;
3288 }
3289 /* In batch mode, the metadata will contain the frame number and timestamp of
3290 * the last frame in the batch. E.g.: a batch containing buffers from requests
3291 * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8.
3292 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3293 * multiple process_capture_results */
3294 metadata_buffer_t *metadata =
3295 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3296 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3297 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3298 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3299 uint32_t frame_number = 0, urgent_frame_number = 0;
3300 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3301 bool invalid_metadata = false;
3302 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3303 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003304 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003305
3306 int32_t *p_frame_number_valid =
3307 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3308 uint32_t *p_frame_number =
3309 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3310 int64_t *p_capture_time =
3311 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3312 int32_t *p_urgent_frame_number_valid =
3313 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3314 uint32_t *p_urgent_frame_number =
3315 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3316
3317 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3318 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3319 (NULL == p_urgent_frame_number)) {
3320 LOGE("Invalid metadata");
3321 invalid_metadata = true;
3322 } else {
3323 frame_number_valid = *p_frame_number_valid;
3324 last_frame_number = *p_frame_number;
3325 last_frame_capture_time = *p_capture_time;
3326 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3327 last_urgent_frame_number = *p_urgent_frame_number;
3328 }
3329
3330 /* In batch mode, when no video buffers are requested, set_parms are sent
3331 * for every capture_request. The difference between consecutive urgent
3332 * frame numbers and frame numbers is used to interpolate the
3333 * corresponding frame numbers and timestamps */
3334 pthread_mutex_lock(&mMutex);
3335 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003336 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3337 if(idx < 0) {
3338 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3339 last_urgent_frame_number);
3340 mState = ERROR;
3341 pthread_mutex_unlock(&mMutex);
3342 return;
3343 }
3344 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003345 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3346 first_urgent_frame_number;
3347
3348 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3349 urgent_frame_number_valid,
3350 first_urgent_frame_number, last_urgent_frame_number);
3351 }
3352
3353 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003354 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3355 if(idx < 0) {
3356 LOGE("Invalid frame number received: %d. Irrecoverable error",
3357 last_frame_number);
3358 mState = ERROR;
3359 pthread_mutex_unlock(&mMutex);
3360 return;
3361 }
3362 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003363 frameNumDiff = last_frame_number + 1 -
3364 first_frame_number;
3365 mPendingBatchMap.removeItem(last_frame_number);
3366
3367 LOGD("frm: valid: %d frm_num: %d - %d",
3368 frame_number_valid,
3369 first_frame_number, last_frame_number);
3370
3371 }
3372 pthread_mutex_unlock(&mMutex);
3373
3374 if (urgent_frame_number_valid || frame_number_valid) {
3375 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3376 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3377 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3378 urgentFrameNumDiff, last_urgent_frame_number);
3379 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3380 LOGE("frameNumDiff: %d frameNum: %d",
3381 frameNumDiff, last_frame_number);
3382 }
3383
3384 for (size_t i = 0; i < loopCount; i++) {
3385 /* handleMetadataWithLock is called even for invalid_metadata for
3386 * pipeline depth calculation */
3387 if (!invalid_metadata) {
3388 /* Infer frame number. Batch metadata contains frame number of the
3389 * last frame */
3390 if (urgent_frame_number_valid) {
3391 if (i < urgentFrameNumDiff) {
3392 urgent_frame_number =
3393 first_urgent_frame_number + i;
3394 LOGD("inferred urgent frame_number: %d",
3395 urgent_frame_number);
3396 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3397 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3398 } else {
3399 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3400 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3401 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3402 }
3403 }
3404
3405 /* Infer frame number. Batch metadata contains frame number of the
3406 * last frame */
3407 if (frame_number_valid) {
3408 if (i < frameNumDiff) {
3409 frame_number = first_frame_number + i;
3410 LOGD("inferred frame_number: %d", frame_number);
3411 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3412 CAM_INTF_META_FRAME_NUMBER, frame_number);
3413 } else {
3414 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3415 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3416 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3417 }
3418 }
3419
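// Illustrative example (not from the original source): with mHFRVideoFps =
// 120 and a batch of 4 (loopCount = 4), the inferred timestamps below are
// spaced one frame period (~8.33 ms) apart, and the first frame is
// back-dated three periods from the batch timestamp.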
3420 if (last_frame_capture_time) {
3421 //Infer timestamp
3422 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003423 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003424 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003425 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003426 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3427 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3428 LOGD("batch capture_time: %lld, capture_time: %lld",
3429 last_frame_capture_time, capture_time);
3430 }
3431 }
3432 pthread_mutex_lock(&mMutex);
3433 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003434 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003435 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3436 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003437 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003438 pthread_mutex_unlock(&mMutex);
3439 }
3440
3441 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003442 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003443 mMetadataChannel->bufDone(metadata_buf);
3444 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003445 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003446 }
3447}
3448
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003449void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3450 camera3_error_msg_code_t errorCode)
3451{
3452 camera3_notify_msg_t notify_msg;
3453 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3454 notify_msg.type = CAMERA3_MSG_ERROR;
3455 notify_msg.message.error.error_code = errorCode;
3456 notify_msg.message.error.error_stream = NULL;
3457 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003458 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003459
3460 return;
3461}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003462
3463/*===========================================================================
3464 * FUNCTION : sendPartialMetadataWithLock
3465 *
3466 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3467 *
3468 * PARAMETERS : @metadata: metadata buffer
3469 * @requestIter: The iterator for the pending capture request for
3470 * which the partial result is being sen
3471 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3472 * last urgent metadata in a batch. Always true for non-batch mode
3473 *
3474 * RETURN :
3475 *
3476 *==========================================================================*/
3477
3478void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3479 metadata_buffer_t *metadata,
3480 const pendingRequestIterator requestIter,
3481 bool lastUrgentMetadataInBatch)
3482{
3483 camera3_capture_result_t result;
3484 memset(&result, 0, sizeof(camera3_capture_result_t));
3485
3486 requestIter->partial_result_cnt++;
3487
3488 // Extract 3A metadata
3489 result.result = translateCbUrgentMetadataToResultMetadata(
3490 metadata, lastUrgentMetadataInBatch);
3491 // Populate metadata result
3492 result.frame_number = requestIter->frame_number;
3493 result.num_output_buffers = 0;
3494 result.output_buffers = NULL;
3495 result.partial_result = requestIter->partial_result_cnt;
3496
3497 {
3498 Mutex::Autolock l(gHdrPlusClientLock);
3499 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3500 // Notify HDR+ client about the partial metadata.
3501 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3502 result.partial_result == PARTIAL_RESULT_COUNT);
3503 }
3504 }
3505
3506 orchestrateResult(&result);
3507 LOGD("urgent frame_number = %u", result.frame_number);
3508 free_camera_metadata((camera_metadata_t *)result.result);
3509}
3510
Thierry Strudel3d639192016-09-09 11:52:26 -07003511/*===========================================================================
3512 * FUNCTION : handleMetadataWithLock
3513 *
3514 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3515 *
3516 * PARAMETERS : @metadata_buf: metadata buffer
3517 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3518 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003519 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3520 * last urgent metadata in a batch. Always true for non-batch mode
3521 * @lastMetadataInBatch: Boolean to indicate whether this is the
3522 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003523 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3524 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003525 *
3526 * RETURN :
3527 *
3528 *==========================================================================*/
3529void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003530 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003531 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3532 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003533{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003534 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003535 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3536 //during flush do not send metadata from this thread
3537 LOGD("not sending metadata during flush or when mState is error");
3538 if (free_and_bufdone_meta_buf) {
3539 mMetadataChannel->bufDone(metadata_buf);
3540 free(metadata_buf);
3541 }
3542 return;
3543 }
3544
3545 //not in flush
3546 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3547 int32_t frame_number_valid, urgent_frame_number_valid;
3548 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003549 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003550 nsecs_t currentSysTime;
3551
3552 int32_t *p_frame_number_valid =
3553 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3554 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3555 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003556 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003557 int32_t *p_urgent_frame_number_valid =
3558 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3559 uint32_t *p_urgent_frame_number =
3560 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3561 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3562 metadata) {
3563 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3564 *p_frame_number_valid, *p_frame_number);
3565 }
3566
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003567 camera_metadata_t *resultMetadata = nullptr;
3568
Thierry Strudel3d639192016-09-09 11:52:26 -07003569 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3570 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3571 LOGE("Invalid metadata");
3572 if (free_and_bufdone_meta_buf) {
3573 mMetadataChannel->bufDone(metadata_buf);
3574 free(metadata_buf);
3575 }
3576 goto done_metadata;
3577 }
3578 frame_number_valid = *p_frame_number_valid;
3579 frame_number = *p_frame_number;
3580 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003581 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003582 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3583 urgent_frame_number = *p_urgent_frame_number;
3584 currentSysTime = systemTime(CLOCK_MONOTONIC);
3585
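// When the sensor timestamp is not calibrated, it appears to be on a
// different clock base (BOOTTIME) than the MONOTONIC clock used elsewhere
// in this function; the block below estimates the BOOTTIME-MONOTONIC offset
// using the tightest of three bracketed reads and subtracts it from
// capture_time.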
Jason Lee603176d2017-05-31 11:43:27 -07003586 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3587 const int tries = 3;
3588 nsecs_t bestGap, measured;
3589 for (int i = 0; i < tries; ++i) {
3590 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3591 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3592 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3593 const nsecs_t gap = tmono2 - tmono;
3594 if (i == 0 || gap < bestGap) {
3595 bestGap = gap;
3596 measured = tbase - ((tmono + tmono2) >> 1);
3597 }
3598 }
3599 capture_time -= measured;
3600 }
3601
Thierry Strudel3d639192016-09-09 11:52:26 -07003602 // Detect if buffers from any requests are overdue
3603 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003604 int64_t timeout;
3605 {
3606 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3607 // If there is a pending HDR+ request, the following requests may be blocked until the
3608 // HDR+ request is done. So allow a longer timeout.
3609 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3610 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3611 }
3612
3613 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003614 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003615 assert(missed.stream->priv);
3616 if (missed.stream->priv) {
3617 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3618 assert(ch->mStreams[0]);
3619 if (ch->mStreams[0]) {
3620 LOGE("Cancel missing frame = %d, buffer = %p, "
3621 "stream type = %d, stream format = %d",
3622 req.frame_number, missed.buffer,
3623 ch->mStreams[0]->getMyType(), missed.stream->format);
3624 ch->timeoutFrame(req.frame_number);
3625 }
3626 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003627 }
3628 }
3629 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003630 //For the very first metadata callback, regardless of whether it contains a valid
3631 //frame number, send the partial metadata for the jumpstarting requests.
3632 //Note that this has to be done even if the metadata doesn't contain a valid
3633 //urgent frame number, because in the case where only 1 request is ever submitted
3634 //to the HAL, there won't be a subsequent valid urgent frame number.
3635 if (mFirstMetadataCallback) {
3636 for (pendingRequestIterator i =
3637 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3638 if (i->bUseFirstPartial) {
3639 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3640 }
3641 }
3642 mFirstMetadataCallback = false;
3643 }
3644
Thierry Strudel3d639192016-09-09 11:52:26 -07003645 //Partial result on process_capture_result for timestamp
3646 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003647 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003648
3649 //Recieved an urgent Frame Number, handle it
3650 //using partial results
3651 for (pendingRequestIterator i =
3652 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3653 LOGD("Iterator Frame = %d urgent frame = %d",
3654 i->frame_number, urgent_frame_number);
3655
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003656 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003657 (i->partial_result_cnt == 0)) {
3658 LOGE("Error: HAL missed urgent metadata for frame number %d",
3659 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003660 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003661 }
3662
3663 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003664 i->partial_result_cnt == 0) {
3665 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003666 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3667 // Instant AEC settled for this frame.
3668 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3669 mInstantAECSettledFrameNumber = urgent_frame_number;
3670 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003671 break;
3672 }
3673 }
3674 }
3675
3676 if (!frame_number_valid) {
3677 LOGD("Not a valid normal frame number, used as SOF only");
3678 if (free_and_bufdone_meta_buf) {
3679 mMetadataChannel->bufDone(metadata_buf);
3680 free(metadata_buf);
3681 }
3682 goto done_metadata;
3683 }
3684 LOGH("valid frame_number = %u, capture_time = %lld",
3685 frame_number, capture_time);
3686
Emilian Peev7650c122017-01-19 08:24:33 -08003687 if (metadata->is_depth_data_valid) {
3688 handleDepthDataLocked(metadata->depth_data, frame_number);
3689 }
3690
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003691 // Check whether any stream buffer corresponding to this frame was dropped.
3692 // If dropped, send ERROR_BUFFER for the corresponding stream.
3693 // Also, if instant AEC is enabled, drop frames until AEC has settled.
3694 for (auto & pendingRequest : mPendingRequestsList) {
3695 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3696 mInstantAECSettledFrameNumber)) {
3697 camera3_notify_msg_t notify_msg = {};
3698 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003699 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003700 QCamera3ProcessingChannel *channel =
3701 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003702 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003703 if (p_cam_frame_drop) {
3704 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003705 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003706 // Got the stream ID for drop frame.
3707 dropFrame = true;
3708 break;
3709 }
3710 }
3711 } else {
3712 // This is instant AEC case.
3713 // For instant AEC, drop the stream until AEC is settled.
3714 dropFrame = true;
3715 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003716
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003717 if (dropFrame) {
3718 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3719 if (p_cam_frame_drop) {
3720 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003721 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003722 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003723 } else {
3724 // For instant AEC, inform frame drop and frame number
3725 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3726 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003727 pendingRequest.frame_number, streamID,
3728 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003729 }
3730 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003731 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003732 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003733 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003734 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003735 if (p_cam_frame_drop) {
3736 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003737 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003738 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003739 } else {
3740 // For instant AEC, inform frame drop and frame number
3741 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3742 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003743 pendingRequest.frame_number, streamID,
3744 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003745 }
3746 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003747 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003748 PendingFrameDrop.stream_ID = streamID;
3749 // Add the Frame drop info to mPendingFrameDropList
3750 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003751 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003752 }
3753 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003754 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003755
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003756 for (auto & pendingRequest : mPendingRequestsList) {
3757 // Find the pending request with the frame number.
3758 if (pendingRequest.frame_number == frame_number) {
3759 // Update the sensor timestamp.
3760 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003761
Thierry Strudel3d639192016-09-09 11:52:26 -07003762
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003763 /* Set the timestamp in display metadata so that clients aware of
3764 private_handle, such as VT, can use these unmodified timestamps.
3765 The camera framework is unaware of this timestamp and cannot change it */
Jason Lee603176d2017-05-31 11:43:27 -07003766 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003767
Thierry Strudel3d639192016-09-09 11:52:26 -07003768 // Find channel requiring metadata, meaning internal offline postprocess
3769 // is needed.
3770 //TODO: for now, we don't support two streams requiring metadata at the same time
3771 // (because we are not making copies, and the metadata buffer is not reference counted).
3772 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003773 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3774 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003775 if (iter->need_metadata) {
3776 internalPproc = true;
3777 QCamera3ProcessingChannel *channel =
3778 (QCamera3ProcessingChannel *)iter->stream->priv;
3779 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003780 if(p_is_metabuf_queued != NULL) {
3781 *p_is_metabuf_queued = true;
3782 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003783 break;
3784 }
3785 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003786 for (auto itr = pendingRequest.internalRequestList.begin();
3787 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003788 if (itr->need_metadata) {
3789 internalPproc = true;
3790 QCamera3ProcessingChannel *channel =
3791 (QCamera3ProcessingChannel *)itr->stream->priv;
3792 channel->queueReprocMetadata(metadata_buf);
3793 break;
3794 }
3795 }
3796
Thierry Strudel54dc9782017-02-15 12:12:10 -08003797 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003798
3799 bool *enableZsl = nullptr;
3800 if (gExposeEnableZslKey) {
3801 enableZsl = &pendingRequest.enableZsl;
3802 }
3803
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003804 resultMetadata = translateFromHalMetadata(metadata,
3805 pendingRequest.timestamp, pendingRequest.request_id,
3806 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3807 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003808 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003809 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003810 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003811 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003812 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003813 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003814
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003815 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003816
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003817 if (pendingRequest.blob_request) {
3818 //Dump tuning metadata if enabled and available
3819 char prop[PROPERTY_VALUE_MAX];
3820 memset(prop, 0, sizeof(prop));
3821 property_get("persist.camera.dumpmetadata", prop, "0");
3822 int32_t enabled = atoi(prop);
3823 if (enabled && metadata->is_tuning_params_valid) {
3824 dumpMetadataToFile(metadata->tuning_params,
3825 mMetaFrameCount,
3826 enabled,
3827 "Snapshot",
3828 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003829 }
3830 }
3831
3832 if (!internalPproc) {
3833 LOGD("couldn't find need_metadata for this metadata");
3834 // Return metadata buffer
3835 if (free_and_bufdone_meta_buf) {
3836 mMetadataChannel->bufDone(metadata_buf);
3837 free(metadata_buf);
3838 }
3839 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003840
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003841 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003842 }
3843 }
3844
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003845 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3846
3847 // Try to send out capture result metadata.
3848 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003849 return;
3850
Thierry Strudel3d639192016-09-09 11:52:26 -07003851done_metadata:
3852 for (pendingRequestIterator i = mPendingRequestsList.begin();
3853 i != mPendingRequestsList.end() ;i++) {
3854 i->pipeline_depth++;
3855 }
3856 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3857 unblockRequestIfNecessary();
3858}
3859
3860/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003861 * FUNCTION : handleDepthDataLocked
3862 *
3863 * DESCRIPTION: Handles incoming depth data
3864 *
3865 * PARAMETERS : @depthData : Depth data
3866 * @frameNumber: Frame number of the incoming depth data
3867 *
3868 * RETURN :
3869 *
3870 *==========================================================================*/
3871void QCamera3HardwareInterface::handleDepthDataLocked(
3872 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3873 uint32_t currentFrameNumber;
3874 buffer_handle_t *depthBuffer;
3875
3876 if (nullptr == mDepthChannel) {
3877 LOGE("Depth channel not present!");
3878 return;
3879 }
3880
3881 camera3_stream_buffer_t resultBuffer =
3882 {.acquire_fence = -1,
3883 .release_fence = -1,
3884 .status = CAMERA3_BUFFER_STATUS_OK,
3885 .buffer = nullptr,
3886 .stream = mDepthChannel->getStream()};
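// Drain pending depth buffers in frame order: buffers older than the
// incoming frame number are returned with ERROR status (their depth data
// never arrived), the matching buffer is populated and returned, and newer
// buffers are left pending.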
Emilian Peev7650c122017-01-19 08:24:33 -08003887 do {
3888 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3889 if (nullptr == depthBuffer) {
3890 break;
3891 }
3892
Emilian Peev7650c122017-01-19 08:24:33 -08003893 resultBuffer.buffer = depthBuffer;
3894 if (currentFrameNumber == frameNumber) {
3895 int32_t rc = mDepthChannel->populateDepthData(depthData,
3896 frameNumber);
3897 if (NO_ERROR != rc) {
3898 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3899 } else {
3900 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3901 }
3902 } else if (currentFrameNumber > frameNumber) {
3903 break;
3904 } else {
3905 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3906 {{currentFrameNumber, mDepthChannel->getStream(),
3907 CAMERA3_MSG_ERROR_BUFFER}}};
3908 orchestrateNotify(&notify_msg);
3909
3910 LOGE("Depth buffer for frame number: %d is missing, "
3911 "returning it with error!", currentFrameNumber);
3912 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3913 }
3914 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003915 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003916 } while (currentFrameNumber < frameNumber);
3917}
3918
3919/*===========================================================================
3920 * FUNCTION : notifyErrorFoPendingDepthData
3921 *
3922 * DESCRIPTION: Returns error for any pending depth buffers
3923 *
3924 * PARAMETERS : depthCh - depth channel that needs to get flushed
3925 *
3926 * RETURN :
3927 *
3928 *==========================================================================*/
3929void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3930 QCamera3DepthChannel *depthCh) {
3931 uint32_t currentFrameNumber;
3932 buffer_handle_t *depthBuffer;
3933
3934 if (nullptr == depthCh) {
3935 return;
3936 }
3937
3938 camera3_notify_msg_t notify_msg =
3939 {.type = CAMERA3_MSG_ERROR,
3940 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3941 camera3_stream_buffer_t resultBuffer =
3942 {.acquire_fence = -1,
3943 .release_fence = -1,
3944 .buffer = nullptr,
3945 .stream = depthCh->getStream(),
3946 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08003947
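    // Drain every remaining mapped depth buffer: unmap it, send a buffer
    // error notify for its frame number, and hand it back to the dispatcher
    // with CAMERA3_BUFFER_STATUS_ERROR so the framework can reclaim it.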
3948 while (nullptr !=
3949 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3950 depthCh->unmapBuffer(currentFrameNumber);
3951
3952 notify_msg.message.error.frame_number = currentFrameNumber;
3953 orchestrateNotify(&notify_msg);
3954
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003955 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003956 };
3957}
3958
3959/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003960 * FUNCTION : hdrPlusPerfLock
3961 *
3962 * DESCRIPTION: perf lock for HDR+ using custom intent
3963 *
3964 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3965 *
3966 * RETURN : None
3967 *
3968 *==========================================================================*/
3969void QCamera3HardwareInterface::hdrPlusPerfLock(
3970 mm_camera_super_buf_t *metadata_buf)
3971{
3972 if (NULL == metadata_buf) {
3973 LOGE("metadata_buf is NULL");
3974 return;
3975 }
3976 metadata_buffer_t *metadata =
3977 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3978 int32_t *p_frame_number_valid =
3979 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3980 uint32_t *p_frame_number =
3981 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3982
3983 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3984 LOGE("%s: Invalid metadata", __func__);
3985 return;
3986 }
3987
3988 //acquire perf lock for 5 sec after the last HDR frame is captured
3989 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3990 if ((p_frame_number != NULL) &&
3991 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003992 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003993 }
3994 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003995}
3996
3997/*===========================================================================
3998 * FUNCTION : handleInputBufferWithLock
3999 *
4000 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4001 *
4002 * PARAMETERS : @frame_number: frame number of the input buffer
4003 *
4004 * RETURN :
4005 *
4006 *==========================================================================*/
4007void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4008{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004009 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004010 pendingRequestIterator i = mPendingRequestsList.begin();
4011 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4012 i++;
4013 }
4014 if (i != mPendingRequestsList.end() && i->input_buffer) {
4015 //found the right request
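        // For a reprocess request the shutter timestamp is taken from the
        // input settings (ANDROID_SENSOR_TIMESTAMP); if it is missing, the
        // current monotonic time captured below is used instead.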
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004016 CameraMetadata settings;
4017 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4018 if(i->settings) {
4019 settings = i->settings;
4020 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4021 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004022 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004023 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004024 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004025 } else {
4026 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004027 }
4028
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004029 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4030 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4031 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004032
4033 camera3_capture_result result;
4034 memset(&result, 0, sizeof(camera3_capture_result));
4035 result.frame_number = frame_number;
4036 result.result = i->settings;
4037 result.input_buffer = i->input_buffer;
4038 result.partial_result = PARTIAL_RESULT_COUNT;
4039
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004040 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004041 LOGD("Input request metadata and input buffer frame_number = %u",
4042 i->frame_number);
4043 i = erasePendingRequest(i);
4044 } else {
4045 LOGE("Could not find input request for frame number %d", frame_number);
4046 }
4047}
4048
4049/*===========================================================================
4050 * FUNCTION : handleBufferWithLock
4051 *
4052 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4053 *
4054 * PARAMETERS : @buffer: image buffer for the callback
4055 * @frame_number: frame number of the image buffer
4056 *
4057 * RETURN :
4058 *
4059 *==========================================================================*/
4060void QCamera3HardwareInterface::handleBufferWithLock(
4061 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4062{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004063 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004064
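    // A BLOB (JPEG) buffer completing means the snapshot is done, so the
    // perf lock acquired when the snapshot request was submitted can be
    // released here.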
4065 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4066 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4067 }
4068
Thierry Strudel3d639192016-09-09 11:52:26 -07004069 /* Nothing to be done during error state */
4070 if ((ERROR == mState) || (DEINIT == mState)) {
4071 return;
4072 }
4073 if (mFlushPerf) {
4074 handleBuffersDuringFlushLock(buffer);
4075 return;
4076 }
4077 //not in flush
4078 // If the frame number doesn't exist in the pending request list,
4079 // directly send the buffer to the frameworks, and update pending buffers map
4080 // Otherwise, book-keep the buffer.
4081 pendingRequestIterator i = mPendingRequestsList.begin();
4082 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4083 i++;
4084 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004085
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004086 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004087 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004088 // For a reprocessing request, try to send out result metadata.
4089 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004090 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004091 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004092
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004093 // Check if this frame was dropped.
4094 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4095 m != mPendingFrameDropList.end(); m++) {
4096 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4097 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4098 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4099 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4100 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4101 frame_number, streamID);
4102 m = mPendingFrameDropList.erase(m);
4103 break;
4104 }
4105 }
4106
4107 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4108 LOGH("result frame_number = %d, buffer = %p",
4109 frame_number, buffer->buffer);
4110
4111 mPendingBuffersMap.removeBuf(buffer->buffer);
4112 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4113
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004114 if (mPreviewStarted == false) {
4115 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4116 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004117 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4118
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004119 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4120 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4121 mPreviewStarted = true;
4122
4123 // Set power hint for preview
4124 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4125 }
4126 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004127}
4128
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004129void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004130 const camera_metadata_t *resultMetadata)
4131{
4132 // Find the pending request for this result metadata.
4133 auto requestIter = mPendingRequestsList.begin();
4134 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4135 requestIter++;
4136 }
4137
4138 if (requestIter == mPendingRequestsList.end()) {
4139 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4140 return;
4141 }
4142
4143 // Update the result metadata
4144 requestIter->resultMetadata = resultMetadata;
4145
4146 // Check what type of request this is.
4147 bool liveRequest = false;
4148 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004149 // HDR+ request doesn't have partial results.
4150 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004151 } else if (requestIter->input_buffer != nullptr) {
4152 // Reprocessing request result is the same as settings.
4153 requestIter->resultMetadata = requestIter->settings;
4154 // Reprocessing request doesn't have partial results.
4155 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4156 } else {
4157 liveRequest = true;
4158 requestIter->partial_result_cnt++;
4159 mPendingLiveRequest--;
4160
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004161 {
4162 Mutex::Autolock l(gHdrPlusClientLock);
4163 // For a live request, send the metadata to HDR+ client.
4164 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4165 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4166 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4167 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004168 }
4169 }
4170
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004171 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4172 // to be sent if all previous pending requests are ready to be sent.
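    // Illustrative example (hypothetical frame numbers): with pending requests
    // {10, 11, 12}, if metadata for 11 arrives before 10, frame 11 is held
    // back (readyToSend turns false once 10 is seen without metadata) so that
    // shutters and results still reach the framework in 10, 11, 12 order.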
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004173 bool readyToSend = true;
4174
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004175 // Iterate through the pending requests to send out result metadata that are ready. Also if
4176 // this result metadata belongs to a live request, notify errors for previous live requests
4177 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004178 auto iter = mPendingRequestsList.begin();
4179 while (iter != mPendingRequestsList.end()) {
4180 // Check if current pending request is ready. If it's not ready, the following pending
4181 // requests are also not ready.
4182 if (readyToSend && iter->resultMetadata == nullptr) {
4183 readyToSend = false;
4184 }
4185
4186 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4187
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004188 camera3_capture_result_t result = {};
4189 result.frame_number = iter->frame_number;
4190 result.result = iter->resultMetadata;
4191 result.partial_result = iter->partial_result_cnt;
4192
4193 // If this pending buffer has result metadata, we may be able to send out shutter callback
4194 // and result metadata.
4195 if (iter->resultMetadata != nullptr) {
4196 if (!readyToSend) {
4197 // If any of the previous pending request is not ready, this pending request is
4198 // also not ready to send in order to keep shutter callbacks and result metadata
4199 // in order.
4200 iter++;
4201 continue;
4202 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004203 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4204 // If the result metadata belongs to a live request, notify errors for previous pending
4205 // live requests.
4206 mPendingLiveRequest--;
4207
4208 CameraMetadata dummyMetadata;
4209 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4210 result.result = dummyMetadata.release();
4211
4212 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004213
4214 // partial_result should be PARTIAL_RESULT_CNT in case of
4215 // ERROR_RESULT.
4216 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4217 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004218 } else {
4219 iter++;
4220 continue;
4221 }
4222
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004223 result.output_buffers = nullptr;
4224 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004225 orchestrateResult(&result);
4226
4227 // For reprocessing, result metadata is the same as settings so do not free it here to
4228 // avoid double free.
4229 if (result.result != iter->settings) {
4230 free_camera_metadata((camera_metadata_t *)result.result);
4231 }
4232 iter->resultMetadata = nullptr;
4233 iter = erasePendingRequest(iter);
4234 }
4235
4236 if (liveRequest) {
4237 for (auto &iter : mPendingRequestsList) {
4238 // Increment pipeline depth for the following pending requests.
4239 if (iter.frame_number > frameNumber) {
4240 iter.pipeline_depth++;
4241 }
4242 }
4243 }
4244
4245 unblockRequestIfNecessary();
4246}
4247
Thierry Strudel3d639192016-09-09 11:52:26 -07004248/*===========================================================================
4249 * FUNCTION : unblockRequestIfNecessary
4250 *
4251 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4252 * that mMutex is held when this function is called.
4253 *
4254 * PARAMETERS :
4255 *
4256 * RETURN :
4257 *
4258 *==========================================================================*/
4259void QCamera3HardwareInterface::unblockRequestIfNecessary()
4260{
4261 // Unblock process_capture_request
4262 pthread_cond_signal(&mRequestCond);
4263}
4264
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004265/*===========================================================================
4266 * FUNCTION : isHdrSnapshotRequest
4267 *
4268 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4269 *
4270 * PARAMETERS : camera3 request structure
4271 *
4272 * RETURN : boolean decision variable
4273 *
4274 *==========================================================================*/
4275bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4276{
4277 if (request == NULL) {
4278 LOGE("Invalid request handle");
4279 assert(0);
4280 return false;
4281 }
4282
4283 if (!mForceHdrSnapshot) {
4284 CameraMetadata frame_settings;
4285 frame_settings = request->settings;
4286
4287 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4288 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4289 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4290 return false;
4291 }
4292 } else {
4293 return false;
4294 }
4295
4296 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4297 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4298 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4299 return false;
4300 }
4301 } else {
4302 return false;
4303 }
4304 }
4305
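    // Only treat this as an HDR snapshot if at least one BLOB (JPEG) output
    // buffer is attached to the request.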
4306 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4307 if (request->output_buffers[i].stream->format
4308 == HAL_PIXEL_FORMAT_BLOB) {
4309 return true;
4310 }
4311 }
4312
4313 return false;
4314}
4315/*===========================================================================
4316 * FUNCTION : orchestrateRequest
4317 *
4318 * DESCRIPTION: Orchestrates a capture request from camera service
4319 *
4320 * PARAMETERS :
4321 * @request : request from framework to process
4322 *
4323 * RETURN : Error status codes
4324 *
4325 *==========================================================================*/
4326int32_t QCamera3HardwareInterface::orchestrateRequest(
4327 camera3_capture_request_t *request)
4328{
4329
4330 uint32_t originalFrameNumber = request->frame_number;
4331 uint32_t originalOutputCount = request->num_output_buffers;
4332 const camera_metadata_t *original_settings = request->settings;
4333 List<InternalRequest> internallyRequestedStreams;
4334 List<InternalRequest> emptyInternalList;
4335
4336 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4337 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4338 uint32_t internalFrameNumber;
4339 CameraMetadata modified_meta;
4340
4341
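        // Rough shape of the bracketed HDR sequence that follows: each pass
        // goes through processCaptureRequest() with an internally generated
        // frame number, and only the pass that reuses the original framework
        // frame number returns buffers to the app.
        //   1. AE-settling metering pass plus the app capture at
        //      GB_HDR_HALF_STEP_EV exposure compensation (AE locked),
        //   2. settling pass plus an internal capture back at EV 0,
        //   3. settling pass plus an internal capture at GB_HDR_2X_STEP_EV,
        // after which the internal stream list is cleared and the original
        // settings pointer is restored.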
4342 /* Add Blob channel to list of internally requested streams */
4343 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4344 if (request->output_buffers[i].stream->format
4345 == HAL_PIXEL_FORMAT_BLOB) {
4346 InternalRequest streamRequested;
4347 streamRequested.meteringOnly = 1;
4348 streamRequested.need_metadata = 0;
4349 streamRequested.stream = request->output_buffers[i].stream;
4350 internallyRequestedStreams.push_back(streamRequested);
4351 }
4352 }
4353 request->num_output_buffers = 0;
4354 auto itr = internallyRequestedStreams.begin();
4355
4356 /* Modify setting to set compensation */
4357 modified_meta = request->settings;
4358 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4359 uint8_t aeLock = 1;
4360 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4361 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4362 camera_metadata_t *modified_settings = modified_meta.release();
4363 request->settings = modified_settings;
4364
4365 /* Capture Settling & -2x frame */
4366 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4367 request->frame_number = internalFrameNumber;
4368 processCaptureRequest(request, internallyRequestedStreams);
4369
4370 request->num_output_buffers = originalOutputCount;
4371 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4372 request->frame_number = internalFrameNumber;
4373 processCaptureRequest(request, emptyInternalList);
4374 request->num_output_buffers = 0;
4375
4376 modified_meta = modified_settings;
4377 expCompensation = 0;
4378 aeLock = 1;
4379 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4380 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4381 modified_settings = modified_meta.release();
4382 request->settings = modified_settings;
4383
4384 /* Capture Settling & 0X frame */
4385
4386 itr = internallyRequestedStreams.begin();
4387 if (itr == internallyRequestedStreams.end()) {
4388 LOGE("Error Internally Requested Stream list is empty");
4389 assert(0);
4390 } else {
4391 itr->need_metadata = 0;
4392 itr->meteringOnly = 1;
4393 }
4394
4395 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4396 request->frame_number = internalFrameNumber;
4397 processCaptureRequest(request, internallyRequestedStreams);
4398
4399 itr = internallyRequestedStreams.begin();
4400 if (itr == internallyRequestedStreams.end()) {
4401 ALOGE("Error Internally Requested Stream list is empty");
4402 assert(0);
4403 } else {
4404 itr->need_metadata = 1;
4405 itr->meteringOnly = 0;
4406 }
4407
4408 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4409 request->frame_number = internalFrameNumber;
4410 processCaptureRequest(request, internallyRequestedStreams);
4411
4412 /* Capture 2X frame*/
4413 modified_meta = modified_settings;
4414 expCompensation = GB_HDR_2X_STEP_EV;
4415 aeLock = 1;
4416 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4417 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4418 modified_settings = modified_meta.release();
4419 request->settings = modified_settings;
4420
4421 itr = internallyRequestedStreams.begin();
4422 if (itr == internallyRequestedStreams.end()) {
4423 ALOGE("Error Internally Requested Stream list is empty");
4424 assert(0);
4425 } else {
4426 itr->need_metadata = 0;
4427 itr->meteringOnly = 1;
4428 }
4429 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4430 request->frame_number = internalFrameNumber;
4431 processCaptureRequest(request, internallyRequestedStreams);
4432
4433 itr = internallyRequestedStreams.begin();
4434 if (itr == internallyRequestedStreams.end()) {
4435 ALOGE("Error Internally Requested Stream list is empty");
4436 assert(0);
4437 } else {
4438 itr->need_metadata = 1;
4439 itr->meteringOnly = 0;
4440 }
4441
4442 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4443 request->frame_number = internalFrameNumber;
4444 processCaptureRequest(request, internallyRequestedStreams);
4445
4446
4447 /* Capture 2X on original streaming config*/
4448 internallyRequestedStreams.clear();
4449
4450 /* Restore original settings pointer */
4451 request->settings = original_settings;
4452 } else {
4453 uint32_t internalFrameNumber;
4454 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4455 request->frame_number = internalFrameNumber;
4456 return processCaptureRequest(request, internallyRequestedStreams);
4457 }
4458
4459 return NO_ERROR;
4460}
4461
4462/*===========================================================================
4463 * FUNCTION : orchestrateResult
4464 *
4465 * DESCRIPTION: Orchestrates a capture result to camera service
4466 *
4467 * PARAMETERS :
4468 *   @result : capture result to be sent to camera service
4469 *
4470 * RETURN :
4471 *
4472 *==========================================================================*/
4473void QCamera3HardwareInterface::orchestrateResult(
4474 camera3_capture_result_t *result)
4475{
4476 uint32_t frameworkFrameNumber;
4477 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4478 frameworkFrameNumber);
4479 if (rc != NO_ERROR) {
4480 LOGE("Cannot find translated frameworkFrameNumber");
4481 assert(0);
4482 } else {
4483 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004484 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004485 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004486 if (result->result != NULL) {
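                // The result metadata was generated against the HAL's internal
                // frame number; rewrite ANDROID_SYNC_FRAME_NUMBER (when present)
                // so the framework sees its own frame number instead.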
Binhao Lin299ffc92017-04-27 11:22:47 -07004487 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4488 camera_metadata_entry_t entry;
4489 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4490 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004491 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004492 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4493 if (ret != OK)
4494 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004495 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004496 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004497 result->frame_number = frameworkFrameNumber;
4498 mCallbackOps->process_capture_result(mCallbackOps, result);
4499 }
4500 }
4501}
4502
4503/*===========================================================================
4504 * FUNCTION : orchestrateNotify
4505 *
4506 * DESCRIPTION: Orchestrates a notify to camera service
4507 *
4508 * PARAMETERS :
4509 *   @notify_msg : notify message to be sent to camera service
4510 *
4511 * RETURN :
4512 *
4513 *==========================================================================*/
4514void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4515{
4516 uint32_t frameworkFrameNumber;
4517 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004518 int32_t rc = NO_ERROR;
4519
4520 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004521 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004522
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004523 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004524 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4525 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4526 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004527 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004528 LOGE("Cannot find translated frameworkFrameNumber");
4529 assert(0);
4530 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004531 }
4532 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004533
4534 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4535 LOGD("Internal Request drop the notifyCb");
4536 } else {
4537 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4538 mCallbackOps->notify(mCallbackOps, notify_msg);
4539 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004540}
4541
4542/*===========================================================================
4543 * FUNCTION : FrameNumberRegistry
4544 *
4545 * DESCRIPTION: Constructor
4546 *
4547 * PARAMETERS :
4548 *
4549 * RETURN :
4550 *
4551 *==========================================================================*/
4552FrameNumberRegistry::FrameNumberRegistry()
4553{
4554 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4555}
4556
4557/*===========================================================================
4558 * FUNCTION : ~FrameNumberRegistry
4559 *
4560 * DESCRIPTION: Destructor
4561 *
4562 * PARAMETERS :
4563 *
4564 * RETURN :
4565 *
4566 *==========================================================================*/
4567FrameNumberRegistry::~FrameNumberRegistry()
4568{
4569}
4570
4571/*===========================================================================
4572 * FUNCTION : PurgeOldEntriesLocked
4573 *
4574 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4575 *
4576 * PARAMETERS :
4577 *
4578 * RETURN : NONE
4579 *
4580 *==========================================================================*/
4581void FrameNumberRegistry::purgeOldEntriesLocked()
4582{
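    // Entries are keyed by internal frame number and keys are assigned in
    // increasing order, so trimming from the front until the key is within
    // FRAME_REGISTER_LRU_SIZE of the next free internal number bounds the
    // registry's size.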
4583 while (_register.begin() != _register.end()) {
4584 auto itr = _register.begin();
4585 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4586 _register.erase(itr);
4587 } else {
4588 return;
4589 }
4590 }
4591}
4592
4593/*===========================================================================
4594 * FUNCTION : allocStoreInternalFrameNumber
4595 *
4596 * DESCRIPTION: Method to note down a framework request and associate a new
4597 * DESCRIPTION: Method to record a framework frame number and associate a
4598 *              newly generated internal frame number with it
4599 * PARAMETERS :
4600 * @fFrameNumber: Identifier given by framework
4601 *   @internalFN : Output parameter which will have the newly generated internal
4602 *                 frame number
4603 *
4604 * RETURN : Error code
4605 *
4606 *==========================================================================*/
4607int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4608 uint32_t &internalFrameNumber)
4609{
4610 Mutex::Autolock lock(mRegistryLock);
4611 internalFrameNumber = _nextFreeInternalNumber++;
4612 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4613 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4614 purgeOldEntriesLocked();
4615 return NO_ERROR;
4616}
4617
4618/*===========================================================================
4619 * FUNCTION : generateStoreInternalFrameNumber
4620 *
4621 * DESCRIPTION: Method to associate a new internal request number independent
4622 *              of any association with framework requests
4623 *
4624 * PARAMETERS :
4625 *   @internalFrame#: Output parameter which will have the newly generated internal frame number
4626 *
4627 *
4628 * RETURN : Error code
4629 *
4630 *==========================================================================*/
4631int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4632{
4633 Mutex::Autolock lock(mRegistryLock);
4634 internalFrameNumber = _nextFreeInternalNumber++;
4635 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4636 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4637 purgeOldEntriesLocked();
4638 return NO_ERROR;
4639}
4640
4641/*===========================================================================
4642 * FUNCTION : getFrameworkFrameNumber
4643 *
4644 * DESCRIPTION: Method to query the framework frame number given an internal one
4645 *
4646 * PARAMETERS :
4647 * @internalFrame#: Internal reference
4648 *   @frameworkframenumber: Output parameter holding the framework frame number
4649 *
4650 * RETURN : Error code
4651 *
4652 *==========================================================================*/
4653int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4654 uint32_t &frameworkFrameNumber)
4655{
4656 Mutex::Autolock lock(mRegistryLock);
4657 auto itr = _register.find(internalFrameNumber);
4658 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004659 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004660 return -ENOENT;
4661 }
4662
4663 frameworkFrameNumber = itr->second;
4664 purgeOldEntriesLocked();
4665 return NO_ERROR;
4666}
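// Illustrative registry lifecycle (hypothetical numbers, not from this file):
//   uint32_t internal;
//   _orchestrationDb.allocStoreInternalFrameNumber(/*framework*/ 12, internal);
//   // ... submit the request to the backend using 'internal' ...
//   uint32_t fwk;
//   _orchestrationDb.getFrameworkFrameNumber(internal, fwk);  // fwk == 12
// Internally generated frames map to EMPTY_FRAMEWORK_FRAME_NUMBER and are
// dropped before reaching the framework.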
Thierry Strudel3d639192016-09-09 11:52:26 -07004667
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004668status_t QCamera3HardwareInterface::fillPbStreamConfig(
4669 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4670 QCamera3Channel *channel, uint32_t streamIndex) {
4671 if (config == nullptr) {
4672 LOGE("%s: config is null", __FUNCTION__);
4673 return BAD_VALUE;
4674 }
4675
4676 if (channel == nullptr) {
4677 LOGE("%s: channel is null", __FUNCTION__);
4678 return BAD_VALUE;
4679 }
4680
4681 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4682 if (stream == nullptr) {
4683 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4684 return NAME_NOT_FOUND;
4685 }
4686
4687 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4688 if (streamInfo == nullptr) {
4689 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4690 return NAME_NOT_FOUND;
4691 }
4692
4693 config->id = pbStreamId;
4694 config->image.width = streamInfo->dim.width;
4695 config->image.height = streamInfo->dim.height;
4696 config->image.padding = 0;
4697 config->image.format = pbStreamFormat;
4698
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004699 uint32_t totalPlaneSize = 0;
4700
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004701 // Fill plane information.
4702 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4703 pbcamera::PlaneConfiguration plane;
4704 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4705 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4706 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004707
4708 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004709 }
4710
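    // Whatever the backend's frame length reserves beyond the sum of the
    // per-plane sizes (stride * scanline) is reported as padding.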
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004711 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004712 return OK;
4713}
4714
Thierry Strudel3d639192016-09-09 11:52:26 -07004715/*===========================================================================
4716 * FUNCTION : processCaptureRequest
4717 *
4718 * DESCRIPTION: process a capture request from camera service
4719 *
4720 * PARAMETERS :
4721 * @request : request from framework to process
4722 *
4723 * RETURN :
4724 *
4725 *==========================================================================*/
4726int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004727 camera3_capture_request_t *request,
4728 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004729{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004730 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004731 int rc = NO_ERROR;
4732 int32_t request_id;
4733 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004734 bool isVidBufRequested = false;
4735 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004736 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004737
4738 pthread_mutex_lock(&mMutex);
4739
4740 // Validate current state
4741 switch (mState) {
4742 case CONFIGURED:
4743 case STARTED:
4744 /* valid state */
4745 break;
4746
4747 case ERROR:
4748 pthread_mutex_unlock(&mMutex);
4749 handleCameraDeviceError();
4750 return -ENODEV;
4751
4752 default:
4753 LOGE("Invalid state %d", mState);
4754 pthread_mutex_unlock(&mMutex);
4755 return -ENODEV;
4756 }
4757
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004758 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004759 if (rc != NO_ERROR) {
4760 LOGE("incoming request is not valid");
4761 pthread_mutex_unlock(&mMutex);
4762 return rc;
4763 }
4764
4765 meta = request->settings;
4766
4767 // For first capture request, send capture intent, and
4768 // stream on all streams
4769 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004770 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004771 // send an unconfigure to the backend so that the isp
4772 // resources are deallocated
4773 if (!mFirstConfiguration) {
4774 cam_stream_size_info_t stream_config_info;
4775 int32_t hal_version = CAM_HAL_V3;
4776 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4777 stream_config_info.buffer_info.min_buffers =
4778 MIN_INFLIGHT_REQUESTS;
4779 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004780 m_bIs4KVideo ? 0 :
4781 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004782 clear_metadata_buffer(mParameters);
4783 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4784 CAM_INTF_PARM_HAL_VERSION, hal_version);
4785 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4786 CAM_INTF_META_STREAM_INFO, stream_config_info);
4787 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4788 mParameters);
4789 if (rc < 0) {
4790 LOGE("set_parms for unconfigure failed");
4791 pthread_mutex_unlock(&mMutex);
4792 return rc;
4793 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004794
Thierry Strudel3d639192016-09-09 11:52:26 -07004795 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004796 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004797 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004798 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004799 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004800 property_get("persist.camera.is_type", is_type_value, "4");
4801 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4802 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4803 property_get("persist.camera.is_type_preview", is_type_value, "4");
4804 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4805 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004806
4807 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4808 int32_t hal_version = CAM_HAL_V3;
4809 uint8_t captureIntent =
4810 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4811 mCaptureIntent = captureIntent;
4812 clear_metadata_buffer(mParameters);
4813 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4814 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4815 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004816 if (mFirstConfiguration) {
4817 // configure instant AEC
4818 // Instant AEC is a session based parameter and it is needed only
4819 // once per complete session after open camera.
4820 // i.e. This is set only once for the first capture request, after open camera.
4821 setInstantAEC(meta);
4822 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004823 uint8_t fwkVideoStabMode=0;
4824 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4825 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4826 }
4827
Xue Tuecac74e2017-04-17 13:58:15 -07004828 // If EIS setprop is enabled then only turn it on for video/preview
4829 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004830 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004831 int32_t vsMode;
4832 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4833 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4834 rc = BAD_VALUE;
4835 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004836 LOGD("setEis %d", setEis);
4837 bool eis3Supported = false;
4838 size_t count = IS_TYPE_MAX;
4839 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4840 for (size_t i = 0; i < count; i++) {
4841 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4842 eis3Supported = true;
4843 break;
4844 }
4845 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004846
4847 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004848 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004849 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4850 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004851 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4852 is_type = isTypePreview;
4853 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4854 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4855 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004856 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004857 } else {
4858 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004859 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004860 } else {
4861 is_type = IS_TYPE_NONE;
4862 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004863 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004864 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004865 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4866 }
4867 }
4868
4869 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4870 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4871
Thierry Strudel54dc9782017-02-15 12:12:10 -08004872 //Disable tintless only if the property is set to 0
4873 memset(prop, 0, sizeof(prop));
4874 property_get("persist.camera.tintless.enable", prop, "1");
4875 int32_t tintless_value = atoi(prop);
4876
Thierry Strudel3d639192016-09-09 11:52:26 -07004877 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4878 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004879
Thierry Strudel3d639192016-09-09 11:52:26 -07004880 //Disable CDS for HFR mode or if DIS/EIS is on.
4881 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4882 //after every configure_stream
4883 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4884 (m_bIsVideo)) {
4885 int32_t cds = CAM_CDS_MODE_OFF;
4886 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4887 CAM_INTF_PARM_CDS_MODE, cds))
4888 LOGE("Failed to disable CDS for HFR mode");
4889
4890 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004891
4892 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4893 uint8_t* use_av_timer = NULL;
4894
4895 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004896 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004897 use_av_timer = &m_debug_avtimer;
4898 }
4899 else{
4900 use_av_timer =
4901 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004902 if (use_av_timer) {
4903 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4904 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004905 }
4906
4907 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4908 rc = BAD_VALUE;
4909 }
4910 }
4911
Thierry Strudel3d639192016-09-09 11:52:26 -07004912 setMobicat();
4913
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004914 uint8_t nrMode = 0;
4915 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4916 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4917 }
4918
Thierry Strudel3d639192016-09-09 11:52:26 -07004919 /* Set fps and hfr mode while sending meta stream info so that sensor
4920 * can configure appropriate streaming mode */
4921 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004922 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4923 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004924 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4925 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004926 if (rc == NO_ERROR) {
4927 int32_t max_fps =
4928 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004929 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004930 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4931 }
4932 /* For HFR, more buffers are dequeued upfront to improve the performance */
4933 if (mBatchSize) {
4934 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4935 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4936 }
4937 }
4938 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004939 LOGE("setHalFpsRange failed");
4940 }
4941 }
4942 if (meta.exists(ANDROID_CONTROL_MODE)) {
4943 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4944 rc = extractSceneMode(meta, metaMode, mParameters);
4945 if (rc != NO_ERROR) {
4946 LOGE("extractSceneMode failed");
4947 }
4948 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004949 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004950
Thierry Strudel04e026f2016-10-10 11:27:36 -07004951 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4952 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4953 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4954 rc = setVideoHdrMode(mParameters, vhdr);
4955 if (rc != NO_ERROR) {
4956 LOGE("setVideoHDR is failed");
4957 }
4958 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004959
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004960 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004961 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004962 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004963 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4964 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
4965 sensorModeFullFov)) {
4966 rc = BAD_VALUE;
4967 }
4968 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004969 //TODO: validate the arguments, HSV scenemode should have only the
4970 //advertised fps ranges
4971
4972          *and DIS enable parameters to the backend*/
4973 *and disenable parameters to the backend*/
4974 LOGD("set_parms META_STREAM_INFO " );
4975 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004976 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4977 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004978 mStreamConfigInfo.type[i],
4979 mStreamConfigInfo.stream_sizes[i].width,
4980 mStreamConfigInfo.stream_sizes[i].height,
4981 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004982 mStreamConfigInfo.format[i],
4983 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004984 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004985
Thierry Strudel3d639192016-09-09 11:52:26 -07004986 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4987 mParameters);
4988 if (rc < 0) {
4989 LOGE("set_parms failed for hal version, stream info");
4990 }
4991
Chien-Yu Chen605c3872017-06-14 11:09:23 -07004992 cam_sensor_mode_info_t sensorModeInfo = {};
4993 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004994 if (rc != NO_ERROR) {
4995 LOGE("Failed to get sensor output size");
4996            LOGE("Failed to get sensor mode info");
4997 goto error_exit;
4998 }
4999
5000 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5001 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005002 sensorModeInfo.active_array_size.width,
5003 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005004
5005 /* Set batchmode before initializing channel. Since registerBuffer
5006 * internally initializes some of the channels, better set batchmode
5007 * even before first register buffer */
5008 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5009 it != mStreamInfo.end(); it++) {
5010 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5011 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5012 && mBatchSize) {
5013 rc = channel->setBatchSize(mBatchSize);
5014 //Disable per frame map unmap for HFR/batchmode case
5015 rc |= channel->setPerFrameMapUnmap(false);
5016 if (NO_ERROR != rc) {
5017 LOGE("Channel init failed %d", rc);
5018 pthread_mutex_unlock(&mMutex);
5019 goto error_exit;
5020 }
5021 }
5022 }
5023
5024 //First initialize all streams
5025 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5026 it != mStreamInfo.end(); it++) {
5027 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005028
5029 /* Initial value of NR mode is needed before stream on */
5030 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005031 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5032 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005033 setEis) {
5034 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5035 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5036 is_type = mStreamConfigInfo.is_type[i];
5037 break;
5038 }
5039 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005040 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005041 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005042 rc = channel->initialize(IS_TYPE_NONE);
5043 }
5044 if (NO_ERROR != rc) {
5045 LOGE("Channel initialization failed %d", rc);
5046 pthread_mutex_unlock(&mMutex);
5047 goto error_exit;
5048 }
5049 }
5050
5051 if (mRawDumpChannel) {
5052 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5053 if (rc != NO_ERROR) {
5054 LOGE("Error: Raw Dump Channel init failed");
5055 pthread_mutex_unlock(&mMutex);
5056 goto error_exit;
5057 }
5058 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005059 if (mHdrPlusRawSrcChannel) {
5060 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5061 if (rc != NO_ERROR) {
5062 LOGE("Error: HDR+ RAW Source Channel init failed");
5063 pthread_mutex_unlock(&mMutex);
5064 goto error_exit;
5065 }
5066 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005067 if (mSupportChannel) {
5068 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5069 if (rc < 0) {
5070 LOGE("Support channel initialization failed");
5071 pthread_mutex_unlock(&mMutex);
5072 goto error_exit;
5073 }
5074 }
5075 if (mAnalysisChannel) {
5076 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5077 if (rc < 0) {
5078 LOGE("Analysis channel initialization failed");
5079 pthread_mutex_unlock(&mMutex);
5080 goto error_exit;
5081 }
5082 }
5083 if (mDummyBatchChannel) {
5084 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5085 if (rc < 0) {
5086 LOGE("mDummyBatchChannel setBatchSize failed");
5087 pthread_mutex_unlock(&mMutex);
5088 goto error_exit;
5089 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005090 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005091 if (rc < 0) {
5092 LOGE("mDummyBatchChannel initialization failed");
5093 pthread_mutex_unlock(&mMutex);
5094 goto error_exit;
5095 }
5096 }
5097
5098 // Set bundle info
5099 rc = setBundleInfo();
5100 if (rc < 0) {
5101 LOGE("setBundleInfo failed %d", rc);
5102 pthread_mutex_unlock(&mMutex);
5103 goto error_exit;
5104 }
5105
5106 //update settings from app here
5107 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5108 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5109 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5110 }
5111 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5112 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5113 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5114 }
5115 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5116 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5117 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5118
5119 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5120 (mLinkedCameraId != mCameraId) ) {
5121 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5122 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005123 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005124 goto error_exit;
5125 }
5126 }
5127
5128 // add bundle related cameras
5129 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5130 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005131 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5132 &m_pDualCamCmdPtr->bundle_info;
5133 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005134 if (mIsDeviceLinked)
5135 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5136 else
5137 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5138
5139 pthread_mutex_lock(&gCamLock);
5140
5141 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5142 LOGE("Dualcam: Invalid Session Id ");
5143 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005144 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005145 goto error_exit;
5146 }
5147
5148 if (mIsMainCamera == 1) {
5149 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5150 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005151 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005152 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005153 // related session id should be session id of linked session
5154 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5155 } else {
5156 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5157 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005158 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005159 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005160 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5161 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005162 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005163 pthread_mutex_unlock(&gCamLock);
5164
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005165 rc = mCameraHandle->ops->set_dual_cam_cmd(
5166 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005167 if (rc < 0) {
5168 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005169 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005170 goto error_exit;
5171 }
5172 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005173 goto no_error;
5174error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005175 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005176 return rc;
5177no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005178 mWokenUpByDaemon = false;
5179 mPendingLiveRequest = 0;
5180 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005181 }
5182
5183 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005184 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005185
5186 if (mFlushPerf) {
5187 //we cannot accept any requests during flush
5188 LOGE("process_capture_request cannot proceed during flush");
5189 pthread_mutex_unlock(&mMutex);
5190 return NO_ERROR; //should return an error
5191 }
5192
5193 if (meta.exists(ANDROID_REQUEST_ID)) {
5194 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5195 mCurrentRequestId = request_id;
5196 LOGD("Received request with id: %d", request_id);
5197 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5198 LOGE("Unable to find request id field, \
5199 & no previous id available");
5200 pthread_mutex_unlock(&mMutex);
5201 return NAME_NOT_FOUND;
5202 } else {
5203 LOGD("Re-using old request id");
5204 request_id = mCurrentRequestId;
5205 }
5206
5207 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5208 request->num_output_buffers,
5209 request->input_buffer,
5210 frameNumber);
5211 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005212 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005213 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005214 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005215 uint32_t snapshotStreamId = 0;
5216 for (size_t i = 0; i < request->num_output_buffers; i++) {
5217 const camera3_stream_buffer_t& output = request->output_buffers[i];
5218 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5219
Emilian Peev7650c122017-01-19 08:24:33 -08005220 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5221 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005222 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005223 blob_request = 1;
5224 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5225 }
5226
5227 if (output.acquire_fence != -1) {
5228 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5229 close(output.acquire_fence);
5230 if (rc != OK) {
5231 LOGE("sync wait failed %d", rc);
5232 pthread_mutex_unlock(&mMutex);
5233 return rc;
5234 }
5235 }
5236
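        // Depth outputs (BLOB format with a DEPTH dataspace) are serviced by
        // the dedicated depth channel, so they are not added to the backend
        // stream request array here.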
Emilian Peev0f3c3162017-03-15 12:57:46 +00005237 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5238 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005239 depthRequestPresent = true;
5240 continue;
5241 }
5242
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005243 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005244 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005245
5246 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5247 isVidBufRequested = true;
5248 }
5249 }
5250
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005251    //FIXME: Add checks to ensure no dups in validateCaptureRequest
5252 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5253 itr++) {
5254 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5255 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5256 channel->getStreamID(channel->getStreamTypeMask());
5257
5258 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5259 isVidBufRequested = true;
5260 }
5261 }
5262
Thierry Strudel3d639192016-09-09 11:52:26 -07005263 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005264 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005265 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005266 }
5267 if (blob_request && mRawDumpChannel) {
5268 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005269 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005270 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005271 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005272 }
5273
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005274 {
5275 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5276 // Request a RAW buffer if
5277 // 1. mHdrPlusRawSrcChannel is valid.
5278        // 2. frameNumber is a multiple of kHdrPlusRawPeriod (in order to limit RAW capture rate).
5279 // 3. There is no pending HDR+ request.
5280 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5281 mHdrPlusPendingRequests.size() == 0) {
5282 streamsArray.stream_request[streamsArray.num_streams].streamID =
5283 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5284 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5285 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005286 }
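    /* The gating above reduces to a single predicate. A minimal illustrative
     * sketch (hypothetical helper, not used by this file), assuming the same
     * members referenced above and that the caller holds
     * mHdrPlusPendingRequestsLock:
     *
     *   bool shouldRequestHdrPlusRaw(uint32_t frameNumber) {
     *       return (mHdrPlusRawSrcChannel != nullptr) &&
     *               (frameNumber % kHdrPlusRawPeriod == 0) &&
     *               (mHdrPlusPendingRequests.size() == 0);
     *   }
     */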
5287
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005288 //extract capture intent
5289 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5290 mCaptureIntent =
5291 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5292 }
5293
5294 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5295 mCacMode =
5296 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5297 }
5298
5299 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005300 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005301
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005302 {
5303 Mutex::Autolock l(gHdrPlusClientLock);
5304 // If this request has a still capture intent, try to submit an HDR+ request.
5305 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5306 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5307 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5308 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005309 }
5310
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005311 if (hdrPlusRequest) {
5312 // For a HDR+ request, just set the frame parameters.
5313 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5314 if (rc < 0) {
5315 LOGE("fail to set frame parameters");
5316 pthread_mutex_unlock(&mMutex);
5317 return rc;
5318 }
5319 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005320 /* Parse the settings:
5321 * - For every request in NORMAL MODE
5322 * - For every request in HFR mode during preview only case
5323 * - For first request of every batch in HFR mode during video
5324 * recording. In batchmode the same settings except frame number is
5325 * repeated in each request of the batch.
5326 */
5327 if (!mBatchSize ||
5328 (mBatchSize && !isVidBufRequested) ||
5329 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005330 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005331 if (rc < 0) {
5332 LOGE("fail to set frame parameters");
5333 pthread_mutex_unlock(&mMutex);
5334 return rc;
5335 }
5336 }
5337        /* For batchMode HFR, setFrameParameters is not called for every
5338         * request; only the frame number of the latest request is parsed.
5339         * Keep track of the first and last frame numbers in a batch so that
5340         * metadata for all frame numbers of the batch can be duplicated in
5341         * handleBatchMetadata */
5342 if (mBatchSize) {
5343 if (!mToBeQueuedVidBufs) {
5344 //start of the batch
5345 mFirstFrameNumberInBatch = request->frame_number;
5346 }
5347 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5348 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5349 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005350 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005351 return BAD_VALUE;
5352 }
5353 }
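        /* A short worked example of the batching bookkeeping above, assuming
         * mBatchSize == 4 and consecutive frame numbers with the batch starting
         * at frame 100 (mFirstFrameNumberInBatch): requests 100..103 arrive,
         * only the request that completes the batch reaches set_parms further
         * below and records mPendingBatchMap.add(103, 100), and
         * handleBatchMetadata later uses that mapping to duplicate the single
         * parsed metadata across frames 100..103. */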
5354 if (mNeedSensorRestart) {
5355 /* Unlock the mutex as restartSensor waits on the channels to be
5356 * stopped, which in turn calls stream callback functions -
5357 * handleBufferWithLock and handleMetadataWithLock */
5358 pthread_mutex_unlock(&mMutex);
5359 rc = dynamicUpdateMetaStreamInfo();
5360 if (rc != NO_ERROR) {
5361 LOGE("Restarting the sensor failed");
5362 return BAD_VALUE;
5363 }
5364 mNeedSensorRestart = false;
5365 pthread_mutex_lock(&mMutex);
5366 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005367 if(mResetInstantAEC) {
5368 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5369 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5370 mResetInstantAEC = false;
5371 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005372 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005373 if (request->input_buffer->acquire_fence != -1) {
5374 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5375 close(request->input_buffer->acquire_fence);
5376 if (rc != OK) {
5377 LOGE("input buffer sync wait failed %d", rc);
5378 pthread_mutex_unlock(&mMutex);
5379 return rc;
5380 }
5381 }
5382 }
5383
5384 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5385 mLastCustIntentFrmNum = frameNumber;
5386 }
5387 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005388 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005389 pendingRequestIterator latestRequest;
5390 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005391 pendingRequest.num_buffers = depthRequestPresent ?
5392 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005393 pendingRequest.request_id = request_id;
5394 pendingRequest.blob_request = blob_request;
5395 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005396 if (request->input_buffer) {
5397 pendingRequest.input_buffer =
5398 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5399 *(pendingRequest.input_buffer) = *(request->input_buffer);
5400 pInputBuffer = pendingRequest.input_buffer;
5401 } else {
5402 pendingRequest.input_buffer = NULL;
5403 pInputBuffer = NULL;
5404 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005405 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005406
5407 pendingRequest.pipeline_depth = 0;
5408 pendingRequest.partial_result_cnt = 0;
5409 extractJpegMetadata(mCurJpegMeta, request);
5410 pendingRequest.jpegMetadata = mCurJpegMeta;
5411 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005412 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005413 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5414 mHybridAeEnable =
5415 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5416 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005417
5418 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5419 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005420 /* DevCamDebug metadata processCaptureRequest */
5421 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5422 mDevCamDebugMetaEnable =
5423 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5424 }
5425 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5426 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005427
5428 //extract CAC info
5429 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5430 mCacMode =
5431 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5432 }
5433 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005434 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005435
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005436 // extract enableZsl info
5437 if (gExposeEnableZslKey) {
5438 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5439 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5440 mZslEnabled = pendingRequest.enableZsl;
5441 } else {
5442 pendingRequest.enableZsl = mZslEnabled;
5443 }
5444 }
5445
Thierry Strudel3d639192016-09-09 11:52:26 -07005446 PendingBuffersInRequest bufsForCurRequest;
5447 bufsForCurRequest.frame_number = frameNumber;
5448 // Mark current timestamp for the new request
5449 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005450 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005451
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005452 if (hdrPlusRequest) {
5453 // Save settings for this request.
5454 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5455 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5456
5457 // Add to pending HDR+ request queue.
5458 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5459 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5460
5461 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5462 }
5463
Thierry Strudel3d639192016-09-09 11:52:26 -07005464 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005465 if ((request->output_buffers[i].stream->data_space ==
5466 HAL_DATASPACE_DEPTH) &&
5467 (HAL_PIXEL_FORMAT_BLOB ==
5468 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005469 continue;
5470 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005471 RequestedBufferInfo requestedBuf;
5472 memset(&requestedBuf, 0, sizeof(requestedBuf));
5473 requestedBuf.stream = request->output_buffers[i].stream;
5474 requestedBuf.buffer = NULL;
5475 pendingRequest.buffers.push_back(requestedBuf);
5476
5477 // Add to buffer handle the pending buffers list
5478 PendingBufferInfo bufferInfo;
5479 bufferInfo.buffer = request->output_buffers[i].buffer;
5480 bufferInfo.stream = request->output_buffers[i].stream;
5481 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5482 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5483 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5484 frameNumber, bufferInfo.buffer,
5485 channel->getStreamTypeMask(), bufferInfo.stream->format);
5486 }
5487 // Add this request packet into mPendingBuffersMap
5488 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5489 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5490 mPendingBuffersMap.get_num_overall_buffers());
5491
5492 latestRequest = mPendingRequestsList.insert(
5493 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005494
5495 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5496 // for the frame number.
5497 mShutterDispatcher.expectShutter(frameNumber);
5498 for (size_t i = 0; i < request->num_output_buffers; i++) {
5499 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5500 }
5501
Thierry Strudel3d639192016-09-09 11:52:26 -07005502 if(mFlush) {
5503 LOGI("mFlush is true");
5504 pthread_mutex_unlock(&mMutex);
5505 return NO_ERROR;
5506 }
5507
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005508 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5509 // channel.
5510 if (!hdrPlusRequest) {
5511 int indexUsed;
5512 // Notify metadata channel we receive a request
5513 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005514
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005515 if(request->input_buffer != NULL){
5516 LOGD("Input request, frame_number %d", frameNumber);
5517 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5518 if (NO_ERROR != rc) {
5519 LOGE("fail to set reproc parameters");
5520 pthread_mutex_unlock(&mMutex);
5521 return rc;
5522 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005523 }
5524
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005525 // Call request on other streams
5526 uint32_t streams_need_metadata = 0;
5527 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5528 for (size_t i = 0; i < request->num_output_buffers; i++) {
5529 const camera3_stream_buffer_t& output = request->output_buffers[i];
5530 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5531
5532 if (channel == NULL) {
5533 LOGW("invalid channel pointer for stream");
5534 continue;
5535 }
5536
5537 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5538 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5539 output.buffer, request->input_buffer, frameNumber);
5540 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005541 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005542 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5543 if (rc < 0) {
5544 LOGE("Fail to request on picture channel");
5545 pthread_mutex_unlock(&mMutex);
5546 return rc;
5547 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005548 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005549 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5550 assert(NULL != mDepthChannel);
5551 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005552
Emilian Peev7650c122017-01-19 08:24:33 -08005553 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5554 if (rc < 0) {
5555 LOGE("Fail to map on depth buffer");
5556 pthread_mutex_unlock(&mMutex);
5557 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005558 }
Emilian Peev7650c122017-01-19 08:24:33 -08005559 } else {
5560 LOGD("snapshot request with buffer %p, frame_number %d",
5561 output.buffer, frameNumber);
5562 if (!request->settings) {
5563 rc = channel->request(output.buffer, frameNumber,
5564 NULL, mPrevParameters, indexUsed);
5565 } else {
5566 rc = channel->request(output.buffer, frameNumber,
5567 NULL, mParameters, indexUsed);
5568 }
5569 if (rc < 0) {
5570 LOGE("Fail to request on picture channel");
5571 pthread_mutex_unlock(&mMutex);
5572 return rc;
5573 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005574
Emilian Peev7650c122017-01-19 08:24:33 -08005575 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5576 uint32_t j = 0;
5577 for (j = 0; j < streamsArray.num_streams; j++) {
5578 if (streamsArray.stream_request[j].streamID == streamId) {
5579 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5580 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5581 else
5582 streamsArray.stream_request[j].buf_index = indexUsed;
5583 break;
5584 }
5585 }
5586 if (j == streamsArray.num_streams) {
5587 LOGE("Did not find matching stream to update index");
5588 assert(0);
5589 }
5590
5591 pendingBufferIter->need_metadata = true;
5592 streams_need_metadata++;
5593 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005594 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005595 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5596 bool needMetadata = false;
5597 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5598 rc = yuvChannel->request(output.buffer, frameNumber,
5599 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5600 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005601 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005602 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005603 pthread_mutex_unlock(&mMutex);
5604 return rc;
5605 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005606
5607 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5608 uint32_t j = 0;
5609 for (j = 0; j < streamsArray.num_streams; j++) {
5610 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005611 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5612 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5613 else
5614 streamsArray.stream_request[j].buf_index = indexUsed;
5615 break;
5616 }
5617 }
5618 if (j == streamsArray.num_streams) {
5619 LOGE("Did not find matching stream to update index");
5620 assert(0);
5621 }
5622
5623 pendingBufferIter->need_metadata = needMetadata;
5624 if (needMetadata)
5625 streams_need_metadata += 1;
5626 LOGD("calling YUV channel request, need_metadata is %d",
5627 needMetadata);
5628 } else {
5629 LOGD("request with buffer %p, frame_number %d",
5630 output.buffer, frameNumber);
5631
5632 rc = channel->request(output.buffer, frameNumber, indexUsed);
5633
5634 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5635 uint32_t j = 0;
5636 for (j = 0; j < streamsArray.num_streams; j++) {
5637 if (streamsArray.stream_request[j].streamID == streamId) {
5638 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5639 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5640 else
5641 streamsArray.stream_request[j].buf_index = indexUsed;
5642 break;
5643 }
5644 }
5645 if (j == streamsArray.num_streams) {
5646 LOGE("Did not find matching stream to update index");
5647 assert(0);
5648 }
5649
5650 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5651 && mBatchSize) {
5652 mToBeQueuedVidBufs++;
5653 if (mToBeQueuedVidBufs == mBatchSize) {
5654 channel->queueBatchBuf();
5655 }
5656 }
5657 if (rc < 0) {
5658 LOGE("request failed");
5659 pthread_mutex_unlock(&mMutex);
5660 return rc;
5661 }
5662 }
5663 pendingBufferIter++;
5664 }
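        /* The "locate this channel's entry in streamsArray and record the buffer
         * index" step is repeated above for the blob, YUV and default paths. A
         * minimal sketch of that pattern (hypothetical helper, not used by this
         * file):
         *
         *   static void updateBufIndex(cam_stream_ID_t &streams, uint32_t streamId,
         *           uint32_t indexUsed, uint32_t opMode) {
         *       for (uint32_t j = 0; j < streams.num_streams; j++) {
         *           if (streams.stream_request[j].streamID == streamId) {
         *               streams.stream_request[j].buf_index =
         *                       (opMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) ?
         *                       CAM_FREERUN_IDX : indexUsed;
         *               return;
         *           }
         *       }
         *       LOGE("Did not find matching stream to update index");
         *   }
         */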
5665
5666 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5667 itr++) {
5668 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5669
5670 if (channel == NULL) {
5671 LOGE("invalid channel pointer for stream");
5672 assert(0);
5673 return BAD_VALUE;
5674 }
5675
5676 InternalRequest requestedStream;
5677 requestedStream = (*itr);
5678
5679
5680 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5681 LOGD("snapshot request internally input buffer %p, frame_number %d",
5682 request->input_buffer, frameNumber);
5683 if(request->input_buffer != NULL){
5684 rc = channel->request(NULL, frameNumber,
5685 pInputBuffer, &mReprocMeta, indexUsed, true,
5686 requestedStream.meteringOnly);
5687 if (rc < 0) {
5688 LOGE("Fail to request on picture channel");
5689 pthread_mutex_unlock(&mMutex);
5690 return rc;
5691 }
5692 } else {
5693 LOGD("snapshot request with frame_number %d", frameNumber);
5694 if (!request->settings) {
5695 rc = channel->request(NULL, frameNumber,
5696 NULL, mPrevParameters, indexUsed, true,
5697 requestedStream.meteringOnly);
5698 } else {
5699 rc = channel->request(NULL, frameNumber,
5700 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5701 }
5702 if (rc < 0) {
5703 LOGE("Fail to request on picture channel");
5704 pthread_mutex_unlock(&mMutex);
5705 return rc;
5706 }
5707
5708 if ((*itr).meteringOnly != 1) {
5709 requestedStream.need_metadata = 1;
5710 streams_need_metadata++;
5711 }
5712 }
5713
5714 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5715 uint32_t j = 0;
5716 for (j = 0; j < streamsArray.num_streams; j++) {
5717 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005718 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5719 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5720 else
5721 streamsArray.stream_request[j].buf_index = indexUsed;
5722 break;
5723 }
5724 }
5725 if (j == streamsArray.num_streams) {
5726 LOGE("Did not find matching stream to update index");
5727 assert(0);
5728 }
5729
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005730 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005731 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005732 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005733 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005734 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005735 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005736 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005737
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005738        //If two streams have need_metadata set to true, fail the request unless
5739        //we copy/reference-count the metadata buffer
5740        if (streams_need_metadata > 1) {
5741            LOGE("not supporting request in which two streams require"
5742                    " 2 HAL metadata for reprocessing");
5743 pthread_mutex_unlock(&mMutex);
5744 return -EINVAL;
5745 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005746
Emilian Peev7650c122017-01-19 08:24:33 -08005747 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5748 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5749 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5750 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5751 pthread_mutex_unlock(&mMutex);
5752 return BAD_VALUE;
5753 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005754 if (request->input_buffer == NULL) {
5755 /* Set the parameters to backend:
5756 * - For every request in NORMAL MODE
5757 * - For every request in HFR mode during preview only case
5758 * - Once every batch in HFR mode during video recording
5759 */
5760 if (!mBatchSize ||
5761 (mBatchSize && !isVidBufRequested) ||
5762 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5763 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5764 mBatchSize, isVidBufRequested,
5765 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005766
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005767 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5768 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5769 uint32_t m = 0;
5770 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5771 if (streamsArray.stream_request[k].streamID ==
5772 mBatchedStreamsArray.stream_request[m].streamID)
5773 break;
5774 }
5775 if (m == mBatchedStreamsArray.num_streams) {
5776 mBatchedStreamsArray.stream_request\
5777 [mBatchedStreamsArray.num_streams].streamID =
5778 streamsArray.stream_request[k].streamID;
5779 mBatchedStreamsArray.stream_request\
5780 [mBatchedStreamsArray.num_streams].buf_index =
5781 streamsArray.stream_request[k].buf_index;
5782 mBatchedStreamsArray.num_streams =
5783 mBatchedStreamsArray.num_streams + 1;
5784 }
5785 }
5786 streamsArray = mBatchedStreamsArray;
5787 }
5788 /* Update stream id of all the requested buffers */
5789 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5790 streamsArray)) {
5791 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005792 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005793 return BAD_VALUE;
5794 }
5795
5796 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5797 mParameters);
5798 if (rc < 0) {
5799 LOGE("set_parms failed");
5800 }
5801            /* reset to zero because the batch is queued */
5802 mToBeQueuedVidBufs = 0;
5803 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5804 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5805 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005806 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5807 uint32_t m = 0;
5808 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5809 if (streamsArray.stream_request[k].streamID ==
5810 mBatchedStreamsArray.stream_request[m].streamID)
5811 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005812 }
5813 if (m == mBatchedStreamsArray.num_streams) {
5814 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5815 streamID = streamsArray.stream_request[k].streamID;
5816 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5817 buf_index = streamsArray.stream_request[k].buf_index;
5818 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5819 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005820 }
5821 }
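            /* Both batching branches above fold the per-request streamsArray into
             * mBatchedStreamsArray, de-duplicating on streamID. A compact sketch of
             * that merge (hypothetical helper, same cam_stream_ID_t type):
             *
             *   static void mergeStreamsArray(cam_stream_ID_t &dst, const cam_stream_ID_t &src) {
             *       for (uint32_t k = 0; k < src.num_streams; k++) {
             *           uint32_t m = 0;
             *           for (m = 0; m < dst.num_streams; m++) {
             *               if (dst.stream_request[m].streamID == src.stream_request[k].streamID)
             *                   break;
             *           }
             *           if (m == dst.num_streams) {
             *               dst.stream_request[dst.num_streams].streamID =
             *                       src.stream_request[k].streamID;
             *               dst.stream_request[dst.num_streams].buf_index =
             *                       src.stream_request[k].buf_index;
             *               dst.num_streams++;
             *           }
             *       }
             *   }
             */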
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005822 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005823
5824 // Start all streams after the first setting is sent, so that the
5825 // setting can be applied sooner: (0 + apply_delay)th frame.
5826 if (mState == CONFIGURED && mChannelHandle) {
5827 //Then start them.
5828 LOGH("Start META Channel");
5829 rc = mMetadataChannel->start();
5830 if (rc < 0) {
5831 LOGE("META channel start failed");
5832 pthread_mutex_unlock(&mMutex);
5833 return rc;
5834 }
5835
5836 if (mAnalysisChannel) {
5837 rc = mAnalysisChannel->start();
5838 if (rc < 0) {
5839 LOGE("Analysis channel start failed");
5840 mMetadataChannel->stop();
5841 pthread_mutex_unlock(&mMutex);
5842 return rc;
5843 }
5844 }
5845
5846 if (mSupportChannel) {
5847 rc = mSupportChannel->start();
5848 if (rc < 0) {
5849 LOGE("Support channel start failed");
5850 mMetadataChannel->stop();
5851                    /* Although support and analysis are mutually exclusive today,
5852                       adding it in any case for future-proofing */
5853 if (mAnalysisChannel) {
5854 mAnalysisChannel->stop();
5855 }
5856 pthread_mutex_unlock(&mMutex);
5857 return rc;
5858 }
5859 }
5860 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5861 it != mStreamInfo.end(); it++) {
5862 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5863 LOGH("Start Processing Channel mask=%d",
5864 channel->getStreamTypeMask());
5865 rc = channel->start();
5866 if (rc < 0) {
5867 LOGE("channel start failed");
5868 pthread_mutex_unlock(&mMutex);
5869 return rc;
5870 }
5871 }
5872
5873 if (mRawDumpChannel) {
5874 LOGD("Starting raw dump stream");
5875 rc = mRawDumpChannel->start();
5876 if (rc != NO_ERROR) {
5877 LOGE("Error Starting Raw Dump Channel");
5878 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5879 it != mStreamInfo.end(); it++) {
5880 QCamera3Channel *channel =
5881 (QCamera3Channel *)(*it)->stream->priv;
5882 LOGH("Stopping Processing Channel mask=%d",
5883 channel->getStreamTypeMask());
5884 channel->stop();
5885 }
5886 if (mSupportChannel)
5887 mSupportChannel->stop();
5888 if (mAnalysisChannel) {
5889 mAnalysisChannel->stop();
5890 }
5891 mMetadataChannel->stop();
5892 pthread_mutex_unlock(&mMutex);
5893 return rc;
5894 }
5895 }
5896
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005897 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005898 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005899 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005900 if (rc != NO_ERROR) {
5901 LOGE("start_channel failed %d", rc);
5902 pthread_mutex_unlock(&mMutex);
5903 return rc;
5904 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005905
5906 {
5907 // Configure Easel for stream on.
5908 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005909
5910 // Now that sensor mode should have been selected, get the selected sensor mode
5911 // info.
5912 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5913 getCurrentSensorModeInfo(mSensorModeInfo);
5914
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005915 if (EaselManagerClientOpened) {
5916 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chena6c99062017-05-23 13:45:06 -07005917 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
5918 /*enableIpu*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005919 if (rc != OK) {
5920 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5921 mCameraId, mSensorModeInfo.op_pixel_clk);
5922 pthread_mutex_unlock(&mMutex);
5923 return rc;
5924 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005925 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005926 }
5927 }
5928
5929 // Start sensor streaming.
5930 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5931 mChannelHandle);
5932 if (rc != NO_ERROR) {
5933 LOGE("start_sensor_stream_on failed %d", rc);
5934 pthread_mutex_unlock(&mMutex);
5935 return rc;
5936 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005937 }
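            /* Summary of the stream-on ordering implemented above:
             *   1. metadata, analysis, support and all processing channels (plus the
             *      RAW dump channel, if present) are started;
             *   2. start_channel() configures the backend with sensor streaming
             *      deliberately left off;
             *   3. with the sensor mode now selected, Easel MIPI is started when an
             *      Easel manager client is open;
             *   4. start_sensor_streaming() finally turns the sensor on,
             * so the first request's settings can take effect at the
             * (0 + apply_delay)th frame, as noted above. */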
Thierry Strudel3d639192016-09-09 11:52:26 -07005938 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005939 }
5940
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005941 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen3b630e52017-06-02 15:39:47 -07005942 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || mCameraId == 0) {
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005943 Mutex::Autolock l(gHdrPlusClientLock);
5944 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5945 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5946 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5947 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5948 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5949 rc = enableHdrPlusModeLocked();
5950 if (rc != OK) {
5951 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
5952 pthread_mutex_unlock(&mMutex);
5953 return rc;
5954 }
5955
5956 mFirstPreviewIntentSeen = true;
5957 }
5958 }
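    /* HDR+ is brought up lazily here, on the first PREVIEW-intent request after
     * configuration, rather than during stream configuration;
     * enableHdrPlusModeLocked() opens the HDR+ client asynchronously (see the
     * error message above), which presumably keeps Easel/HDR+ startup off the
     * configure_streams path. */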
5959
Thierry Strudel3d639192016-09-09 11:52:26 -07005960 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5961
5962 mState = STARTED;
5963    // Use a timed condition wait
5964 struct timespec ts;
5965 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005966 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005967 if (rc < 0) {
5968 isValidTimeout = 0;
5969 LOGE("Error reading the real time clock!!");
5970 }
5971 else {
5972        // Set the timeout to 5 sec for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005973 int64_t timeout = 5;
5974 {
5975 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5976 // If there is a pending HDR+ request, the following requests may be blocked until the
5977 // HDR+ request is done. So allow a longer timeout.
5978 if (mHdrPlusPendingRequests.size() > 0) {
5979 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5980 }
5981 }
5982 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005983 }
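    /* The absolute deadline above is computed on CLOCK_MONOTONIC, which only
     * pairs correctly with pthread_cond_timedwait() if mRequestCond was created
     * with a matching clock attribute (assumed to be handled by this HAL's
     * condition-variable init helper). A minimal sketch of such an init:
     *
     *   pthread_condattr_t attr;
     *   pthread_condattr_init(&attr);
     *   pthread_condattr_setclock(&attr, CLOCK_MONOTONIC);
     *   pthread_cond_init(&cond, &attr);
     *   pthread_condattr_destroy(&attr);
     */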
5984 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005985 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005986 (mState != ERROR) && (mState != DEINIT)) {
5987 if (!isValidTimeout) {
5988 LOGD("Blocking on conditional wait");
5989 pthread_cond_wait(&mRequestCond, &mMutex);
5990 }
5991 else {
5992 LOGD("Blocking on timed conditional wait");
5993 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5994 if (rc == ETIMEDOUT) {
5995 rc = -ENODEV;
5996 LOGE("Unblocked on timeout!!!!");
5997 break;
5998 }
5999 }
6000 LOGD("Unblocked");
6001 if (mWokenUpByDaemon) {
6002 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006003 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006004 break;
6005 }
6006 }
6007 pthread_mutex_unlock(&mMutex);
6008
6009 return rc;
6010}
6011
6012/*===========================================================================
6013 * FUNCTION : dump
6014 *
6015 * DESCRIPTION: Dump HAL3 state (pending requests, pending buffers and
6016 *              pending frame drops) to the given file descriptor
6017 * PARAMETERS :
6018 *   @fd : file descriptor to write the dump to
6019 *
6020 * RETURN :
6021 *==========================================================================*/
6022void QCamera3HardwareInterface::dump(int fd)
6023{
6024 pthread_mutex_lock(&mMutex);
6025 dprintf(fd, "\n Camera HAL3 information Begin \n");
6026
6027 dprintf(fd, "\nNumber of pending requests: %zu \n",
6028 mPendingRequestsList.size());
6029 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6030 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6031 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6032 for(pendingRequestIterator i = mPendingRequestsList.begin();
6033 i != mPendingRequestsList.end(); i++) {
6034 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6035 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6036 i->input_buffer);
6037 }
6038 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6039 mPendingBuffersMap.get_num_overall_buffers());
6040 dprintf(fd, "-------+------------------\n");
6041 dprintf(fd, " Frame | Stream type mask \n");
6042 dprintf(fd, "-------+------------------\n");
6043 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6044 for(auto &j : req.mPendingBufferList) {
6045 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6046 dprintf(fd, " %5d | %11d \n",
6047 req.frame_number, channel->getStreamTypeMask());
6048 }
6049 }
6050 dprintf(fd, "-------+------------------\n");
6051
6052 dprintf(fd, "\nPending frame drop list: %zu\n",
6053 mPendingFrameDropList.size());
6054 dprintf(fd, "-------+-----------\n");
6055 dprintf(fd, " Frame | Stream ID \n");
6056 dprintf(fd, "-------+-----------\n");
6057 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6058 i != mPendingFrameDropList.end(); i++) {
6059 dprintf(fd, " %5d | %9d \n",
6060 i->frame_number, i->stream_ID);
6061 }
6062 dprintf(fd, "-------+-----------\n");
6063
6064 dprintf(fd, "\n Camera HAL3 information End \n");
6065
6066 /* use dumpsys media.camera as trigger to send update debug level event */
6067 mUpdateDebugLevel = true;
6068 pthread_mutex_unlock(&mMutex);
6069 return;
6070}
6071
6072/*===========================================================================
6073 * FUNCTION : flush
6074 *
6075 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6076 * conditionally restarts channels
6077 *
6078 * PARAMETERS :
6079 * @ restartChannels: re-start all channels
6080 *
6081 *
6082 * RETURN :
6083 * 0 on success
6084 * Error code on failure
6085 *==========================================================================*/
6086int QCamera3HardwareInterface::flush(bool restartChannels)
6087{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006088 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006089 int32_t rc = NO_ERROR;
6090
6091 LOGD("Unblocking Process Capture Request");
6092 pthread_mutex_lock(&mMutex);
6093 mFlush = true;
6094 pthread_mutex_unlock(&mMutex);
6095
6096 rc = stopAllChannels();
6097 // unlink of dualcam
6098 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006099 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6100 &m_pDualCamCmdPtr->bundle_info;
6101 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006102 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6103 pthread_mutex_lock(&gCamLock);
6104
6105 if (mIsMainCamera == 1) {
6106 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6107 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006108 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006109 // related session id should be session id of linked session
6110 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6111 } else {
6112 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6113 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006114 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006115 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6116 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006117 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006118 pthread_mutex_unlock(&gCamLock);
6119
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006120 rc = mCameraHandle->ops->set_dual_cam_cmd(
6121 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006122 if (rc < 0) {
6123 LOGE("Dualcam: Unlink failed, but still proceed to close");
6124 }
6125 }
6126
6127 if (rc < 0) {
6128 LOGE("stopAllChannels failed");
6129 return rc;
6130 }
6131 if (mChannelHandle) {
6132 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6133 mChannelHandle);
6134 }
6135
6136 // Reset bundle info
6137 rc = setBundleInfo();
6138 if (rc < 0) {
6139 LOGE("setBundleInfo failed %d", rc);
6140 return rc;
6141 }
6142
6143 // Mutex Lock
6144 pthread_mutex_lock(&mMutex);
6145
6146 // Unblock process_capture_request
6147 mPendingLiveRequest = 0;
6148 pthread_cond_signal(&mRequestCond);
6149
6150 rc = notifyErrorForPendingRequests();
6151 if (rc < 0) {
6152 LOGE("notifyErrorForPendingRequests failed");
6153 pthread_mutex_unlock(&mMutex);
6154 return rc;
6155 }
6156
6157 mFlush = false;
6158
6159 // Start the Streams/Channels
6160 if (restartChannels) {
6161 rc = startAllChannels();
6162 if (rc < 0) {
6163 LOGE("startAllChannels failed");
6164 pthread_mutex_unlock(&mMutex);
6165 return rc;
6166 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006167 if (mChannelHandle) {
6168 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006169 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006170 if (rc < 0) {
6171 LOGE("start_channel failed");
6172 pthread_mutex_unlock(&mMutex);
6173 return rc;
6174 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006175 }
6176 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006177 pthread_mutex_unlock(&mMutex);
6178
6179 return 0;
6180}
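/* Note: flush() above performs a full stream-off (stopAllChannels/stop_channel),
 * errors out all pending requests and can optionally restart the channels,
 * whereas flushPerf() below keeps the streams running, flushes the backend and
 * waits for the pending buffers to be returned. */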
6181
6182/*===========================================================================
6183 * FUNCTION : flushPerf
6184 *
6185 * DESCRIPTION: This is the performance-optimized version of flush that does
6186 *              not use stream off; instead it flushes the backend and waits
6187 *              for all pending buffers to be returned
6187 *
6188 * PARAMETERS :
6189 *
6190 *
6191 * RETURN : 0 : success
6192 * -EINVAL: input is malformed (device is not valid)
6193 * -ENODEV: if the device has encountered a serious error
6194 *==========================================================================*/
6195int QCamera3HardwareInterface::flushPerf()
6196{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006197 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006198 int32_t rc = 0;
6199 struct timespec timeout;
6200 bool timed_wait = false;
6201
6202 pthread_mutex_lock(&mMutex);
6203 mFlushPerf = true;
6204 mPendingBuffersMap.numPendingBufsAtFlush =
6205 mPendingBuffersMap.get_num_overall_buffers();
6206 LOGD("Calling flush. Wait for %d buffers to return",
6207 mPendingBuffersMap.numPendingBufsAtFlush);
6208
6209 /* send the flush event to the backend */
6210 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6211 if (rc < 0) {
6212 LOGE("Error in flush: IOCTL failure");
6213 mFlushPerf = false;
6214 pthread_mutex_unlock(&mMutex);
6215 return -ENODEV;
6216 }
6217
6218 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6219 LOGD("No pending buffers in HAL, return flush");
6220 mFlushPerf = false;
6221 pthread_mutex_unlock(&mMutex);
6222 return rc;
6223 }
6224
6225 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006226 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006227 if (rc < 0) {
6228 LOGE("Error reading the real time clock, cannot use timed wait");
6229 } else {
6230 timeout.tv_sec += FLUSH_TIMEOUT;
6231 timed_wait = true;
6232 }
6233
6234 //Block on conditional variable
6235 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6236 LOGD("Waiting on mBuffersCond");
6237 if (!timed_wait) {
6238 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6239 if (rc != 0) {
6240 LOGE("pthread_cond_wait failed due to rc = %s",
6241 strerror(rc));
6242 break;
6243 }
6244 } else {
6245 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6246 if (rc != 0) {
6247 LOGE("pthread_cond_timedwait failed due to rc = %s",
6248 strerror(rc));
6249 break;
6250 }
6251 }
6252 }
6253 if (rc != 0) {
6254 mFlushPerf = false;
6255 pthread_mutex_unlock(&mMutex);
6256 return -ENODEV;
6257 }
6258
6259 LOGD("Received buffers, now safe to return them");
6260
6261 //make sure the channels handle flush
6262 //currently only required for the picture channel to release snapshot resources
6263 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6264 it != mStreamInfo.end(); it++) {
6265 QCamera3Channel *channel = (*it)->channel;
6266 if (channel) {
6267 rc = channel->flush();
6268 if (rc) {
6269 LOGE("Flushing the channels failed with error %d", rc);
6270                // even though the channel flush failed, we need to continue and
6271                // return the buffers we have to the framework; however, the return
6272                // value will be an error
6273 rc = -ENODEV;
6274 }
6275 }
6276 }
6277
6278 /* notify the frameworks and send errored results */
6279 rc = notifyErrorForPendingRequests();
6280 if (rc < 0) {
6281 LOGE("notifyErrorForPendingRequests failed");
6282 pthread_mutex_unlock(&mMutex);
6283 return rc;
6284 }
6285
6286 //unblock process_capture_request
6287 mPendingLiveRequest = 0;
6288 unblockRequestIfNecessary();
6289
6290 mFlushPerf = false;
6291 pthread_mutex_unlock(&mMutex);
6292 LOGD ("Flush Operation complete. rc = %d", rc);
6293 return rc;
6294}
6295
6296/*===========================================================================
6297 * FUNCTION : handleCameraDeviceError
6298 *
6299 * DESCRIPTION: This function calls internal flush and notifies the error to
6300 * framework and updates the state variable.
6301 *
6302 * PARAMETERS : None
6303 *
6304 * RETURN : NO_ERROR on Success
6305 * Error code on failure
6306 *==========================================================================*/
6307int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6308{
6309 int32_t rc = NO_ERROR;
6310
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006311 {
6312 Mutex::Autolock lock(mFlushLock);
6313 pthread_mutex_lock(&mMutex);
6314 if (mState != ERROR) {
6315 //if mState != ERROR, nothing to be done
6316 pthread_mutex_unlock(&mMutex);
6317 return NO_ERROR;
6318 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006319 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006320
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006321 rc = flush(false /* restart channels */);
6322 if (NO_ERROR != rc) {
6323 LOGE("internal flush to handle mState = ERROR failed");
6324 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006325
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006326 pthread_mutex_lock(&mMutex);
6327 mState = DEINIT;
6328 pthread_mutex_unlock(&mMutex);
6329 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006330
6331 camera3_notify_msg_t notify_msg;
6332 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6333 notify_msg.type = CAMERA3_MSG_ERROR;
6334 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6335 notify_msg.message.error.error_stream = NULL;
6336 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006337 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006338
6339 return rc;
6340}
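/* Note on the sequence above: per the camera3 error contract, once
 * CAMERA3_MSG_ERROR_DEVICE is notified the framework treats the device as
 * unusable and is expected to close it; the internal flush(false) is performed
 * first so that pending requests are errored out before the state moves to
 * DEINIT. */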
6341
6342/*===========================================================================
6343 * FUNCTION : captureResultCb
6344 *
6345 * DESCRIPTION: Callback handler for all capture result
6346 * (streams, as well as metadata)
6347 *
6348 * PARAMETERS :
6349 * @metadata : metadata information
6350 * @buffer : actual gralloc buffer to be returned to frameworks.
6351 * NULL if metadata.
6352 *
6353 * RETURN : NONE
6354 *==========================================================================*/
6355void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6356 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6357{
6358 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006359 pthread_mutex_lock(&mMutex);
6360 uint8_t batchSize = mBatchSize;
6361 pthread_mutex_unlock(&mMutex);
6362 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006363 handleBatchMetadata(metadata_buf,
6364 true /* free_and_bufdone_meta_buf */);
6365 } else { /* mBatchSize = 0 */
6366 hdrPlusPerfLock(metadata_buf);
6367 pthread_mutex_lock(&mMutex);
6368 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006369 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006370 true /* last urgent frame of batch metadata */,
6371 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006372 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006373 pthread_mutex_unlock(&mMutex);
6374 }
6375 } else if (isInputBuffer) {
6376 pthread_mutex_lock(&mMutex);
6377 handleInputBufferWithLock(frame_number);
6378 pthread_mutex_unlock(&mMutex);
6379 } else {
6380 pthread_mutex_lock(&mMutex);
6381 handleBufferWithLock(buffer, frame_number);
6382 pthread_mutex_unlock(&mMutex);
6383 }
6384 return;
6385}
6386
6387/*===========================================================================
6388 * FUNCTION : getReprocessibleOutputStreamId
6389 *
6390 * DESCRIPTION: Get source output stream id for the input reprocess stream
6391 * based on size and format, which would be the largest
6392 * output stream if an input stream exists.
6393 *
6394 * PARAMETERS :
6395 * @id : return the stream id if found
6396 *
6397 * RETURN : int32_t type of status
6398 * NO_ERROR -- success
6399 *              non-zero failure code
6400 *==========================================================================*/
6401int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6402{
6403 /* check if any output or bidirectional stream with the same size and format
6404 and return that stream */
6405 if ((mInputStreamInfo.dim.width > 0) &&
6406 (mInputStreamInfo.dim.height > 0)) {
6407 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6408 it != mStreamInfo.end(); it++) {
6409
6410 camera3_stream_t *stream = (*it)->stream;
6411 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6412 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6413 (stream->format == mInputStreamInfo.format)) {
6414 // Usage flag for an input stream and the source output stream
6415 // may be different.
6416 LOGD("Found reprocessible output stream! %p", *it);
6417 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6418 stream->usage, mInputStreamInfo.usage);
6419
6420 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6421 if (channel != NULL && channel->mStreams[0]) {
6422 id = channel->mStreams[0]->getMyServerID();
6423 return NO_ERROR;
6424 }
6425 }
6426 }
6427 } else {
6428 LOGD("No input stream, so no reprocessible output stream");
6429 }
6430 return NAME_NOT_FOUND;
6431}
6432
6433/*===========================================================================
6434 * FUNCTION : lookupFwkName
6435 *
6436 * DESCRIPTION: In case the enum is not the same in fwk and backend,
6437 *              make sure the parameter is correctly propagated
6438 *
6439 * PARAMETERS :
6440 * @arr : map between the two enums
6441 * @len : len of the map
6442 * @hal_name : name of the hal_parm to map
6443 *
6444 * RETURN : int type of status
6445 * fwk_name -- success
6446 *              non-zero failure code
6447 *==========================================================================*/
6448template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6449 size_t len, halType hal_name)
6450{
6451
6452 for (size_t i = 0; i < len; i++) {
6453 if (arr[i].hal_name == hal_name) {
6454 return arr[i].fwk_name;
6455 }
6456 }
6457
6458    /* Not being able to find a matching framework type is not necessarily
6459     * an error case. This happens when mm-camera supports more attributes
6460     * than the framework does */
6461 LOGH("Cannot find matching framework type");
6462 return NAME_NOT_FOUND;
6463}
6464
6465/*===========================================================================
6466 * FUNCTION : lookupHalName
6467 *
6468 * DESCRIPTION: In case the enum is not the same in fwk and backend,
6469 *              make sure the parameter is correctly propagated
6470 *
6471 * PARAMETERS :
6472 * @arr : map between the two enums
6473 * @len : len of the map
6474 * @fwk_name : name of the hal_parm to map
6475 *
6476 * RETURN : int32_t type of status
6477 * hal_name -- success
6478 *              non-zero failure code
6479 *==========================================================================*/
6480template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6481 size_t len, fwkType fwk_name)
6482{
6483 for (size_t i = 0; i < len; i++) {
6484 if (arr[i].fwk_name == fwk_name) {
6485 return arr[i].hal_name;
6486 }
6487 }
6488
6489 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6490 return NAME_NOT_FOUND;
6491}
6492
6493/*===========================================================================
6494 * FUNCTION : lookupProp
6495 *
6496 * DESCRIPTION: lookup a value by its name
6497 *
6498 * PARAMETERS :
6499 * @arr : map between the two enums
6500 * @len : size of the map
6501 * @name : name to be looked up
6502 *
6503 * RETURN : Value if found
6504 * CAM_CDS_MODE_MAX if not found
6505 *==========================================================================*/
6506template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6507 size_t len, const char *name)
6508{
6509 if (name) {
6510 for (size_t i = 0; i < len; i++) {
6511 if (!strcmp(arr[i].desc, name)) {
6512 return arr[i].val;
6513 }
6514 }
6515 }
6516 return CAM_CDS_MODE_MAX;
6517}
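/* Illustrative usage of the lookup helpers above with a hypothetical two-entry
 * map (the real fwk/hal tables expose fwk_name and hal_name, and the property
 * tables used by lookupProp expose desc and val, which is all the templates
 * rely on):
 *
 *   struct ExampleMap { int fwk_name; int hal_name; };
 *   static const ExampleMap kMap[] = { { 0, 10 }, { 1, 11 } };
 *
 *   int hal = lookupHalName(kMap, sizeof(kMap) / sizeof(kMap[0]), 1);   // 11
 *   int fwk = lookupFwkName(kMap, sizeof(kMap) / sizeof(kMap[0]), 10);  // 0
 *   // Either call returns NAME_NOT_FOUND when no entry matches.
 */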
6518
6519/*===========================================================================
6520 * FUNCTION   : translateFromHalMetadata
6521 * DESCRIPTION: Translate metadata from HAL format into camera_metadata_t for the framework
6522 *
6523 * PARAMETERS :
6524 * @metadata : metadata information from callback
6525 * @timestamp: metadata buffer timestamp
6526 * @request_id: request id
6527 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006528 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006529 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6530 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006531 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006532 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6533 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006534 *
6535 * RETURN : camera_metadata_t*
6536 * metadata in a format specified by fwk
6537 *==========================================================================*/
6538camera_metadata_t*
6539QCamera3HardwareInterface::translateFromHalMetadata(
6540 metadata_buffer_t *metadata,
6541 nsecs_t timestamp,
6542 int32_t request_id,
6543 const CameraMetadata& jpegMetadata,
6544 uint8_t pipeline_depth,
6545 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006546 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006547 /* DevCamDebug metadata translateFromHalMetadata argument */
6548 uint8_t DevCamDebug_meta_enable,
6549 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006550 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006551 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006552 bool lastMetadataInBatch,
6553 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006554{
6555 CameraMetadata camMetadata;
6556 camera_metadata_t *resultMetadata;
6557
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006558 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006559        /* In batch mode, if this is not the last metadata in the batch, populate
6560         * only SENSOR_TIMESTAMP. The timestamp is needed for the shutter notify
6561         * calculation. */
6562 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6563 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006564 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006565 }
6566
Thierry Strudel3d639192016-09-09 11:52:26 -07006567 if (jpegMetadata.entryCount())
6568 camMetadata.append(jpegMetadata);
6569
6570 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6571 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6572 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6573 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006574 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006575 if (mBatchSize == 0) {
6576 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6577 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6578 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006579
Samuel Ha68ba5172016-12-15 18:41:12 -08006580 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6581 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
6582 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6583 // DevCamDebug metadata translateFromHalMetadata AF
6584 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6585 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6586 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6587 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6588 }
6589 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6590 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6591 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6592 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6593 }
6594 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6595 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6596 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6597 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6598 }
6599 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6600 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6601 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6602 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6603 }
6604 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6605 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6606 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6607 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6608 }
6609 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6610 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6611 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6612 *DevCamDebug_af_monitor_pdaf_target_pos;
6613 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6614 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6615 }
6616 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6617 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6618 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6619 *DevCamDebug_af_monitor_pdaf_confidence;
6620 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6621 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6622 }
6623 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6624 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6625 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6626 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6627 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6628 }
6629 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6630 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6631 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6632 *DevCamDebug_af_monitor_tof_target_pos;
6633 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6634 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6635 }
6636 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6637 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6638 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6639 *DevCamDebug_af_monitor_tof_confidence;
6640 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6641 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6642 }
6643 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6644 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6645 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6646 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6647 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6648 }
6649 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6650 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6651 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6652 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6653 &fwk_DevCamDebug_af_monitor_type_select, 1);
6654 }
6655 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6656 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6657 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6658 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6659 &fwk_DevCamDebug_af_monitor_refocus, 1);
6660 }
6661 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6662 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6663 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6664 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6665 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6666 }
6667 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6668 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6669 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6670 *DevCamDebug_af_search_pdaf_target_pos;
6671 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6672 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6673 }
6674 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6675 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6676 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6677 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6678 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6679 }
6680 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6681 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6682 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6683 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6684 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6685 }
6686 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6687 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6688 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6689 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6690 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6691 }
6692 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6693 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6694 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6695 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6696 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6697 }
6698 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6699 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6700 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6701 *DevCamDebug_af_search_tof_target_pos;
6702 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6703 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6704 }
6705 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6706 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6707 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6708 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6709 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6710 }
6711 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6712 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6713 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6714 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6715 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6716 }
6717 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6718 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6719 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6720 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6721 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6722 }
6723 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6724 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6725 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6726 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6727 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6728 }
6729 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6730 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6731 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6732 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6733 &fwk_DevCamDebug_af_search_type_select, 1);
6734 }
6735 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6736 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6737 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6738 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6739 &fwk_DevCamDebug_af_search_next_pos, 1);
6740 }
6741 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6742 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6743 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6744 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6745 &fwk_DevCamDebug_af_search_target_pos, 1);
6746 }
6747 // DevCamDebug metadata translateFromHalMetadata AEC
6748 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6749 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6750 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6751 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6752 }
6753 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6754 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6755 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6756 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6757 }
6758 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6759 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6760 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6761 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6762 }
6763 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6764 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6765 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6766 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6767 }
6768 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6769 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6770 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6771 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6772 }
6773 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6774 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6775 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6776 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6777 }
6778 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6779 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6780 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6781 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6782 }
6783 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6784 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6785 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6786 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6787 }
Samuel Ha34229982017-02-17 13:51:11 -08006788 // DevCamDebug metadata translateFromHalMetadata zzHDR
6789 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6790 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6791 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6792 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6793 }
6794 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6795 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006796 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006797 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6798 }
6799 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6800 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6801 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6802 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6803 }
6804 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6805 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006806 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006807 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6808 }
6809 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6810 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6811 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6812 *DevCamDebug_aec_hdr_sensitivity_ratio;
6813 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6814 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6815 }
6816 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6817 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6818 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6819 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6820 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6821 }
6822 // DevCamDebug metadata translateFromHalMetadata ADRC
6823 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6824 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6825 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6826 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6827 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6828 }
6829 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6830 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6831 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6832 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6833 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6834 }
6835 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6836 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6837 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6838 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6839 }
6840 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6841 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6842 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6843 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6844 }
6845 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6846 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6847 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6848 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6849 }
6850 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6851 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6852 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6853 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6854 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006855 // DevCamDebug metadata translateFromHalMetadata AWB
6856 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6857 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6858 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6859 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6860 }
6861 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6862 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6863 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6864 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6865 }
6866 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6867 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6868 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6869 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6870 }
6871 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6872 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6873 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6874 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6875 }
6876 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6877 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6878 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6879 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6880 }
6881 }
6882 // atrace_end(ATRACE_TAG_ALWAYS);
6883
Thierry Strudel3d639192016-09-09 11:52:26 -07006884 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6885 int64_t fwk_frame_number = *frame_number;
6886 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6887 }
6888
6889 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6890 int32_t fps_range[2];
6891 fps_range[0] = (int32_t)float_range->min_fps;
6892 fps_range[1] = (int32_t)float_range->max_fps;
6893 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6894 fps_range, 2);
6895 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6896 fps_range[0], fps_range[1]);
6897 }
6898
6899 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6900 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6901 }
6902
6903 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6904 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6905 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6906 *sceneMode);
6907 if (NAME_NOT_FOUND != val) {
6908 uint8_t fwkSceneMode = (uint8_t)val;
6909 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6910 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6911 fwkSceneMode);
6912 }
6913 }
6914
6915 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6916 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6917 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6918 }
6919
6920 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6921 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6922 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6923 }
6924
6925 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6926 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6927 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6928 }
6929
6930 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6931 CAM_INTF_META_EDGE_MODE, metadata) {
6932 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6933 }
6934
6935 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6936 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6937 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6938 }
6939
6940 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6941 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6942 }
6943
6944 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6945 if (0 <= *flashState) {
6946 uint8_t fwk_flashState = (uint8_t) *flashState;
6947 if (!gCamCapability[mCameraId]->flash_available) {
6948 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6949 }
6950 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6951 }
6952 }
6953
6954 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6955 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6956 if (NAME_NOT_FOUND != val) {
6957 uint8_t fwk_flashMode = (uint8_t)val;
6958 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6959 }
6960 }
6961
6962 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6963 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6964 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6965 }
6966
6967 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6968 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6969 }
6970
6971 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6972 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6973 }
6974
6975 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6976 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6977 }
6978
6979 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6980 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6981 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6982 }
6983
6984 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6985 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6986 LOGD("fwk_videoStab = %d", fwk_videoStab);
6987 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6988 } else {
6989 // Regardless of whether video stabilization is supported, CTS expects the EIS result
6990 // to be non-NULL, so hardcode the video stabilization result to OFF mode.
6991 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6992 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006993 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006994 }
6995
6996 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6997 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6998 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6999 }
7000
7001 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7002 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7003 }
7004
Thierry Strudel3d639192016-09-09 11:52:26 -07007005 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7006 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007007 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007008
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007009 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7010 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007011
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007012 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007013 blackLevelAppliedPattern->cam_black_level[0],
7014 blackLevelAppliedPattern->cam_black_level[1],
7015 blackLevelAppliedPattern->cam_black_level[2],
7016 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007017 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7018 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007019
7020#ifndef USE_HAL_3_3
7021 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307022 // Need to convert the internal 14-bit depth to the sensor's 10-bit raw depth
Zhijun Heb753c672016-06-15 14:50:48 -07007023 // space, i.e. divide by 2^(14-10) = 16.
Jason Lee4f3d96e2017-02-28 19:24:14 +05307024 fwk_blackLevelInd[0] /= 16.0;
7025 fwk_blackLevelInd[1] /= 16.0;
7026 fwk_blackLevelInd[2] /= 16.0;
7027 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007028 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7029 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007030#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007031 }
7032
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007033#ifndef USE_HAL_3_3
7034 // Fixed whitelevel is used by ISP/Sensor
7035 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7036 &gCamCapability[mCameraId]->white_level, 1);
7037#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007038
7039 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7040 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7041 int32_t scalerCropRegion[4];
7042 scalerCropRegion[0] = hScalerCropRegion->left;
7043 scalerCropRegion[1] = hScalerCropRegion->top;
7044 scalerCropRegion[2] = hScalerCropRegion->width;
7045 scalerCropRegion[3] = hScalerCropRegion->height;
7046
7047 // Adjust crop region from sensor output coordinate system to active
7048 // array coordinate system.
7049 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7050 scalerCropRegion[2], scalerCropRegion[3]);
7051
7052 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7053 }
7054
7055 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7056 LOGD("sensorExpTime = %lld", *sensorExpTime);
7057 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7058 }
7059
7060 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7061 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7062 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7063 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7064 }
7065
7066 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7067 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7068 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7069 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7070 sensorRollingShutterSkew, 1);
7071 }
7072
7073 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7074 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7075 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7076
7077 //calculate the noise profile based on sensitivity
7078 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7079 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7080 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
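        // ANDROID_SENSOR_NOISE_PROFILE expects one (S, O) coefficient pair per color channel,
        // modelling the pixel noise variance as approximately S * signal + O; the same pair is
        // reported for every channel here.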
7081 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7082 noise_profile[i] = noise_profile_S;
7083 noise_profile[i+1] = noise_profile_O;
7084 }
7085 LOGD("noise model entry (S, O) is (%f, %f)",
7086 noise_profile_S, noise_profile_O);
7087 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7088 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7089 }
7090
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007091#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007092 int32_t fwk_ispSensitivity = 100;
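    // ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST is expressed in ISO arithmetic units
    // (100 = no boost); the ISP sensitivity is scaled by the post-stats factor before reporting.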
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007093 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007094 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007095 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007096 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7097 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7098 }
7099 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007100#endif
7101
Thierry Strudel3d639192016-09-09 11:52:26 -07007102 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7103 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7104 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7105 }
7106
7107 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7108 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7109 *faceDetectMode);
7110 if (NAME_NOT_FOUND != val) {
7111 uint8_t fwk_faceDetectMode = (uint8_t)val;
7112 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7113
7114 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7115 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7116 CAM_INTF_META_FACE_DETECTION, metadata) {
7117 uint8_t numFaces = MIN(
7118 faceDetectionInfo->num_faces_detected, MAX_ROI);
7119 int32_t faceIds[MAX_ROI];
7120 uint8_t faceScores[MAX_ROI];
7121 int32_t faceRectangles[MAX_ROI * 4];
7122 int32_t faceLandmarks[MAX_ROI * 6];
7123 size_t j = 0, k = 0;
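                    // j indexes faceRectangles (4 values per face); k indexes faceLandmarks
                    // (TOTAL_LANDMARK_INDICES, i.e. 6 values per face, matching the
                    // numFaces * 6U update below).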
7124
7125 for (size_t i = 0; i < numFaces; i++) {
7126 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7127 // Adjust crop region from sensor output coordinate system to active
7128 // array coordinate system.
7129 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7130 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7131 rect.width, rect.height);
7132
7133 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7134 faceRectangles+j, -1);
7135
Jason Lee8ce36fa2017-04-19 19:40:37 -07007136 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7137 "bottom-right (%d, %d)",
7138 faceDetectionInfo->frame_id, i,
7139 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7140 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7141
Thierry Strudel3d639192016-09-09 11:52:26 -07007142 j+= 4;
7143 }
7144 if (numFaces <= 0) {
7145 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7146 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7147 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7148 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7149 }
7150
7151 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7152 numFaces);
7153 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7154 faceRectangles, numFaces * 4U);
7155 if (fwk_faceDetectMode ==
7156 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7157 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7158 CAM_INTF_META_FACE_LANDMARK, metadata) {
7159
7160 for (size_t i = 0; i < numFaces; i++) {
7161 // Map the co-ordinate sensor output coordinate system to active
7162 // array coordinate system.
7163 mCropRegionMapper.toActiveArray(
7164 landmarks->face_landmarks[i].left_eye_center.x,
7165 landmarks->face_landmarks[i].left_eye_center.y);
7166 mCropRegionMapper.toActiveArray(
7167 landmarks->face_landmarks[i].right_eye_center.x,
7168 landmarks->face_landmarks[i].right_eye_center.y);
7169 mCropRegionMapper.toActiveArray(
7170 landmarks->face_landmarks[i].mouth_center.x,
7171 landmarks->face_landmarks[i].mouth_center.y);
7172
7173 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007174
7175 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7176 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7177 faceDetectionInfo->frame_id, i,
7178 faceLandmarks[k + LEFT_EYE_X],
7179 faceLandmarks[k + LEFT_EYE_Y],
7180 faceLandmarks[k + RIGHT_EYE_X],
7181 faceLandmarks[k + RIGHT_EYE_Y],
7182 faceLandmarks[k + MOUTH_X],
7183 faceLandmarks[k + MOUTH_Y]);
7184
Thierry Strudel04e026f2016-10-10 11:27:36 -07007185 k+= TOTAL_LANDMARK_INDICES;
7186 }
7187 } else {
7188 for (size_t i = 0; i < numFaces; i++) {
7189 setInvalidLandmarks(faceLandmarks+k);
7190 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007191 }
7192 }
7193
Jason Lee49619db2017-04-13 12:07:22 -07007194 for (size_t i = 0; i < numFaces; i++) {
7195 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7196
7197 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7198 faceDetectionInfo->frame_id, i, faceIds[i]);
7199 }
7200
Thierry Strudel3d639192016-09-09 11:52:26 -07007201 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7202 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7203 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007204 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007205 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7206 CAM_INTF_META_FACE_BLINK, metadata) {
7207 uint8_t detected[MAX_ROI];
7208 uint8_t degree[MAX_ROI * 2];
7209 for (size_t i = 0; i < numFaces; i++) {
7210 detected[i] = blinks->blink[i].blink_detected;
7211 degree[2 * i] = blinks->blink[i].left_blink;
7212 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007213
Jason Lee49619db2017-04-13 12:07:22 -07007214 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7215 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7216 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7217 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007218 }
7219 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7220 detected, numFaces);
7221 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7222 degree, numFaces * 2);
7223 }
7224 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7225 CAM_INTF_META_FACE_SMILE, metadata) {
7226 uint8_t degree[MAX_ROI];
7227 uint8_t confidence[MAX_ROI];
7228 for (size_t i = 0; i < numFaces; i++) {
7229 degree[i] = smiles->smile[i].smile_degree;
7230 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007231
Jason Lee49619db2017-04-13 12:07:22 -07007232 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7233 "smile_degree=%d, smile_score=%d",
7234 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007235 }
7236 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7237 degree, numFaces);
7238 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7239 confidence, numFaces);
7240 }
7241 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7242 CAM_INTF_META_FACE_GAZE, metadata) {
7243 int8_t angle[MAX_ROI];
7244 int32_t direction[MAX_ROI * 3];
7245 int8_t degree[MAX_ROI * 2];
7246 for (size_t i = 0; i < numFaces; i++) {
7247 angle[i] = gazes->gaze[i].gaze_angle;
7248 direction[3 * i] = gazes->gaze[i].updown_dir;
7249 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7250 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7251 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7252 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007253
7254 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7255 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7256 "left_right_gaze=%d, top_bottom_gaze=%d",
7257 faceDetectionInfo->frame_id, i, angle[i],
7258 direction[3 * i], direction[3 * i + 1],
7259 direction[3 * i + 2],
7260 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007261 }
7262 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7263 (uint8_t *)angle, numFaces);
7264 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7265 direction, numFaces * 3);
7266 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7267 (uint8_t *)degree, numFaces * 2);
7268 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007269 }
7270 }
7271 }
7272 }
7273
7274 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7275 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007276 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007277 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007278 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007279
Shuzhen Wang14415f52016-11-16 18:26:18 -08007280 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7281 histogramBins = *histBins;
7282 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7283 }
7284
7285 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007286 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7287 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007288 int32_t* histogramData = NULL;
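                // Report a single channel's histogram; for Bayer stats, channel types without an
                // explicit case fall back to the R channel buffer.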
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007289
7290 switch (stats_data->type) {
7291 case CAM_HISTOGRAM_TYPE_BAYER:
7292 switch (stats_data->bayer_stats.data_type) {
7293 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007294 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7295 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007296 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007297 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7298 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007299 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007300 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7301 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007302 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007303 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007304 case CAM_STATS_CHANNEL_R:
7305 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007306 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7307 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007308 }
7309 break;
7310 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007311 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007312 break;
7313 }
7314
Shuzhen Wang14415f52016-11-16 18:26:18 -08007315 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007316 }
7317 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007318 }
7319
7320 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7321 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7322 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7323 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7324 }
7325
7326 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7327 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7328 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7329 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7330 }
7331
7332 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7333 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7334 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7335 CAM_MAX_SHADING_MAP_HEIGHT);
7336 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7337 CAM_MAX_SHADING_MAP_WIDTH);
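        // The shading map carries four gain factors (one per Bayer channel) for each grid cell,
        // hence 4 * map_width * map_height floats.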
7338 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7339 lensShadingMap->lens_shading, 4U * map_width * map_height);
7340 }
7341
7342 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7343 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7344 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7345 }
7346
7347 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7348 //Populate CAM_INTF_META_TONEMAP_CURVES
7349 /* ch0 = G, ch 1 = B, ch 2 = R*/
7350 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7351 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7352 tonemap->tonemap_points_cnt,
7353 CAM_MAX_TONEMAP_CURVE_SIZE);
7354 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7355 }
7356
7357 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7358 &tonemap->curves[0].tonemap_points[0][0],
7359 tonemap->tonemap_points_cnt * 2);
7360
7361 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7362 &tonemap->curves[1].tonemap_points[0][0],
7363 tonemap->tonemap_points_cnt * 2);
7364
7365 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7366 &tonemap->curves[2].tonemap_points[0][0],
7367 tonemap->tonemap_points_cnt * 2);
7368 }
7369
7370 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7371 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7372 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7373 CC_GAIN_MAX);
7374 }
7375
7376 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7377 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7378 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7379 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7380 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7381 }
7382
7383 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7384 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7385 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7386 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7387 toneCurve->tonemap_points_cnt,
7388 CAM_MAX_TONEMAP_CURVE_SIZE);
7389 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7390 }
7391 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7392 (float*)toneCurve->curve.tonemap_points,
7393 toneCurve->tonemap_points_cnt * 2);
7394 }
7395
7396 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7397 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7398 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7399 predColorCorrectionGains->gains, 4);
7400 }
7401
7402 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7403 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7404 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7405 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7406 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7407 }
7408
7409 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7410 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7411 }
7412
7413 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7414 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7415 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7416 }
7417
7418 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7419 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7420 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7421 }
7422
7423 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7424 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7425 *effectMode);
7426 if (NAME_NOT_FOUND != val) {
7427 uint8_t fwk_effectMode = (uint8_t)val;
7428 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7429 }
7430 }
7431
7432 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7433 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7434 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7435 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7436 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7437 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7438 }
7439 int32_t fwk_testPatternData[4];
7440 fwk_testPatternData[0] = testPatternData->r;
7441 fwk_testPatternData[3] = testPatternData->b;
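        // R and B map directly; the two green components are swapped as needed so that Gr/Gb
        // land in the positions expected for the reported CFA arrangement.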
7442 switch (gCamCapability[mCameraId]->color_arrangement) {
7443 case CAM_FILTER_ARRANGEMENT_RGGB:
7444 case CAM_FILTER_ARRANGEMENT_GRBG:
7445 fwk_testPatternData[1] = testPatternData->gr;
7446 fwk_testPatternData[2] = testPatternData->gb;
7447 break;
7448 case CAM_FILTER_ARRANGEMENT_GBRG:
7449 case CAM_FILTER_ARRANGEMENT_BGGR:
7450 fwk_testPatternData[2] = testPatternData->gr;
7451 fwk_testPatternData[1] = testPatternData->gb;
7452 break;
7453 default:
7454 LOGE("color arrangement %d is not supported",
7455 gCamCapability[mCameraId]->color_arrangement);
7456 break;
7457 }
7458 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7459 }
7460
7461 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7462 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7463 }
7464
7465 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7466 String8 str((const char *)gps_methods);
7467 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7468 }
7469
7470 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7471 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7472 }
7473
7474 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7475 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7476 }
7477
7478 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7479 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7480 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7481 }
7482
7483 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7484 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7485 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7486 }
7487
7488 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7489 int32_t fwk_thumb_size[2];
7490 fwk_thumb_size[0] = thumb_size->width;
7491 fwk_thumb_size[1] = thumb_size->height;
7492 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7493 }
7494
7495 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7496 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7497 privateData,
7498 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7499 }
7500
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007501 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007502 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007503 meteringMode, 1);
7504 }
7505
Thierry Strudel54dc9782017-02-15 12:12:10 -08007506 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7507 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7508 LOGD("hdr_scene_data: %d %f\n",
7509 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7510 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7511 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7512 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7513 &isHdr, 1);
7514 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7515 &isHdrConfidence, 1);
7516 }
7517
7518
7519
Thierry Strudel3d639192016-09-09 11:52:26 -07007520 if (metadata->is_tuning_params_valid) {
7521 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7522 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7523 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7524
7525
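        // Blob layout: six uint32 header fields (data version followed by the sensor, VFE, CPP,
        // CAC and mod3 segment sizes), then the sensor, VFE, CPP and CAC tuning segments, each
        // clamped to its corresponding *_MAX size.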
7526 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7527 sizeof(uint32_t));
7528 data += sizeof(uint32_t);
7529
7530 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7531 sizeof(uint32_t));
7532 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7533 data += sizeof(uint32_t);
7534
7535 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7536 sizeof(uint32_t));
7537 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7538 data += sizeof(uint32_t);
7539
7540 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7541 sizeof(uint32_t));
7542 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7543 data += sizeof(uint32_t);
7544
7545 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7546 sizeof(uint32_t));
7547 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7548 data += sizeof(uint32_t);
7549
7550 metadata->tuning_params.tuning_mod3_data_size = 0;
7551 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7552 sizeof(uint32_t));
7553 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7554 data += sizeof(uint32_t);
7555
7556 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7557 TUNING_SENSOR_DATA_MAX);
7558 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7559 count);
7560 data += count;
7561
7562 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7563 TUNING_VFE_DATA_MAX);
7564 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7565 count);
7566 data += count;
7567
7568 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7569 TUNING_CPP_DATA_MAX);
7570 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7571 count);
7572 data += count;
7573
7574 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7575 TUNING_CAC_DATA_MAX);
7576 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7577 count);
7578 data += count;
7579
7580 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7581 (int32_t *)(void *)tuning_meta_data_blob,
7582 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7583 }
7584
7585 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7586 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7587 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7588 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7589 NEUTRAL_COL_POINTS);
7590 }
7591
7592 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7593 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7594 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7595 }
7596
7597 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7598 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7599 // Adjust crop region from sensor output coordinate system to active
7600 // array coordinate system.
7601 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7602 hAeRegions->rect.width, hAeRegions->rect.height);
7603
7604 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7605 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7606 REGIONS_TUPLE_COUNT);
7607 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7608 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7609 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7610 hAeRegions->rect.height);
7611 }
7612
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007613 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7614 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7615 if (NAME_NOT_FOUND != val) {
7616 uint8_t fwkAfMode = (uint8_t)val;
7617 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7618 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7619 } else {
7620 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7621 val);
7622 }
7623 }
7624
Thierry Strudel3d639192016-09-09 11:52:26 -07007625 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7626 uint8_t fwk_afState = (uint8_t) *afState;
7627 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007628 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007629 }
7630
7631 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7632 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7633 }
7634
7635 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7636 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7637 }
7638
7639 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7640 uint8_t fwk_lensState = *lensState;
7641 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7642 }
7643
Thierry Strudel3d639192016-09-09 11:52:26 -07007644
7645 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007646 uint32_t ab_mode = *hal_ab_mode;
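        // The framework antibanding enum only has OFF/50HZ/60HZ/AUTO, so collapse the HAL's
        // region-specific auto modes back to plain AUTO before the lookup.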
7647 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7648 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7649 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7650 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007651 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007652 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007653 if (NAME_NOT_FOUND != val) {
7654 uint8_t fwk_ab_mode = (uint8_t)val;
7655 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7656 }
7657 }
7658
7659 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7660 int val = lookupFwkName(SCENE_MODES_MAP,
7661 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7662 if (NAME_NOT_FOUND != val) {
7663 uint8_t fwkBestshotMode = (uint8_t)val;
7664 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7665 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7666 } else {
7667 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7668 }
7669 }
7670
7671 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7672 uint8_t fwk_mode = (uint8_t) *mode;
7673 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7674 }
7675
7676 /* Constant metadata values to be updated */
7677 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7678 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7679
7680 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7681 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7682
7683 int32_t hotPixelMap[2];
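    // Hot pixel map mode is reported as OFF above, so publish an empty coordinate list (count 0).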
7684 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7685
7686 // CDS
7687 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7688 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7689 }
7690
Thierry Strudel04e026f2016-10-10 11:27:36 -07007691 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7692 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007693 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
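        // mCurrFeatureState tracks which features are currently active; update the
        // STAGGERED_VIDEO_HDR bit and emit the PROFILE_META_HDR_TOGGLED log only when the
        // sensor HDR state actually changes.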
Thierry Strudel04e026f2016-10-10 11:27:36 -07007694 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7695 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7696 } else {
7697 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7698 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007699
7700 if(fwk_hdr != curr_hdr_state) {
7701 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7702 if(fwk_hdr)
7703 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7704 else
7705 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7706 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007707 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7708 }
7709
Thierry Strudel54dc9782017-02-15 12:12:10 -08007710 //binning correction
7711 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7712 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7713 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7714 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7715 }
7716
Thierry Strudel04e026f2016-10-10 11:27:36 -07007717 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007718 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007719 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7720 int8_t is_ir_on = 0;
7721
7722 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7723 if(is_ir_on != curr_ir_state) {
7724 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7725 if(is_ir_on)
7726 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7727 else
7728 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7729 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007730 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007731 }
7732
Thierry Strudel269c81a2016-10-12 12:13:59 -07007733 // AEC SPEED
7734 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7735 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7736 }
7737
7738 // AWB SPEED
7739 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7740 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7741 }
7742
Thierry Strudel3d639192016-09-09 11:52:26 -07007743 // TNR
7744 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7745 uint8_t tnr_enable = tnr->denoise_enable;
7746 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007747 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7748 int8_t is_tnr_on = 0;
7749
7750 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7751 if(is_tnr_on != curr_tnr_state) {
7752 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7753 if(is_tnr_on)
7754 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7755 else
7756 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7757 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007758
7759 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7760 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7761 }
7762
7763 // Reprocess crop data
7764 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7765 uint8_t cnt = crop_data->num_of_streams;
7766 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7767 // mm-qcamera-daemon only posts crop_data for streams
7768 // not linked to pproc, so the absence of valid crop
7769 // metadata is not necessarily an error case.
7770 LOGD("No valid crop metadata entries");
7771 } else {
7772 uint32_t reproc_stream_id;
7773 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7774 LOGD("No reprocessible stream found, ignore crop data");
7775 } else {
7776 int rc = NO_ERROR;
7777 Vector<int32_t> roi_map;
7778 int32_t *crop = new int32_t[cnt*4];
7779 if (NULL == crop) {
7780 rc = NO_MEMORY;
7781 }
7782 if (NO_ERROR == rc) {
7783 int32_t streams_found = 0;
7784 for (size_t i = 0; i < cnt; i++) {
7785 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7786 if (pprocDone) {
7787 // HAL already does internal reprocessing,
7788 // either via reprocessing before JPEG encoding,
7789 // or offline postprocessing for pproc bypass case.
7790 crop[0] = 0;
7791 crop[1] = 0;
7792 crop[2] = mInputStreamInfo.dim.width;
7793 crop[3] = mInputStreamInfo.dim.height;
7794 } else {
7795 crop[0] = crop_data->crop_info[i].crop.left;
7796 crop[1] = crop_data->crop_info[i].crop.top;
7797 crop[2] = crop_data->crop_info[i].crop.width;
7798 crop[3] = crop_data->crop_info[i].crop.height;
7799 }
7800 roi_map.add(crop_data->crop_info[i].roi_map.left);
7801 roi_map.add(crop_data->crop_info[i].roi_map.top);
7802 roi_map.add(crop_data->crop_info[i].roi_map.width);
7803 roi_map.add(crop_data->crop_info[i].roi_map.height);
7804 streams_found++;
7805 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7806 crop[0], crop[1], crop[2], crop[3]);
7807 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7808 crop_data->crop_info[i].roi_map.left,
7809 crop_data->crop_info[i].roi_map.top,
7810 crop_data->crop_info[i].roi_map.width,
7811 crop_data->crop_info[i].roi_map.height);
7812 break;
7813
7814 }
7815 }
7816 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7817 &streams_found, 1);
7818 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7819 crop, (size_t)(streams_found * 4));
7820 if (roi_map.array()) {
7821 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7822 roi_map.array(), roi_map.size());
7823 }
7824 }
7825 if (crop) {
7826 delete [] crop;
7827 }
7828 }
7829 }
7830 }
7831
7832 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7833 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7834 // so hardcode the CAC result to OFF mode.
7835 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7836 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7837 } else {
7838 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7839 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7840 *cacMode);
7841 if (NAME_NOT_FOUND != val) {
7842 uint8_t resultCacMode = (uint8_t)val;
7843 // Check whether the CAC result from the callback matches the CAC mode set by the framework.
7844 // If not, report the CAC mode that came in the corresponding request.
7845 if (fwk_cacMode != resultCacMode) {
7846 resultCacMode = fwk_cacMode;
7847 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007848 //Check if CAC is disabled by property
7849 if (m_cacModeDisabled) {
7850 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7851 }
7852
Thierry Strudel3d639192016-09-09 11:52:26 -07007853 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7854 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7855 } else {
7856 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7857 }
7858 }
7859 }
7860
7861 // Post blob of cam_cds_data through vendor tag.
7862 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7863 uint8_t cnt = cdsInfo->num_of_streams;
7864 cam_cds_data_t cdsDataOverride;
7865 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7866 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7867 cdsDataOverride.num_of_streams = 1;
7868 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7869 uint32_t reproc_stream_id;
7870 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7871 LOGD("No reprocessible stream found, ignore cds data");
7872 } else {
7873 for (size_t i = 0; i < cnt; i++) {
7874 if (cdsInfo->cds_info[i].stream_id ==
7875 reproc_stream_id) {
7876 cdsDataOverride.cds_info[0].cds_enable =
7877 cdsInfo->cds_info[i].cds_enable;
7878 break;
7879 }
7880 }
7881 }
7882 } else {
7883 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7884 }
7885 camMetadata.update(QCAMERA3_CDS_INFO,
7886 (uint8_t *)&cdsDataOverride,
7887 sizeof(cam_cds_data_t));
7888 }
7889
7890 // Ldaf calibration data
7891 if (!mLdafCalibExist) {
7892 IF_META_AVAILABLE(uint32_t, ldafCalib,
7893 CAM_INTF_META_LDAF_EXIF, metadata) {
7894 mLdafCalibExist = true;
7895 mLdafCalib[0] = ldafCalib[0];
7896 mLdafCalib[1] = ldafCalib[1];
7897 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7898 ldafCalib[0], ldafCalib[1]);
7899 }
7900 }
7901
Thierry Strudel54dc9782017-02-15 12:12:10 -08007902 // EXIF debug data through vendor tag
7903 /*
7904 * Mobicat Mask can assume 3 values:
7905 * 1 refers to Mobicat data,
7906 * 2 refers to Stats Debug and Exif Debug Data
7907 * 3 refers to Mobicat and Stats Debug Data
7908 * We want to make sure that we are sending Exif debug data
7909 * only when Mobicat Mask is 2.
7910 */
7911 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7912 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7913 (uint8_t *)(void *)mExifParams.debug_params,
7914 sizeof(mm_jpeg_debug_exif_params_t));
7915 }
7916
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007917 // Reprocess and DDM debug data through vendor tag
7918 cam_reprocess_info_t repro_info;
7919 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007920 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7921 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007922 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007923 }
7924 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7925 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007926 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007927 }
7928 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7929 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007930 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007931 }
7932 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7933 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007934 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007935 }
7936 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7937 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007938 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007939 }
7940 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007941 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007942 }
7943 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7944 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007945 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007946 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007947 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7948 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7949 }
7950 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7951 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7952 }
7953 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7954 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
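// Illustrative sketch (not part of the original HAL code): a consumer of this result
// could read the blob back from the vendor tag as shown below. The name resultMeta is
// hypothetical; it stands for a const CameraMetadata wrapping the finished capture result.
#if 0 // example only, not compiled
camera_metadata_ro_entry entry =
        resultMeta.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB);
if (entry.count == sizeof(cam_reprocess_info_t)) {
    cam_reprocess_info_t reproInfo;
    // The blob is a packed copy of the struct populated above.
    memcpy(&reproInfo, entry.data.u8, sizeof(reproInfo));
}
#endif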
Thierry Strudel3d639192016-09-09 11:52:26 -07007955
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007956 // INSTANT AEC MODE
7957 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7958 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7959 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7960 }
7961
Shuzhen Wange763e802016-03-31 10:24:29 -07007962 // AF scene change
7963 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7964 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7965 }
7966
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07007967 // Enable ZSL
7968 if (enableZsl != nullptr) {
7969 uint8_t value = *enableZsl ?
7970 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
7971 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
7972 }
7973
Xu Han821ea9c2017-05-23 09:00:40 -07007974 // OIS Data
7975 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
7976 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
7977 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
7978 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
7979 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
7980 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
7981 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
7982 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
7983 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
7984 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
7985 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
7986 }
7987
Thierry Strudel3d639192016-09-09 11:52:26 -07007988 resultMetadata = camMetadata.release();
7989 return resultMetadata;
7990}
7991
7992/*===========================================================================
7993 * FUNCTION : saveExifParams
7994 *
7995 * DESCRIPTION: Save 3A and stats EXIF debug parameters from the metadata callback
7996 *
7997 * PARAMETERS :
7998 * @metadata : metadata information from callback
7999 *
8000 * RETURN : none
8001 *
8002 *==========================================================================*/
8003void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8004{
8005 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8006 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8007 if (mExifParams.debug_params) {
8008 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8009 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8010 }
8011 }
8012 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8013 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8014 if (mExifParams.debug_params) {
8015 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8016 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8017 }
8018 }
8019 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8020 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8021 if (mExifParams.debug_params) {
8022 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8023 mExifParams.debug_params->af_debug_params_valid = TRUE;
8024 }
8025 }
8026 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8027 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8028 if (mExifParams.debug_params) {
8029 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8030 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8031 }
8032 }
8033 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8034 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8035 if (mExifParams.debug_params) {
8036 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8037 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8038 }
8039 }
8040 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8041 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8042 if (mExifParams.debug_params) {
8043 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8044 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8045 }
8046 }
8047 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8048 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8049 if (mExifParams.debug_params) {
8050 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8051 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8052 }
8053 }
8054 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8055 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8056 if (mExifParams.debug_params) {
8057 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8058 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8059 }
8060 }
8061}
8062
8063/*===========================================================================
8064 * FUNCTION : get3AExifParams
8065 *
8066 * DESCRIPTION: Return the cached EXIF parameters, including 3A debug data
8067 *
8068 * PARAMETERS : none
8069 *
8070 *
8071 * RETURN : mm_jpeg_exif_params_t
8072 *
8073 *==========================================================================*/
8074mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8075{
8076 return mExifParams;
8077}
8078
8079/*===========================================================================
8080 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8081 *
8082 * DESCRIPTION: Translate urgent (early partial result) metadata into framework format
8083 *
8084 * PARAMETERS :
8085 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008086 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8087 * urgent metadata in a batch. Always true for
8088 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008089 *
8090 * RETURN : camera_metadata_t*
8091 * metadata in a format specified by fwk
8092 *==========================================================================*/
8093camera_metadata_t*
8094QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008095 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008096{
8097 CameraMetadata camMetadata;
8098 camera_metadata_t *resultMetadata;
8099
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008100 if (!lastUrgentMetadataInBatch) {
8101 /* In batch mode, use empty metadata if this is not the last in batch
8102 */
8103 resultMetadata = allocate_camera_metadata(0, 0);
8104 return resultMetadata;
8105 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008106
8107 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8108 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8109 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8110 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8111 }
8112
8113 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8114 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8115 &aecTrigger->trigger, 1);
8116 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8117 &aecTrigger->trigger_id, 1);
8118 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8119 aecTrigger->trigger);
8120 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8121 aecTrigger->trigger_id);
8122 }
8123
8124 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8125 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8126 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8127 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8128 }
8129
Thierry Strudel3d639192016-09-09 11:52:26 -07008130 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8131 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8132 &af_trigger->trigger, 1);
8133 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8134 af_trigger->trigger);
8135 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8136 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8137 af_trigger->trigger_id);
8138 }
8139
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008140 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8141 /*af regions*/
8142 int32_t afRegions[REGIONS_TUPLE_COUNT];
8143 // Adjust crop region from sensor output coordinate system to active
8144 // array coordinate system.
8145 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8146 hAfRegions->rect.width, hAfRegions->rect.height);
8147
8148 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8149 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8150 REGIONS_TUPLE_COUNT);
8151 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8152 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8153 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8154 hAfRegions->rect.height);
8155 }
8156
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008157 // AF region confidence
8158 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8159 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8160 }
8161
Thierry Strudel3d639192016-09-09 11:52:26 -07008162 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8163 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8164 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8165 if (NAME_NOT_FOUND != val) {
8166 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8167 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8168 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8169 } else {
8170 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8171 }
8172 }
8173
8174 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8175 uint32_t aeMode = CAM_AE_MODE_MAX;
8176 int32_t flashMode = CAM_FLASH_MODE_MAX;
8177 int32_t redeye = -1;
8178 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8179 aeMode = *pAeMode;
8180 }
8181 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8182 flashMode = *pFlashMode;
8183 }
8184 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8185 redeye = *pRedeye;
8186 }
8187
8188 if (1 == redeye) {
8189 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8190 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8191 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8192 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8193 flashMode);
8194 if (NAME_NOT_FOUND != val) {
8195 fwk_aeMode = (uint8_t)val;
8196 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8197 } else {
8198 LOGE("Unsupported flash mode %d", flashMode);
8199 }
8200 } else if (aeMode == CAM_AE_MODE_ON) {
8201 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8202 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8203 } else if (aeMode == CAM_AE_MODE_OFF) {
8204 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8205 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008206 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8207 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8208 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008209 } else {
8210 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8211 "flashMode:%d, aeMode:%u!!!",
8212 redeye, flashMode, aeMode);
8213 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008214 if (mInstantAEC) {
8215 // Increment frame index count until a bound is reached for instant AEC.
8216 mInstantAecFrameIdxCount++;
8217 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8218 CAM_INTF_META_AEC_INFO, metadata) {
8219 LOGH("ae_params->settled = %d",ae_params->settled);
8220 // If AEC settled, or if number of frames reached bound value,
8221 // should reset instant AEC.
8222 if (ae_params->settled ||
8223 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8224 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8225 mInstantAEC = false;
8226 mResetInstantAEC = true;
8227 mInstantAecFrameIdxCount = 0;
8228 }
8229 }
8230 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008231 resultMetadata = camMetadata.release();
8232 return resultMetadata;
8233}
8234
8235/*===========================================================================
8236 * FUNCTION : dumpMetadataToFile
8237 *
8238 * DESCRIPTION: Dumps tuning metadata to file system
8239 *
8240 * PARAMETERS :
8241 * @meta : tuning metadata
8242 * @dumpFrameCount : current dump frame count
8243 * @enabled : Enable mask
8244 *
8245 *==========================================================================*/
8246void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8247 uint32_t &dumpFrameCount,
8248 bool enabled,
8249 const char *type,
8250 uint32_t frameNumber)
8251{
8252 //Some sanity checks
8253 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8254 LOGE("Tuning sensor data size bigger than expected %d: %d",
8255 meta.tuning_sensor_data_size,
8256 TUNING_SENSOR_DATA_MAX);
8257 return;
8258 }
8259
8260 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8261 LOGE("Tuning VFE data size bigger than expected %d: %d",
8262 meta.tuning_vfe_data_size,
8263 TUNING_VFE_DATA_MAX);
8264 return;
8265 }
8266
8267 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8268 LOGE("Tuning CPP data size bigger than expected %d: %d",
8269 meta.tuning_cpp_data_size,
8270 TUNING_CPP_DATA_MAX);
8271 return;
8272 }
8273
8274 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8275 LOGE("Tuning CAC data size bigger than expected %d: %d",
8276 meta.tuning_cac_data_size,
8277 TUNING_CAC_DATA_MAX);
8278 return;
8279 }
8280 //
8281
8282 if(enabled){
8283 char timeBuf[FILENAME_MAX];
8284 char buf[FILENAME_MAX];
8285 memset(buf, 0, sizeof(buf));
8286 memset(timeBuf, 0, sizeof(timeBuf));
8287 time_t current_time;
8288 struct tm * timeinfo;
8289 time (&current_time);
8290 timeinfo = localtime (&current_time);
8291 if (timeinfo != NULL) {
8292 strftime (timeBuf, sizeof(timeBuf),
8293 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8294 }
8295 String8 filePath(timeBuf);
8296 snprintf(buf,
8297 sizeof(buf),
8298 "%dm_%s_%d.bin",
8299 dumpFrameCount,
8300 type,
8301 frameNumber);
8302 filePath.append(buf);
8303 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8304 if (file_fd >= 0) {
8305 ssize_t written_len = 0;
8306 meta.tuning_data_version = TUNING_DATA_VERSION;
8307 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8308 written_len += write(file_fd, data, sizeof(uint32_t));
8309 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8310 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8311 written_len += write(file_fd, data, sizeof(uint32_t));
8312 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8313 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8314 written_len += write(file_fd, data, sizeof(uint32_t));
8315 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8316 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8317 written_len += write(file_fd, data, sizeof(uint32_t));
8318 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8319 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8320 written_len += write(file_fd, data, sizeof(uint32_t));
8321 meta.tuning_mod3_data_size = 0;
8322 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8323 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8324 written_len += write(file_fd, data, sizeof(uint32_t));
8325 size_t total_size = meta.tuning_sensor_data_size;
8326 data = (void *)((uint8_t *)&meta.data);
8327 written_len += write(file_fd, data, total_size);
8328 total_size = meta.tuning_vfe_data_size;
8329 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8330 written_len += write(file_fd, data, total_size);
8331 total_size = meta.tuning_cpp_data_size;
8332 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8333 written_len += write(file_fd, data, total_size);
8334 total_size = meta.tuning_cac_data_size;
8335 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8336 written_len += write(file_fd, data, total_size);
8337 close(file_fd);
8338 } else {
8339 LOGE("fail to open file for metadata dumping");
8340 }
8341 }
8342}
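// Illustrative sketch (not part of the original HAL code): the dump written above is six
// uint32 words (data version, then sensor/VFE/CPP/CAC/mod3 sizes) followed by the sensor,
// VFE, CPP and CAC payloads in that order. A minimal host-side reader might look like the
// following; the file path, std::vector usage and a little-endian producer are assumptions.
#if 0 // example only, not compiled into the HAL
#include <cstdio>
#include <cstdint>
#include <vector>
static void readTuningDump(const char *path) {
    FILE *fp = fopen(path, "rb");
    if (fp == NULL) return;
    uint32_t hdr[6] = {0}; // version + five size fields, in the order written above
    if (fread(hdr, sizeof(uint32_t), 6, fp) == 6) {
        std::vector<uint8_t> sensorData(hdr[1]); // first payload: sensor data
        if (!sensorData.empty()) {
            fread(sensorData.data(), 1, sensorData.size(), fp);
        }
        // VFE (hdr[2]), CPP (hdr[3]) and CAC (hdr[4]) payloads follow in order.
    }
    fclose(fp);
}
#endif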
8343
8344/*===========================================================================
8345 * FUNCTION : cleanAndSortStreamInfo
8346 *
8347 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8348 * and sort them such that raw streams are at the end of the list.
8349 * This is a workaround for a camera daemon constraint.
8350 *
8351 * PARAMETERS : None
8352 *
8353 *==========================================================================*/
8354void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8355{
8356 List<stream_info_t *> newStreamInfo;
8357
8358 /*clean up invalid streams*/
8359 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8360 it != mStreamInfo.end();) {
8361 if(((*it)->status) == INVALID){
8362 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8363 delete channel;
8364 free(*it);
8365 it = mStreamInfo.erase(it);
8366 } else {
8367 it++;
8368 }
8369 }
8370
8371 // Move preview/video/callback/snapshot streams into newList
8372 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8373 it != mStreamInfo.end();) {
8374 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8375 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8376 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8377 newStreamInfo.push_back(*it);
8378 it = mStreamInfo.erase(it);
8379 } else
8380 it++;
8381 }
8382 // Move raw streams into newList
8383 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8384 it != mStreamInfo.end();) {
8385 newStreamInfo.push_back(*it);
8386 it = mStreamInfo.erase(it);
8387 }
8388
8389 mStreamInfo = newStreamInfo;
8390}
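// For example (illustrative only): a configured stream set of {RAW16, preview YUV,
// JPEG BLOB} is reordered to {preview YUV, JPEG BLOB, RAW16}; the relative order of the
// non-raw streams is preserved and raw streams are appended at the tail, as the camera
// daemon expects.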
8391
8392/*===========================================================================
8393 * FUNCTION : extractJpegMetadata
8394 *
8395 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8396 * JPEG metadata is cached in HAL, and return as part of capture
8397 * result when metadata is returned from camera daemon.
8398 *
8399 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8400 * @request: capture request
8401 *
8402 *==========================================================================*/
8403void QCamera3HardwareInterface::extractJpegMetadata(
8404 CameraMetadata& jpegMetadata,
8405 const camera3_capture_request_t *request)
8406{
8407 CameraMetadata frame_settings;
8408 frame_settings = request->settings;
8409
8410 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8411 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8412 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8413 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8414
8415 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8416 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8417 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8418 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8419
8420 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8421 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8422 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8423 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8424
8425 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8426 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8427 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8428 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8429
8430 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8431 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8432 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8433 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8434
8435 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8436 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8437 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8438 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8439
8440 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8441 int32_t thumbnail_size[2];
8442 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8443 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8444 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8445 int32_t orientation =
8446 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008447 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008448 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8449 int32_t temp;
8450 temp = thumbnail_size[0];
8451 thumbnail_size[0] = thumbnail_size[1];
8452 thumbnail_size[1] = temp;
8453 }
8454 }
8455 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8456 thumbnail_size,
8457 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8458 }
8459
8460}
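// For example (illustrative only): a request with ANDROID_JPEG_ORIENTATION = 90 and a
// 320x240 thumbnail, where the HAL performs the rotation itself (needJpegExifRotation()
// returns false), caches the thumbnail size as 240x320 in the JPEG metadata.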
8461
8462/*===========================================================================
8463 * FUNCTION : convertToRegions
8464 *
8465 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8466 *
8467 * PARAMETERS :
8468 * @rect : cam_rect_t struct to convert
8469 * @region : int32_t destination array
8470 * @weight : if we are converting from cam_area_t, weight is valid
8471 * else weight = -1
8472 *
8473 *==========================================================================*/
8474void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8475 int32_t *region, int weight)
8476{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008477 region[FACE_LEFT] = rect.left;
8478 region[FACE_TOP] = rect.top;
8479 region[FACE_RIGHT] = rect.left + rect.width;
8480 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008481 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008482 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008483 }
8484}
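// For example (illustrative only): rect {left=100, top=200, width=400, height=300} with
// weight 1 is converted to the framework tuple [100, 200, 500, 500, 1], i.e.
// (left, top, right, bottom, weight).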
8485
8486/*===========================================================================
8487 * FUNCTION : convertFromRegions
8488 *
8489 * DESCRIPTION: helper method to convert a framework region tuple
8490 *              [x_min, y_min, x_max, y_max, weight] into cam_area_t
8491 *
8492 * PARAMETERS :
8493 * @roi : cam_area_t destination struct
8494 * @frame_settings : request settings containing the region tag
8495 * @tag : metadata tag of the region to convert
8496 *
8497 *==========================================================================*/
8498void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008499 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008500{
Thierry Strudel3d639192016-09-09 11:52:26 -07008501 int32_t x_min = frame_settings.find(tag).data.i32[0];
8502 int32_t y_min = frame_settings.find(tag).data.i32[1];
8503 int32_t x_max = frame_settings.find(tag).data.i32[2];
8504 int32_t y_max = frame_settings.find(tag).data.i32[3];
8505 roi.weight = frame_settings.find(tag).data.i32[4];
8506 roi.rect.left = x_min;
8507 roi.rect.top = y_min;
8508 roi.rect.width = x_max - x_min;
8509 roi.rect.height = y_max - y_min;
8510}
8511
8512/*===========================================================================
8513 * FUNCTION : resetIfNeededROI
8514 *
8515 * DESCRIPTION: helper method to clamp the roi to the scaler crop region;
8516 * returns false if the roi lies completely outside the crop region
8517 *
8518 * PARAMETERS :
8519 * @roi : cam_area_t struct to resize
8520 * @scalerCropRegion : cam_crop_region_t region to compare against
8521 *
8522 *
8523 *==========================================================================*/
8524bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8525 const cam_crop_region_t* scalerCropRegion)
8526{
8527 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8528 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8529 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8530 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8531
8532 /* According to the spec, weight = 0 indicates the roi should be disabled.
8533 * Without this check, the calculations below that validate whether the roi
8534 * is inside the scaler crop region would fail, the roi would not be reset,
8535 * and the algorithm would continue to use a stale roi window.
8536 */
8537 if (roi->weight == 0) {
8538 return true;
8539 }
8540
8541 if ((roi_x_max < scalerCropRegion->left) ||
8542 // right edge of roi window is left of scaler crop's left edge
8543 (roi_y_max < scalerCropRegion->top) ||
8544 // bottom edge of roi window is above scaler crop's top edge
8545 (roi->rect.left > crop_x_max) ||
8546 // left edge of roi window is beyond (right of) scaler crop's right edge
8547 (roi->rect.top > crop_y_max)){
8548 // top edge of roi window is below scaler crop's bottom edge
8549 return false;
8550 }
8551 if (roi->rect.left < scalerCropRegion->left) {
8552 roi->rect.left = scalerCropRegion->left;
8553 }
8554 if (roi->rect.top < scalerCropRegion->top) {
8555 roi->rect.top = scalerCropRegion->top;
8556 }
8557 if (roi_x_max > crop_x_max) {
8558 roi_x_max = crop_x_max;
8559 }
8560 if (roi_y_max > crop_y_max) {
8561 roi_y_max = crop_y_max;
8562 }
8563 roi->rect.width = roi_x_max - roi->rect.left;
8564 roi->rect.height = roi_y_max - roi->rect.top;
8565 return true;
8566}
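// Illustrative example with hypothetical values: for a scaler crop region of
// (left=1000, top=750, 2000x1500), an ROI of (0, 0, 4000x3000) overlaps the crop and is
// clamped to (1000, 750, 2000x1500) with a return value of true, while an ROI of
// (3500, 2500, 100x100) lies entirely outside the crop and the function returns false.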
8567
8568/*===========================================================================
8569 * FUNCTION : convertLandmarks
8570 *
8571 * DESCRIPTION: helper method to extract the landmarks from face detection info
8572 *
8573 * PARAMETERS :
8574 * @landmark_data : input landmark data to be converted
8575 * @landmarks : int32_t destination array
8576 *
8577 *
8578 *==========================================================================*/
8579void QCamera3HardwareInterface::convertLandmarks(
8580 cam_face_landmarks_info_t landmark_data,
8581 int32_t *landmarks)
8582{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008583 if (landmark_data.is_left_eye_valid) {
8584 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8585 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8586 } else {
8587 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8588 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8589 }
8590
8591 if (landmark_data.is_right_eye_valid) {
8592 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8593 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8594 } else {
8595 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8596 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8597 }
8598
8599 if (landmark_data.is_mouth_valid) {
8600 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8601 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8602 } else {
8603 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8604 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8605 }
8606}
8607
8608/*===========================================================================
8609 * FUNCTION : setInvalidLandmarks
8610 *
8611 * DESCRIPTION: helper method to set invalid landmarks
8612 *
8613 * PARAMETERS :
8614 * @landmarks : int32_t destination array
8615 *
8616 *
8617 *==========================================================================*/
8618void QCamera3HardwareInterface::setInvalidLandmarks(
8619 int32_t *landmarks)
8620{
8621 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8622 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8623 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8624 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8625 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8626 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008627}
8628
8629#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008630
8631/*===========================================================================
8632 * FUNCTION : getCapabilities
8633 *
8634 * DESCRIPTION: query camera capability from back-end
8635 *
8636 * PARAMETERS :
8637 * @ops : mm-interface ops structure
8638 * @cam_handle : camera handle for which we need capability
8639 *
8640 * RETURN : ptr type of capability structure
8641 * capability for success
8642 * NULL for failure
8643 *==========================================================================*/
8644cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8645 uint32_t cam_handle)
8646{
8647 int rc = NO_ERROR;
8648 QCamera3HeapMemory *capabilityHeap = NULL;
8649 cam_capability_t *cap_ptr = NULL;
8650
8651 if (ops == NULL) {
8652 LOGE("Invalid arguments");
8653 return NULL;
8654 }
8655
8656 capabilityHeap = new QCamera3HeapMemory(1);
8657 if (capabilityHeap == NULL) {
8658 LOGE("creation of capabilityHeap failed");
8659 return NULL;
8660 }
8661
8662 /* Allocate memory for capability buffer */
8663 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8664 if(rc != OK) {
8665 LOGE("No memory for cappability");
8666 goto allocate_failed;
8667 }
8668
8669 /* Map memory for capability buffer */
8670 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8671
8672 rc = ops->map_buf(cam_handle,
8673 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8674 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8675 if(rc < 0) {
8676 LOGE("failed to map capability buffer");
8677 rc = FAILED_TRANSACTION;
8678 goto map_failed;
8679 }
8680
8681 /* Query Capability */
8682 rc = ops->query_capability(cam_handle);
8683 if(rc < 0) {
8684 LOGE("failed to query capability");
8685 rc = FAILED_TRANSACTION;
8686 goto query_failed;
8687 }
8688
8689 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8690 if (cap_ptr == NULL) {
8691 LOGE("out of memory");
8692 rc = NO_MEMORY;
8693 goto query_failed;
8694 }
8695
8696 memset(cap_ptr, 0, sizeof(cam_capability_t));
8697 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8698
8699 int index;
8700 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8701 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8702 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8703 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8704 }
8705
8706query_failed:
8707 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8708map_failed:
8709 capabilityHeap->deallocate();
8710allocate_failed:
8711 delete capabilityHeap;
8712
8713 if (rc != NO_ERROR) {
8714 return NULL;
8715 } else {
8716 return cap_ptr;
8717 }
8718}
8719
Thierry Strudel3d639192016-09-09 11:52:26 -07008720/*===========================================================================
8721 * FUNCTION : initCapabilities
8722 *
8723 * DESCRIPTION: initialize camera capabilities in static data struct
8724 *
8725 * PARAMETERS :
8726 * @cameraId : camera Id
8727 *
8728 * RETURN : int32_t type of status
8729 * NO_ERROR -- success
8730 * none-zero failure code
8731 *==========================================================================*/
8732int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8733{
8734 int rc = 0;
8735 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008736 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008737
8738 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8739 if (rc) {
8740 LOGE("camera_open failed. rc = %d", rc);
8741 goto open_failed;
8742 }
8743 if (!cameraHandle) {
8744 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8745 goto open_failed;
8746 }
8747
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008748 handle = get_main_camera_handle(cameraHandle->camera_handle);
8749 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8750 if (gCamCapability[cameraId] == NULL) {
8751 rc = FAILED_TRANSACTION;
8752 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008753 }
8754
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008755 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008756 if (is_dual_camera_by_idx(cameraId)) {
8757 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8758 gCamCapability[cameraId]->aux_cam_cap =
8759 getCapabilities(cameraHandle->ops, handle);
8760 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8761 rc = FAILED_TRANSACTION;
8762 free(gCamCapability[cameraId]);
8763 goto failed_op;
8764 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008765
8766 // Copy the main camera capability to main_cam_cap struct
8767 gCamCapability[cameraId]->main_cam_cap =
8768 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8769 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8770 LOGE("out of memory");
8771 rc = NO_MEMORY;
8772 goto failed_op;
8773 }
8774 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8775 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008776 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008777failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008778 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8779 cameraHandle = NULL;
8780open_failed:
8781 return rc;
8782}
8783
8784/*==========================================================================
8785 * FUNCTION : get3Aversion
8786 *
8787 * DESCRIPTION: get the Q3A S/W version
8788 *
8789 * PARAMETERS :
8790 * @sw_version: Reference of Q3A structure which will hold version info upon
8791 * return
8792 *
8793 * RETURN : None
8794 *
8795 *==========================================================================*/
8796void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8797{
8798 if(gCamCapability[mCameraId])
8799 sw_version = gCamCapability[mCameraId]->q3a_version;
8800 else
8801 LOGE("Capability structure NULL!");
8802}
8803
8804
8805/*===========================================================================
8806 * FUNCTION : initParameters
8807 *
8808 * DESCRIPTION: initialize camera parameters
8809 *
8810 * PARAMETERS :
8811 *
8812 * RETURN : int32_t type of status
8813 * NO_ERROR -- success
8814 * none-zero failure code
8815 *==========================================================================*/
8816int QCamera3HardwareInterface::initParameters()
8817{
8818 int rc = 0;
8819
8820 //Allocate Set Param Buffer
8821 mParamHeap = new QCamera3HeapMemory(1);
8822 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8823 if(rc != OK) {
8824 rc = NO_MEMORY;
8825 LOGE("Failed to allocate SETPARM Heap memory");
8826 delete mParamHeap;
8827 mParamHeap = NULL;
8828 return rc;
8829 }
8830
8831 //Map memory for parameters buffer
8832 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8833 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8834 mParamHeap->getFd(0),
8835 sizeof(metadata_buffer_t),
8836 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8837 if(rc < 0) {
8838 LOGE("failed to map SETPARM buffer");
8839 rc = FAILED_TRANSACTION;
8840 mParamHeap->deallocate();
8841 delete mParamHeap;
8842 mParamHeap = NULL;
8843 return rc;
8844 }
8845
8846 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8847
8848 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8849 return rc;
8850}
8851
8852/*===========================================================================
8853 * FUNCTION : deinitParameters
8854 *
8855 * DESCRIPTION: de-initialize camera parameters
8856 *
8857 * PARAMETERS :
8858 *
8859 * RETURN : NONE
8860 *==========================================================================*/
8861void QCamera3HardwareInterface::deinitParameters()
8862{
8863 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8864 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8865
8866 mParamHeap->deallocate();
8867 delete mParamHeap;
8868 mParamHeap = NULL;
8869
8870 mParameters = NULL;
8871
8872 free(mPrevParameters);
8873 mPrevParameters = NULL;
8874}
8875
8876/*===========================================================================
8877 * FUNCTION : calcMaxJpegSize
8878 *
8879 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8880 *
8881 * PARAMETERS :
8882 *
8883 * RETURN : max_jpeg_size
8884 *==========================================================================*/
8885size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8886{
8887 size_t max_jpeg_size = 0;
8888 size_t temp_width, temp_height;
8889 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8890 MAX_SIZES_CNT);
8891 for (size_t i = 0; i < count; i++) {
8892 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8893 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8894 if (temp_width * temp_height > max_jpeg_size ) {
8895 max_jpeg_size = temp_width * temp_height;
8896 }
8897 }
8898 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8899 return max_jpeg_size;
8900}
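// For example (illustrative only): if the largest picture size is 4000x3000, the
// returned bound is 4000 * 3000 * 3/2 + sizeof(camera3_jpeg_blob_t), i.e. 18,000,000
// bytes (a worst-case YUV420-sized buffer) plus the JPEG blob trailer.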
8901
8902/*===========================================================================
8903 * FUNCTION : getMaxRawSize
8904 *
8905 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8906 *
8907 * PARAMETERS :
8908 *
8909 * RETURN : Largest supported Raw Dimension
8910 *==========================================================================*/
8911cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8912{
8913 int max_width = 0;
8914 cam_dimension_t maxRawSize;
8915
8916 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8917 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8918 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8919 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8920 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8921 }
8922 }
8923 return maxRawSize;
8924}
8925
8926
8927/*===========================================================================
8928 * FUNCTION : calcMaxJpegDim
8929 *
8930 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8931 *
8932 * PARAMETERS :
8933 *
8934 * RETURN : max_jpeg_dim
8935 *==========================================================================*/
8936cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8937{
8938 cam_dimension_t max_jpeg_dim;
8939 cam_dimension_t curr_jpeg_dim;
8940 max_jpeg_dim.width = 0;
8941 max_jpeg_dim.height = 0;
8942 curr_jpeg_dim.width = 0;
8943 curr_jpeg_dim.height = 0;
8944 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8945 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8946 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8947 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8948 max_jpeg_dim.width * max_jpeg_dim.height ) {
8949 max_jpeg_dim.width = curr_jpeg_dim.width;
8950 max_jpeg_dim.height = curr_jpeg_dim.height;
8951 }
8952 }
8953 return max_jpeg_dim;
8954}
8955
8956/*===========================================================================
8957 * FUNCTION : addStreamConfig
8958 *
8959 * DESCRIPTION: adds the stream configuration to the array
8960 *
8961 * PARAMETERS :
8962 * @available_stream_configs : pointer to stream configuration array
8963 * @scalar_format : scalar format
8964 * @dim : configuration dimension
8965 * @config_type : input or output configuration type
8966 *
8967 * RETURN : NONE
8968 *==========================================================================*/
8969void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8970 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8971{
8972 available_stream_configs.add(scalar_format);
8973 available_stream_configs.add(dim.width);
8974 available_stream_configs.add(dim.height);
8975 available_stream_configs.add(config_type);
8976}
8977
8978/*===========================================================================
8979 * FUNCTION : supportBurstCapture
8980 *
8981 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8982 *
8983 * PARAMETERS :
8984 * @cameraId : camera Id
8985 *
8986 * RETURN : true if camera supports BURST_CAPTURE
8987 * false otherwise
8988 *==========================================================================*/
8989bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8990{
8991 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8992 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8993 const int32_t highResWidth = 3264;
8994 const int32_t highResHeight = 2448;
8995
8996 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8997 // Maximum resolution images cannot be captured at >= 10fps
8998 // -> not supporting BURST_CAPTURE
8999 return false;
9000 }
9001
9002 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9003 // Maximum resolution images can be captured at >= 20fps
9004 // --> supporting BURST_CAPTURE
9005 return true;
9006 }
9007
9008 // Find the smallest highRes resolution, or largest resolution if there is none
9009 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9010 MAX_SIZES_CNT);
9011 size_t highRes = 0;
9012 while ((highRes + 1 < totalCnt) &&
9013 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9014 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9015 highResWidth * highResHeight)) {
9016 highRes++;
9017 }
9018 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9019 return true;
9020 } else {
9021 return false;
9022 }
9023}
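// Illustrative example with hypothetical sensor timings: a full-resolution
// picture_min_duration of ~33ms (about 30 fps) is within the 50ms bound, so
// BURST_CAPTURE is reported directly; a value of ~66ms (about 15 fps) instead falls
// through to checking whether the smallest >= 8MP (3264x2448) size stays within the
// 50ms bound.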
9024
9025/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009026 * FUNCTION : getPDStatIndex
9027 *
9028 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9029 *
9030 * PARAMETERS :
9031 * @caps : camera capabilities
9032 *
9033 * RETURN : int32_t type
9034 * non-negative - on success
9035 * -1 - on failure
9036 *==========================================================================*/
9037int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9038 if (nullptr == caps) {
9039 return -1;
9040 }
9041
9042 uint32_t metaRawCount = caps->meta_raw_channel_count;
9043 int32_t ret = -1;
9044 for (size_t i = 0; i < metaRawCount; i++) {
9045 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9046 ret = i;
9047 break;
9048 }
9049 }
9050
9051 return ret;
9052}
9053
9054/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009055 * FUNCTION : initStaticMetadata
9056 *
9057 * DESCRIPTION: initialize the static metadata
9058 *
9059 * PARAMETERS :
9060 * @cameraId : camera Id
9061 *
9062 * RETURN : int32_t type of status
9063 * 0 -- success
9064 * non-zero failure code
9065 *==========================================================================*/
9066int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9067{
9068 int rc = 0;
9069 CameraMetadata staticInfo;
9070 size_t count = 0;
9071 bool limitedDevice = false;
9072 char prop[PROPERTY_VALUE_MAX];
9073 bool supportBurst = false;
9074
9075 supportBurst = supportBurstCapture(cameraId);
9076
9077 /* If the sensor is a YUV sensor (no raw support), or if per-frame control is
9078 * not guaranteed, or if the min fps of the max resolution is less than 20 fps,
9079 * it is advertised as a limited device. */
9080 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9081 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9082 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9083 !supportBurst;
9084
9085 uint8_t supportedHwLvl = limitedDevice ?
9086 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009087#ifndef USE_HAL_3_3
9088 // LEVEL_3 - This device will support level 3.
9089 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9090#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009091 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009092#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009093
9094 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9095 &supportedHwLvl, 1);
9096
9097 bool facingBack = false;
9098 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9099 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9100 facingBack = true;
9101 }
9102 /*HAL 3 only*/
9103 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9104 &gCamCapability[cameraId]->min_focus_distance, 1);
9105
9106 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9107 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9108
9109 /*should be using focal lengths but sensor doesn't provide that info now*/
9110 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9111 &gCamCapability[cameraId]->focal_length,
9112 1);
9113
9114 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9115 gCamCapability[cameraId]->apertures,
9116 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9117
9118 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9119 gCamCapability[cameraId]->filter_densities,
9120 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9121
9122
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009123 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9124 size_t mode_count =
9125 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9126 for (size_t i = 0; i < mode_count; i++) {
9127 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9128 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009129 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009130 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009131
9132 int32_t lens_shading_map_size[] = {
9133 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9134 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9135 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9136 lens_shading_map_size,
9137 sizeof(lens_shading_map_size)/sizeof(int32_t));
9138
9139 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9140 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9141
9142 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9143 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9144
9145 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9146 &gCamCapability[cameraId]->max_frame_duration, 1);
9147
9148 camera_metadata_rational baseGainFactor = {
9149 gCamCapability[cameraId]->base_gain_factor.numerator,
9150 gCamCapability[cameraId]->base_gain_factor.denominator};
9151 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9152 &baseGainFactor, 1);
9153
9154 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9155 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9156
9157 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9158 gCamCapability[cameraId]->pixel_array_size.height};
9159 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9160 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9161
9162 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9163 gCamCapability[cameraId]->active_array_size.top,
9164 gCamCapability[cameraId]->active_array_size.width,
9165 gCamCapability[cameraId]->active_array_size.height};
9166 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9167 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9168
9169 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9170 &gCamCapability[cameraId]->white_level, 1);
9171
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009172 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9173 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9174 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009175 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009176 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009177
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009178#ifndef USE_HAL_3_3
9179 bool hasBlackRegions = false;
9180 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9181 LOGW("black_region_count: %d is bounded to %d",
9182 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9183 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9184 }
9185 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9186 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9187 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9188 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9189 }
9190 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9191 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9192 hasBlackRegions = true;
9193 }
9194#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009195 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9196 &gCamCapability[cameraId]->flash_charge_duration, 1);
9197
9198 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9199 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9200
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009201 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9202 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9203 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009204 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9205 &timestampSource, 1);
9206
Thierry Strudel54dc9782017-02-15 12:12:10 -08009207 //update histogram vendor data
9208 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009209 &gCamCapability[cameraId]->histogram_size, 1);
9210
Thierry Strudel54dc9782017-02-15 12:12:10 -08009211 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009212 &gCamCapability[cameraId]->max_histogram_count, 1);
9213
Shuzhen Wang14415f52016-11-16 18:26:18 -08009214 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9215 //so that the app can request fewer bins than the maximum supported.
9216 std::vector<int32_t> histBins;
9217 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9218 histBins.push_back(maxHistBins);
9219 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9220 (maxHistBins & 0x1) == 0) {
9221 histBins.push_back(maxHistBins >> 1);
9222 maxHistBins >>= 1;
9223 }
9224 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9225 histBins.data(), histBins.size());
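// For example, assuming max_histogram_count is 256 and MIN_CAM_HISTOGRAM_STATS_SIZE is 64,
// the loop above advertises {256, 128, 64}; the halving stops at the first value that is
// odd or smaller than the minimum supported size.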
9226
Thierry Strudel3d639192016-09-09 11:52:26 -07009227 int32_t sharpness_map_size[] = {
9228 gCamCapability[cameraId]->sharpness_map_size.width,
9229 gCamCapability[cameraId]->sharpness_map_size.height};
9230
9231 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9232 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9233
9234 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9235 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9236
Emilian Peev0f3c3162017-03-15 12:57:46 +00009237 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9238 if (0 <= indexPD) {
9239 // Advertise PD stats data as part of the Depth capabilities
9240 int32_t depthWidth =
9241 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9242 int32_t depthHeight =
9243 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9244 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9245 assert(0 < depthSamplesCount);
9246 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9247 &depthSamplesCount, 1);
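// Worked example of the formula above: a hypothetical 640x480 PD map yields
// (640 * 480 * 2) / 16 = 38400 depth samples.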
9248
9249 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9250 depthHeight,
9251 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9252 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9253 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9254 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9255 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9256
9257 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9258 depthHeight, 33333333,
9259 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9260 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9261 depthMinDuration,
9262 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9263
9264 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9265 depthHeight, 0,
9266 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9267 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9268 depthStallDuration,
9269 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9270
9271 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9272 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9273 }
9274
Thierry Strudel3d639192016-09-09 11:52:26 -07009275 int32_t scalar_formats[] = {
9276 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9277 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9278 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9279 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9280 HAL_PIXEL_FORMAT_RAW10,
9281 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009282 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9283 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9284 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009285
9286 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9287 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9288 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9289 count, MAX_SIZES_CNT, available_processed_sizes);
9290 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9291 available_processed_sizes, count * 2);
9292
9293 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9294 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9295 makeTable(gCamCapability[cameraId]->raw_dim,
9296 count, MAX_SIZES_CNT, available_raw_sizes);
9297 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9298 available_raw_sizes, count * 2);
9299
9300 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9301 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9302 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9303 count, MAX_SIZES_CNT, available_fps_ranges);
9304 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9305 available_fps_ranges, count * 2);
9306
9307 camera_metadata_rational exposureCompensationStep = {
9308 gCamCapability[cameraId]->exp_compensation_step.numerator,
9309 gCamCapability[cameraId]->exp_compensation_step.denominator};
9310 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9311 &exposureCompensationStep, 1);
9312
9313 Vector<uint8_t> availableVstabModes;
9314 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9315 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009316 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009317 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009318 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009319 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009320 count = IS_TYPE_MAX;
9321 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9322 for (size_t i = 0; i < count; i++) {
9323 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9324 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9325 eisSupported = true;
9326 break;
9327 }
9328 }
9329 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009330 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9331 }
9332 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9333 availableVstabModes.array(), availableVstabModes.size());
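// Net effect: OFF is always advertised; ON is added only for the back camera when the
// persist.camera.eis.enable property is set and the sensor reports EIS 2.0 or 3.0 support.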
9334
9335 /*HAL 1 and HAL 3 common*/
9336 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9337 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9338 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009339 // Cap the max zoom to the max preferred value
9340 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009341 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9342 &maxZoom, 1);
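// For example, a zoom ratio table whose last entry is 600 yields 600 / 100 = 6x,
// which is then capped at MAX_PREFERRED_ZOOM_RATIO.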
9343
9344 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9345 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9346
9347 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9348 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9349 max3aRegions[2] = 0; /* AF not supported */
9350 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9351 max3aRegions, 3);
9352
9353 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9354 memset(prop, 0, sizeof(prop));
9355 property_get("persist.camera.facedetect", prop, "1");
9356 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9357 LOGD("Support face detection mode: %d",
9358 supportedFaceDetectMode);
9359
9360 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009361 /* supported mode should be OFF if the max number of faces is 0 */
9362 if (maxFaces <= 0) {
9363 supportedFaceDetectMode = 0;
9364 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009365 Vector<uint8_t> availableFaceDetectModes;
9366 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9367 if (supportedFaceDetectMode == 1) {
9368 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9369 } else if (supportedFaceDetectMode == 2) {
9370 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9371 } else if (supportedFaceDetectMode == 3) {
9372 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9373 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9374 } else {
9375 maxFaces = 0;
9376 }
9377 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9378 availableFaceDetectModes.array(),
9379 availableFaceDetectModes.size());
9380 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9381 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009382 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9383 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9384 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009385
9386 int32_t exposureCompensationRange[] = {
9387 gCamCapability[cameraId]->exposure_compensation_min,
9388 gCamCapability[cameraId]->exposure_compensation_max};
9389 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9390 exposureCompensationRange,
9391 sizeof(exposureCompensationRange)/sizeof(int32_t));
9392
9393 uint8_t lensFacing = (facingBack) ?
9394 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9395 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9396
9397 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9398 available_thumbnail_sizes,
9399 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9400
9401 /*all sizes will be combined into this tag*/
9402 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9403 /*android.scaler.availableStreamConfigurations*/
9404 Vector<int32_t> available_stream_configs;
9405 cam_dimension_t active_array_dim;
9406 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9407 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009408
9409 /*advertise the list of supported input dimensions based on the property below.
9410 By default all sizes up to 5MP will be advertised.
9411 Note that the setprop resolution format should be WxH.
9412 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9413 To list all supported sizes, setprop needs to be set with "0x0" */
9414 cam_dimension_t minInputSize = {2592,1944}; //5MP
9415 memset(prop, 0, sizeof(prop));
9416 property_get("persist.camera.input.minsize", prop, "2592x1944");
9417 if (strlen(prop) > 0) {
9418 char *saveptr = NULL;
9419 char *token = strtok_r(prop, "x", &saveptr);
9420 if (token != NULL) {
9421 minInputSize.width = atoi(token);
9422 }
9423 token = strtok_r(NULL, "x", &saveptr);
9424 if (token != NULL) {
9425 minInputSize.height = atoi(token);
9426 }
9427 }
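// Setting the property to "0x0" leaves minInputSize at {0,0}, so the minimum-size
// check below always passes.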
9428
Thierry Strudel3d639192016-09-09 11:52:26 -07009429 /* Add input/output stream configurations for each scalar format */
9430 for (size_t j = 0; j < scalar_formats_count; j++) {
9431 switch (scalar_formats[j]) {
9432 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9433 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9434 case HAL_PIXEL_FORMAT_RAW10:
9435 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9436 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9437 addStreamConfig(available_stream_configs, scalar_formats[j],
9438 gCamCapability[cameraId]->raw_dim[i],
9439 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9440 }
9441 break;
9442 case HAL_PIXEL_FORMAT_BLOB:
9443 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9444 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9445 addStreamConfig(available_stream_configs, scalar_formats[j],
9446 gCamCapability[cameraId]->picture_sizes_tbl[i],
9447 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9448 }
9449 break;
9450 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9451 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9452 default:
9453 cam_dimension_t largest_picture_size;
9454 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9455 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9456 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9457 addStreamConfig(available_stream_configs, scalar_formats[j],
9458 gCamCapability[cameraId]->picture_sizes_tbl[i],
9459 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009460 /*For the two formats below we also support input streams for reprocessing, so advertise those*/
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009461 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9462 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009463 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9464 >= minInputSize.width) || (gCamCapability[cameraId]->
9465 picture_sizes_tbl[i].height >= minInputSize.height)) {
9466 addStreamConfig(available_stream_configs, scalar_formats[j],
9467 gCamCapability[cameraId]->picture_sizes_tbl[i],
9468 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9469 }
9470 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009471 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009472
Thierry Strudel3d639192016-09-09 11:52:26 -07009473 break;
9474 }
9475 }
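// Note: input (reprocess) configurations are only added for the first YCbCr_420_888 /
// IMPLEMENTATION_DEFINED picture size entry (i == 0), assuming the picture size table is
// ordered with the largest size first.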
9476
9477 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9478 available_stream_configs.array(), available_stream_configs.size());
9479 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9480 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9481
9482 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9483 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9484
9485 /* android.scaler.availableMinFrameDurations */
9486 Vector<int64_t> available_min_durations;
9487 for (size_t j = 0; j < scalar_formats_count; j++) {
9488 switch (scalar_formats[j]) {
9489 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9490 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9491 case HAL_PIXEL_FORMAT_RAW10:
9492 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9493 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9494 available_min_durations.add(scalar_formats[j]);
9495 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9496 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9497 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9498 }
9499 break;
9500 default:
9501 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9502 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9503 available_min_durations.add(scalar_formats[j]);
9504 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9505 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9506 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9507 }
9508 break;
9509 }
9510 }
9511 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9512 available_min_durations.array(), available_min_durations.size());
9513
9514 Vector<int32_t> available_hfr_configs;
9515 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9516 int32_t fps = 0;
9517 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9518 case CAM_HFR_MODE_60FPS:
9519 fps = 60;
9520 break;
9521 case CAM_HFR_MODE_90FPS:
9522 fps = 90;
9523 break;
9524 case CAM_HFR_MODE_120FPS:
9525 fps = 120;
9526 break;
9527 case CAM_HFR_MODE_150FPS:
9528 fps = 150;
9529 break;
9530 case CAM_HFR_MODE_180FPS:
9531 fps = 180;
9532 break;
9533 case CAM_HFR_MODE_210FPS:
9534 fps = 210;
9535 break;
9536 case CAM_HFR_MODE_240FPS:
9537 fps = 240;
9538 break;
9539 case CAM_HFR_MODE_480FPS:
9540 fps = 480;
9541 break;
9542 case CAM_HFR_MODE_OFF:
9543 case CAM_HFR_MODE_MAX:
9544 default:
9545 break;
9546 }
9547
9548 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9549 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9550 /* For each HFR frame rate, need to advertise one variable fps range
9551 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9552 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9553 * set by the app. When video recording is started, [120, 120] is
9554 * set. This way sensor configuration does not change when recording
9555 * is started */
9556
9557 /* (width, height, fps_min, fps_max, batch_size_max) */
9558 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9559 j < MAX_SIZES_CNT; j++) {
9560 available_hfr_configs.add(
9561 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9562 available_hfr_configs.add(
9563 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9564 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9565 available_hfr_configs.add(fps);
9566 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9567
9568 /* (width, height, fps_min, fps_max, batch_size_max) */
9569 available_hfr_configs.add(
9570 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9571 available_hfr_configs.add(
9572 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9573 available_hfr_configs.add(fps);
9574 available_hfr_configs.add(fps);
9575 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9576 }
9577 }
9578 }
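// Illustrative entry, assuming PREVIEW_FPS_FOR_HFR is 30: a 1920x1080 120 fps HFR mode adds
// (1920, 1080, 30, 120, 4) followed by (1920, 1080, 120, 120, 4).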
9579 //Advertise HFR capability only if the property is set
9580 memset(prop, 0, sizeof(prop));
9581 property_get("persist.camera.hal3hfr.enable", prop, "1");
9582 uint8_t hfrEnable = (uint8_t)atoi(prop);
9583
9584 if(hfrEnable && available_hfr_configs.array()) {
9585 staticInfo.update(
9586 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9587 available_hfr_configs.array(), available_hfr_configs.size());
9588 }
9589
9590 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9591 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9592 &max_jpeg_size, 1);
9593
9594 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9595 size_t size = 0;
9596 count = CAM_EFFECT_MODE_MAX;
9597 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9598 for (size_t i = 0; i < count; i++) {
9599 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9600 gCamCapability[cameraId]->supported_effects[i]);
9601 if (NAME_NOT_FOUND != val) {
9602 avail_effects[size] = (uint8_t)val;
9603 size++;
9604 }
9605 }
9606 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9607 avail_effects,
9608 size);
9609
9610 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9611 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9612 size_t supported_scene_modes_cnt = 0;
9613 count = CAM_SCENE_MODE_MAX;
9614 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9615 for (size_t i = 0; i < count; i++) {
9616 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9617 CAM_SCENE_MODE_OFF) {
9618 int val = lookupFwkName(SCENE_MODES_MAP,
9619 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9620 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009621
Thierry Strudel3d639192016-09-09 11:52:26 -07009622 if (NAME_NOT_FOUND != val) {
9623 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9624 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9625 supported_scene_modes_cnt++;
9626 }
9627 }
9628 }
9629 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9630 avail_scene_modes,
9631 supported_scene_modes_cnt);
9632
9633 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9634 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9635 supported_scene_modes_cnt,
9636 CAM_SCENE_MODE_MAX,
9637 scene_mode_overrides,
9638 supported_indexes,
9639 cameraId);
9640
9641 if (supported_scene_modes_cnt == 0) {
9642 supported_scene_modes_cnt = 1;
9643 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9644 }
9645
9646 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9647 scene_mode_overrides, supported_scene_modes_cnt * 3);
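// Each advertised scene mode contributes an (AE mode, AWB mode, AF mode) triplet, hence the * 3.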
9648
9649 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9650 ANDROID_CONTROL_MODE_AUTO,
9651 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9652 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9653 available_control_modes,
9654 3);
9655
9656 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9657 size = 0;
9658 count = CAM_ANTIBANDING_MODE_MAX;
9659 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9660 for (size_t i = 0; i < count; i++) {
9661 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9662 gCamCapability[cameraId]->supported_antibandings[i]);
9663 if (NAME_NOT_FOUND != val) {
9664 avail_antibanding_modes[size] = (uint8_t)val;
9665 size++;
9666 }
9667
9668 }
9669 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9670 avail_antibanding_modes,
9671 size);
9672
9673 uint8_t avail_abberation_modes[] = {
9674 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9675 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9676 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9677 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9678 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9679 if (0 == count) {
9680 // If no aberration correction modes are available for a device, advertise only the OFF mode
9681 size = 1;
9682 } else {
9683 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
9684 // so advertise all 3 modes whenever at least one mode is supported, as per the
9685 // Android M requirement
9686 size = 3;
9687 }
9688 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9689 avail_abberation_modes,
9690 size);
9691
9692 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9693 size = 0;
9694 count = CAM_FOCUS_MODE_MAX;
9695 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9696 for (size_t i = 0; i < count; i++) {
9697 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9698 gCamCapability[cameraId]->supported_focus_modes[i]);
9699 if (NAME_NOT_FOUND != val) {
9700 avail_af_modes[size] = (uint8_t)val;
9701 size++;
9702 }
9703 }
9704 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9705 avail_af_modes,
9706 size);
9707
9708 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9709 size = 0;
9710 count = CAM_WB_MODE_MAX;
9711 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9712 for (size_t i = 0; i < count; i++) {
9713 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9714 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9715 gCamCapability[cameraId]->supported_white_balances[i]);
9716 if (NAME_NOT_FOUND != val) {
9717 avail_awb_modes[size] = (uint8_t)val;
9718 size++;
9719 }
9720 }
9721 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9722 avail_awb_modes,
9723 size);
9724
9725 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9726 count = CAM_FLASH_FIRING_LEVEL_MAX;
9727 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9728 count);
9729 for (size_t i = 0; i < count; i++) {
9730 available_flash_levels[i] =
9731 gCamCapability[cameraId]->supported_firing_levels[i];
9732 }
9733 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9734 available_flash_levels, count);
9735
9736 uint8_t flashAvailable;
9737 if (gCamCapability[cameraId]->flash_available)
9738 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9739 else
9740 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9741 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9742 &flashAvailable, 1);
9743
9744 Vector<uint8_t> avail_ae_modes;
9745 count = CAM_AE_MODE_MAX;
9746 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9747 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009748 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9749 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9750 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9751 }
9752 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009753 }
9754 if (flashAvailable) {
9755 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9756 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9757 }
9758 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9759 avail_ae_modes.array(),
9760 avail_ae_modes.size());
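// CAM_AE_MODE_ON_EXTERNAL_FLASH is surfaced through the experimental vendor value above
// instead of a standard ANDROID_CONTROL_AE_MODE_* constant.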
9761
9762 int32_t sensitivity_range[2];
9763 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9764 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9765 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9766 sensitivity_range,
9767 sizeof(sensitivity_range) / sizeof(int32_t));
9768
9769 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9770 &gCamCapability[cameraId]->max_analog_sensitivity,
9771 1);
9772
9773 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9774 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9775 &sensor_orientation,
9776 1);
9777
9778 int32_t max_output_streams[] = {
9779 MAX_STALLING_STREAMS,
9780 MAX_PROCESSED_STREAMS,
9781 MAX_RAW_STREAMS};
9782 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9783 max_output_streams,
9784 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9785
9786 uint8_t avail_leds = 0;
9787 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9788 &avail_leds, 0);
9789
9790 uint8_t focus_dist_calibrated;
9791 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9792 gCamCapability[cameraId]->focus_dist_calibrated);
9793 if (NAME_NOT_FOUND != val) {
9794 focus_dist_calibrated = (uint8_t)val;
9795 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9796 &focus_dist_calibrated, 1);
9797 }
9798
9799 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9800 size = 0;
9801 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9802 MAX_TEST_PATTERN_CNT);
9803 for (size_t i = 0; i < count; i++) {
9804 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9805 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9806 if (NAME_NOT_FOUND != testpatternMode) {
9807 avail_testpattern_modes[size] = testpatternMode;
9808 size++;
9809 }
9810 }
9811 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9812 avail_testpattern_modes,
9813 size);
9814
9815 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9816 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9817 &max_pipeline_depth,
9818 1);
9819
9820 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9821 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9822 &partial_result_count,
9823 1);
9824
9825 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9826 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9827
9828 Vector<uint8_t> available_capabilities;
9829 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9830 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9831 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9832 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9833 if (supportBurst) {
9834 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9835 }
9836 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9837 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9838 if (hfrEnable && available_hfr_configs.array()) {
9839 available_capabilities.add(
9840 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9841 }
9842
9843 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9844 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9845 }
9846 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9847 available_capabilities.array(),
9848 available_capabilities.size());
9849
9850 //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9851 //Assumption is that all bayer cameras support MANUAL_SENSOR.
9852 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9853 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9854
9855 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9856 &aeLockAvailable, 1);
9857
9858 //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9859 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
9860 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9861 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9862
9863 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9864 &awbLockAvailable, 1);
9865
9866 int32_t max_input_streams = 1;
9867 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9868 &max_input_streams,
9869 1);
9870
9871 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9872 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9873 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9874 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9875 HAL_PIXEL_FORMAT_YCbCr_420_888};
9876 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9877 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
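// Decoded: IMPLEMENTATION_DEFINED -> {BLOB, YCbCr_420_888}; YCbCr_420_888 -> {BLOB, YCbCr_420_888}.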
9878
9879 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9880 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9881 &max_latency,
9882 1);
9883
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009884#ifndef USE_HAL_3_3
9885 int32_t isp_sensitivity_range[2];
9886 isp_sensitivity_range[0] =
9887 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9888 isp_sensitivity_range[1] =
9889 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9890 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9891 isp_sensitivity_range,
9892 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9893#endif
9894
Thierry Strudel3d639192016-09-09 11:52:26 -07009895 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9896 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9897 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9898 available_hot_pixel_modes,
9899 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9900
9901 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9902 ANDROID_SHADING_MODE_FAST,
9903 ANDROID_SHADING_MODE_HIGH_QUALITY};
9904 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9905 available_shading_modes,
9906 3);
9907
9908 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9909 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9910 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9911 available_lens_shading_map_modes,
9912 2);
9913
9914 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9915 ANDROID_EDGE_MODE_FAST,
9916 ANDROID_EDGE_MODE_HIGH_QUALITY,
9917 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9918 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9919 available_edge_modes,
9920 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9921
9922 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9923 ANDROID_NOISE_REDUCTION_MODE_FAST,
9924 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9925 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9926 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9927 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9928 available_noise_red_modes,
9929 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9930
9931 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9932 ANDROID_TONEMAP_MODE_FAST,
9933 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9934 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9935 available_tonemap_modes,
9936 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9937
9938 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9939 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9940 available_hot_pixel_map_modes,
9941 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9942
9943 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9944 gCamCapability[cameraId]->reference_illuminant1);
9945 if (NAME_NOT_FOUND != val) {
9946 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9947 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9948 }
9949
9950 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9951 gCamCapability[cameraId]->reference_illuminant2);
9952 if (NAME_NOT_FOUND != val) {
9953 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9954 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9955 }
9956
9957 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9958 (void *)gCamCapability[cameraId]->forward_matrix1,
9959 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9960
9961 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9962 (void *)gCamCapability[cameraId]->forward_matrix2,
9963 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9964
9965 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9966 (void *)gCamCapability[cameraId]->color_transform1,
9967 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9968
9969 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9970 (void *)gCamCapability[cameraId]->color_transform2,
9971 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9972
9973 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9974 (void *)gCamCapability[cameraId]->calibration_transform1,
9975 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9976
9977 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9978 (void *)gCamCapability[cameraId]->calibration_transform2,
9979 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9980
9981 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9982 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9983 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9984 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9985 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9986 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9987 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9988 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9989 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9990 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9991 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9992 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9993 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9994 ANDROID_JPEG_GPS_COORDINATES,
9995 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9996 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9997 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9998 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9999 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10000 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10001 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10002 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10003 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10004 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010005#ifndef USE_HAL_3_3
10006 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10007#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010008 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010009 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010010 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10011 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010012 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010013 /* DevCamDebug metadata request_keys_basic */
10014 DEVCAMDEBUG_META_ENABLE,
10015 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010016 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010017 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010018 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010019 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Samuel Ha68ba5172016-12-15 18:41:12 -080010020 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010021
10022 size_t request_keys_cnt =
10023 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10024 Vector<int32_t> available_request_keys;
10025 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10026 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10027 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10028 }
10029
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010030 if (gExposeEnableZslKey) {
Chien-Yu Chen3b630e52017-06-02 15:39:47 -070010031 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || cameraId == 0) {
10032 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10033 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010034 }
10035
Thierry Strudel3d639192016-09-09 11:52:26 -070010036 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10037 available_request_keys.array(), available_request_keys.size());
10038
10039 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10040 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10041 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10042 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10043 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10044 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10045 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10046 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10047 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10048 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10049 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10050 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10051 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10052 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10053 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10054 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10055 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010056 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010057 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10058 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10059 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010060 ANDROID_STATISTICS_FACE_SCORES,
10061#ifndef USE_HAL_3_3
10062 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10063#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010064 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010065 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010066 // DevCamDebug metadata result_keys_basic
10067 DEVCAMDEBUG_META_ENABLE,
10068 // DevCamDebug metadata result_keys AF
10069 DEVCAMDEBUG_AF_LENS_POSITION,
10070 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10071 DEVCAMDEBUG_AF_TOF_DISTANCE,
10072 DEVCAMDEBUG_AF_LUMA,
10073 DEVCAMDEBUG_AF_HAF_STATE,
10074 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10075 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10076 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10077 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10078 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10079 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10080 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10081 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10082 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10083 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10084 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10085 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10086 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10087 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10088 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10089 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10090 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10091 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10092 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10093 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10094 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10095 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10096 // DevCamDebug metadata result_keys AEC
10097 DEVCAMDEBUG_AEC_TARGET_LUMA,
10098 DEVCAMDEBUG_AEC_COMP_LUMA,
10099 DEVCAMDEBUG_AEC_AVG_LUMA,
10100 DEVCAMDEBUG_AEC_CUR_LUMA,
10101 DEVCAMDEBUG_AEC_LINECOUNT,
10102 DEVCAMDEBUG_AEC_REAL_GAIN,
10103 DEVCAMDEBUG_AEC_EXP_INDEX,
10104 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010105 // DevCamDebug metadata result_keys zzHDR
10106 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10107 DEVCAMDEBUG_AEC_L_LINECOUNT,
10108 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10109 DEVCAMDEBUG_AEC_S_LINECOUNT,
10110 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10111 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10112 // DevCamDebug metadata result_keys ADRC
10113 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10114 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10115 DEVCAMDEBUG_AEC_GTM_RATIO,
10116 DEVCAMDEBUG_AEC_LTM_RATIO,
10117 DEVCAMDEBUG_AEC_LA_RATIO,
10118 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010119 // DevCamDebug metadata result_keys AWB
10120 DEVCAMDEBUG_AWB_R_GAIN,
10121 DEVCAMDEBUG_AWB_G_GAIN,
10122 DEVCAMDEBUG_AWB_B_GAIN,
10123 DEVCAMDEBUG_AWB_CCT,
10124 DEVCAMDEBUG_AWB_DECISION,
10125 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010126 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10127 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10128 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010129 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010130 };
10131
Thierry Strudel3d639192016-09-09 11:52:26 -070010132 size_t result_keys_cnt =
10133 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10134
10135 Vector<int32_t> available_result_keys;
10136 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10137 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10138 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10139 }
10140 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10141 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10142 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10143 }
10144 if (supportedFaceDetectMode == 1) {
10145 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10146 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10147 } else if ((supportedFaceDetectMode == 2) ||
10148 (supportedFaceDetectMode == 3)) {
10149 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10150 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10151 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010152#ifndef USE_HAL_3_3
10153 if (hasBlackRegions) {
10154 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10155 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10156 }
10157#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010158
10159 if (gExposeEnableZslKey) {
10160 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10161 }
10162
Thierry Strudel3d639192016-09-09 11:52:26 -070010163 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10164 available_result_keys.array(), available_result_keys.size());
10165
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010166 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010167 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10168 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10169 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10170 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10171 ANDROID_SCALER_CROPPING_TYPE,
10172 ANDROID_SYNC_MAX_LATENCY,
10173 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10174 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10175 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10176 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10177 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10178 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10179 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10180 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10181 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10182 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10183 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10184 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10185 ANDROID_LENS_FACING,
10186 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10187 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10188 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10189 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10190 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10191 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10192 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10193 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10194 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10195 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10196 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10197 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10198 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10199 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10200 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10201 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10202 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10203 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10204 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10205 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010206 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010207 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10208 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10209 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10210 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10211 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10212 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10213 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10214 ANDROID_CONTROL_AVAILABLE_MODES,
10215 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10216 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10217 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10218 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010219 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10220#ifndef USE_HAL_3_3
10221 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10222 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10223#endif
10224 };
10225
10226 Vector<int32_t> available_characteristics_keys;
10227 available_characteristics_keys.appendArray(characteristics_keys_basic,
10228 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10229#ifndef USE_HAL_3_3
10230 if (hasBlackRegions) {
10231 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10232 }
10233#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010234
10235 if (0 <= indexPD) {
10236 int32_t depthKeys[] = {
10237 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10238 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10239 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10240 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10241 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10242 };
10243 available_characteristics_keys.appendArray(depthKeys,
10244 sizeof(depthKeys) / sizeof(depthKeys[0]));
10245 }
10246
Thierry Strudel3d639192016-09-09 11:52:26 -070010247 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010248 available_characteristics_keys.array(),
10249 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010250
10251 /*available stall durations depend on the hw + sw and will be different for different devices */
10252 /*have to add for raw after implementation*/
10253 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10254 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10255
10256 Vector<int64_t> available_stall_durations;
10257 for (uint32_t j = 0; j < stall_formats_count; j++) {
10258 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10259 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10260 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10261 available_stall_durations.add(stall_formats[j]);
10262 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10263 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10264 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10265 }
10266 } else {
10267 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10268 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10269 available_stall_durations.add(stall_formats[j]);
10270 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10271 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10272 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10273 }
10274 }
10275 }
10276 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10277 available_stall_durations.array(),
10278 available_stall_durations.size());
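// Each entry above is a (format, width, height, stall duration in ns) quadruple.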
10279
10280 //QCAMERA3_OPAQUE_RAW
10281 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10282 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10283 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10284 case LEGACY_RAW:
10285 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10286 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10287 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10288 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10289 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10290 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10291 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10292 break;
10293 case MIPI_RAW:
10294 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10295 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10296 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10297 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10298 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10299 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10300 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10301 break;
10302 default:
10303 LOGE("unknown opaque_raw_format %d",
10304 gCamCapability[cameraId]->opaque_raw_fmt);
10305 break;
10306 }
10307 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10308
10309 Vector<int32_t> strides;
10310 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10311 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10312 cam_stream_buf_plane_info_t buf_planes;
10313 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10314 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10315 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10316 &gCamCapability[cameraId]->padding_info, &buf_planes);
10317 strides.add(buf_planes.plane_info.mp[0].stride);
10318 }
10319 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10320 strides.size());
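// Each supported RAW dimension contributes a (width, height, stride) triplet, with the stride
// taken from the plane info computed above.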
10321
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010322 //TBD: remove the following line once the backend advertises zzHDR in the feature mask
10323 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010324 //Video HDR default
10325 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10326 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010327 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010328 int32_t vhdr_mode[] = {
10329 QCAMERA3_VIDEO_HDR_MODE_OFF,
10330 QCAMERA3_VIDEO_HDR_MODE_ON};
10331
10332 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10333 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10334 vhdr_mode, vhdr_mode_count);
10335 }
10336
Thierry Strudel3d639192016-09-09 11:52:26 -070010337 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10338 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10339 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10340
10341 uint8_t isMonoOnly =
10342 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10343 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10344 &isMonoOnly, 1);
10345
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010346#ifndef USE_HAL_3_3
10347 Vector<int32_t> opaque_size;
10348 for (size_t j = 0; j < scalar_formats_count; j++) {
10349 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10350 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10351 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10352 cam_stream_buf_plane_info_t buf_planes;
10353
10354 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10355 &gCamCapability[cameraId]->padding_info, &buf_planes);
10356
10357 if (rc == 0) {
10358 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10359 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10360 opaque_size.add(buf_planes.plane_info.frame_len);
10361 } else {
10362 LOGE("raw frame calculation failed!");
10363 }
10364 }
10365 }
10366 }
10367
10368 if ((opaque_size.size() > 0) &&
10369 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10370 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10371 else
10372 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10373#endif
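    // Illustrative note (applies to the non-USE_HAL_3_3 block above):
    // ANDROID_SENSOR_OPAQUE_RAW_SIZE is published as flat {width, height, frame_len}
    // triplets, one per opaque RAW size whose offset calculation succeeded;
    // PER_CONFIGURATION_SIZE_3 enforces that the array length is a multiple of three
    // before the tag is updated.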
10374
Thierry Strudel04e026f2016-10-10 11:27:36 -070010375 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10376 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10377 size = 0;
10378 count = CAM_IR_MODE_MAX;
10379 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10380 for (size_t i = 0; i < count; i++) {
10381 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10382 gCamCapability[cameraId]->supported_ir_modes[i]);
10383 if (NAME_NOT_FOUND != val) {
10384 avail_ir_modes[size] = (int32_t)val;
10385 size++;
10386 }
10387 }
10388 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10389 avail_ir_modes, size);
10390 }
10391
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010392 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10393 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10394 size = 0;
10395 count = CAM_AEC_CONVERGENCE_MAX;
10396 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10397 for (size_t i = 0; i < count; i++) {
10398 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10399 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10400 if (NAME_NOT_FOUND != val) {
10401 available_instant_aec_modes[size] = (int32_t)val;
10402 size++;
10403 }
10404 }
10405 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10406 available_instant_aec_modes, size);
10407 }
10408
Thierry Strudel54dc9782017-02-15 12:12:10 -080010409 int32_t sharpness_range[] = {
10410 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10411 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10412 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10413
10414 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10415 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10416 size = 0;
10417 count = CAM_BINNING_CORRECTION_MODE_MAX;
10418 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10419 for (size_t i = 0; i < count; i++) {
10420 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10421 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10422 gCamCapability[cameraId]->supported_binning_modes[i]);
10423 if (NAME_NOT_FOUND != val) {
10424 avail_binning_modes[size] = (int32_t)val;
10425 size++;
10426 }
10427 }
10428 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10429 avail_binning_modes, size);
10430 }
10431
10432 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10433 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10434 size = 0;
10435 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10436 for (size_t i = 0; i < count; i++) {
10437 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10438 gCamCapability[cameraId]->supported_aec_modes[i]);
10439 if (NAME_NOT_FOUND != val)
10440 available_aec_modes[size++] = val;
10441 }
10442 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10443 available_aec_modes, size);
10444 }
10445
10446 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10447 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10448 size = 0;
10449 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10450 for (size_t i = 0; i < count; i++) {
10451 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10452 gCamCapability[cameraId]->supported_iso_modes[i]);
10453 if (NAME_NOT_FOUND != val)
10454 available_iso_modes[size++] = val;
10455 }
10456 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10457 available_iso_modes, size);
10458 }
10459
10460 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010461 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010462 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10463 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10464 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10465
10466 int32_t available_saturation_range[4];
10467 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10468 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10469 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10470 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10471 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10472 available_saturation_range, 4);
10473
10474 uint8_t is_hdr_values[2];
10475 is_hdr_values[0] = 0;
10476 is_hdr_values[1] = 1;
10477 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10478 is_hdr_values, 2);
10479
10480 float is_hdr_confidence_range[2];
10481 is_hdr_confidence_range[0] = 0.0;
10482 is_hdr_confidence_range[1] = 1.0;
10483 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10484 is_hdr_confidence_range, 2);
10485
Emilian Peev0a972ef2017-03-16 10:25:53 +000010486 size_t eepromLength = strnlen(
10487 reinterpret_cast<const char *>(
10488 gCamCapability[cameraId]->eeprom_version_info),
10489 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10490 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010491 char easelInfo[] = ",E:N";
10492 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10493 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10494 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010495 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10496 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010497 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010498 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10499 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10500 }
10501
Thierry Strudel3d639192016-09-09 11:52:26 -070010502 gStaticMetadata[cameraId] = staticInfo.release();
10503 return rc;
10504}
10505
10506/*===========================================================================
10507 * FUNCTION : makeTable
10508 *
10509 * DESCRIPTION: make a table of sizes
10510 *
10511 * PARAMETERS :
10512 *   @dimTable/@size      : source dimension table and its entry count
10513 *   @max_size/@sizeTable : output capacity (in dimensions) and flattened [w, h, ...] output array
10514 *==========================================================================*/
10515void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10516 size_t max_size, int32_t *sizeTable)
10517{
10518 size_t j = 0;
10519 if (size > max_size) {
10520 size = max_size;
10521 }
10522 for (size_t i = 0; i < size; i++) {
10523 sizeTable[j] = dimTable[i].width;
10524 sizeTable[j+1] = dimTable[i].height;
10525 j+=2;
10526 }
10527}
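/* Usage sketch (illustrative only, not called by the HAL as written): makeTable()
 * flattens cam_dimension_t entries into an int32_t array laid out as
 * [w0, h0, w1, h1, ...], which is the layout the framework expects for size lists.
 * Assuming 'dims' and 'dimCount' describe the supported dimensions:
 *
 *     int32_t sizes[MAX_SIZES_CNT * 2];
 *     makeTable(dims, dimCount, MAX_SIZES_CNT, sizes);
 *     // then publish with staticInfo.update(<some size tag>, sizes, dimCount * 2);
 */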
10528
10529/*===========================================================================
10530 * FUNCTION : makeFPSTable
10531 *
10532 * DESCRIPTION: make a table of fps ranges
10533 *
10534 * PARAMETERS :
10535 *   @fpsTable/@size/@max_size/@fpsRangesTable : source fps ranges, entry count, output capacity, flattened [min, max, ...] output
10536 *==========================================================================*/
10537void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10538 size_t max_size, int32_t *fpsRangesTable)
10539{
10540 size_t j = 0;
10541 if (size > max_size) {
10542 size = max_size;
10543 }
10544 for (size_t i = 0; i < size; i++) {
10545 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10546 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10547 j+=2;
10548 }
10549}
10550
10551/*===========================================================================
10552 * FUNCTION : makeOverridesList
10553 *
10554 * DESCRIPTION: make a list of scene mode overrides
10555 *
10556 * PARAMETERS :
10557 *   @overridesTable/@size/@max_size              : override table reported by the backend and its bounds
10558 *   @overridesList/@supported_indexes/@camera_id : output (ae, awb, af) tuples, fwk scene mode indexes, camera id
10559 *==========================================================================*/
10560void QCamera3HardwareInterface::makeOverridesList(
10561 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10562 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10563{
10564 /* The daemon will give a list of overrides for all scene modes.
10565 However, we should send the framework only the overrides for the scene modes
10566 it supports. */
10567 size_t j = 0;
10568 if (size > max_size) {
10569 size = max_size;
10570 }
10571 size_t focus_count = CAM_FOCUS_MODE_MAX;
10572 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10573 focus_count);
10574 for (size_t i = 0; i < size; i++) {
10575 bool supt = false;
10576 size_t index = supported_indexes[i];
10577 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10578 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10579 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10580 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10581 overridesTable[index].awb_mode);
10582 if (NAME_NOT_FOUND != val) {
10583 overridesList[j+1] = (uint8_t)val;
10584 }
10585 uint8_t focus_override = overridesTable[index].af_mode;
10586 for (size_t k = 0; k < focus_count; k++) {
10587 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10588 supt = true;
10589 break;
10590 }
10591 }
10592 if (supt) {
10593 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10594 focus_override);
10595 if (NAME_NOT_FOUND != val) {
10596 overridesList[j+2] = (uint8_t)val;
10597 }
10598 } else {
10599 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10600 }
10601 j+=3;
10602 }
10603}
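/* Illustrative note: the resulting overridesList feeds the scene mode overrides
 * static tag (ANDROID_CONTROL_SCENE_MODE_OVERRIDES), i.e. flat 3-byte tuples of
 * {aeMode, awbMode, afMode}, one tuple per scene mode advertised to the framework
 * and in the same order. A hypothetical consumer would read the i-th tuple as:
 *
 *     uint8_t ae  = overridesList[3 * i];
 *     uint8_t awb = overridesList[3 * i + 1];
 *     uint8_t af  = overridesList[3 * i + 2];
 */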
10604
10605/*===========================================================================
10606 * FUNCTION : filterJpegSizes
10607 *
10608 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that the
10609 *              active array can be downscaled to by at most downscale_factor in each dimension
10610 *
10611 * PARAMETERS :
10612 *   @jpegSizes (out), @processedSizes/@processedSizesCnt (in), @maxCount, @active_array_size, @downscale_factor
10613 * RETURN : length of jpegSizes array
10614 *==========================================================================*/
10615
10616size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10617 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10618 uint8_t downscale_factor)
10619{
10620 if (0 == downscale_factor) {
10621 downscale_factor = 1;
10622 }
10623
10624 int32_t min_width = active_array_size.width / downscale_factor;
10625 int32_t min_height = active_array_size.height / downscale_factor;
10626 size_t jpegSizesCnt = 0;
10627 if (processedSizesCnt > maxCount) {
10628 processedSizesCnt = maxCount;
10629 }
10630 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10631 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10632 jpegSizes[jpegSizesCnt] = processedSizes[i];
10633 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10634 jpegSizesCnt += 2;
10635 }
10636 }
10637 return jpegSizesCnt;
10638}
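/* Worked example (illustrative numbers): with a 4000x3000 active array and
 * downscale_factor = 2, min_width/min_height become 2000x1500, so only processed
 * sizes of at least 2000x1500 are copied into jpegSizes. Note the return value is
 * the number of int32_t entries written (two per size), not the number of sizes.
 */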
10639
10640/*===========================================================================
10641 * FUNCTION : computeNoiseModelEntryS
10642 *
10643 * DESCRIPTION: function to map a given sensitivity to the S noise
10644 * model parameters in the DNG noise model.
10645 *
10646 * PARAMETERS : sens : the sensor sensitivity
10647 *
10648 * RETURN : S (sensor amplification) noise
10649 *
10650 *==========================================================================*/
10651double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10652 double s = gCamCapability[mCameraId]->gradient_S * sens +
10653 gCamCapability[mCameraId]->offset_S;
10654 return ((s < 0.0) ? 0.0 : s);
10655}
10656
10657/*===========================================================================
10658 * FUNCTION : computeNoiseModelEntryO
10659 *
10660 * DESCRIPTION: function to map a given sensitivity to the O noise
10661 * model parameters in the DNG noise model.
10662 *
10663 * PARAMETERS : sens : the sensor sensitivity
10664 *
10665 * RETURN : O (sensor readout) noise
10666 *
10667 *==========================================================================*/
10668double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10669 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10670 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10671 1.0 : (1.0 * sens / max_analog_sens);
10672 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10673 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10674 return ((o < 0.0) ? 0.0 : o);
10675}
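/* Illustrative note: S and O above are the coefficients of the DNG noise model,
 * noise_variance(x) = S * x + O for a normalized pixel value x, and are published
 * per-channel in ANDROID_SENSOR_NOISE_PROFILE. A hedged numeric sketch, assuming
 * gradient_S = 3.7e-6 and offset_S = 3.0e-8 (example calibration values only):
 *
 *     double S = computeNoiseModelEntryS(400);  // 3.7e-6 * 400 + 3.0e-8 ~= 1.48e-3
 *     double O = computeNoiseModelEntryO(400);  // grows roughly quadratically with sensitivity
 */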
10676
10677/*===========================================================================
10678 * FUNCTION : getSensorSensitivity
10679 *
10680 * DESCRIPTION: convert iso_mode to an integer value
10681 *
10682 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10683 *
10684 * RETURN : sensitivity supported by sensor
10685 *
10686 *==========================================================================*/
10687int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10688{
10689 int32_t sensitivity;
10690
10691 switch (iso_mode) {
10692 case CAM_ISO_MODE_100:
10693 sensitivity = 100;
10694 break;
10695 case CAM_ISO_MODE_200:
10696 sensitivity = 200;
10697 break;
10698 case CAM_ISO_MODE_400:
10699 sensitivity = 400;
10700 break;
10701 case CAM_ISO_MODE_800:
10702 sensitivity = 800;
10703 break;
10704 case CAM_ISO_MODE_1600:
10705 sensitivity = 1600;
10706 break;
10707 default:
10708 sensitivity = -1;
10709 break;
10710 }
10711 return sensitivity;
10712}
10713
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010714int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010715 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010716 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10717 // to connect to Easel.
10718 bool doNotpowerOnEasel =
10719 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10720
10721 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010722 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10723 return OK;
10724 }
10725
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010726 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010727 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010728 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010729 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010730 return res;
10731 }
10732
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010733 EaselManagerClientOpened = true;
10734
10735 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010736 if (res != OK) {
10737 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10738 }
10739
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010740 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010741 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010742
10743 // Expose enableZsl key only when HDR+ mode is enabled.
10744 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010745 }
10746
10747 return OK;
10748}
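/* Summary of the system properties consulted above (descriptive note; boolean
 * values read via property_get_bool):
 *   camera.hdrplus.donotpoweroneasel  - if true, skip powering on Easel even when present
 *   persist.camera.hdrplus.enable     - if false, Easel runs in bypass-only mode
 *   persist.camera.hdrplus.profiling  - enables HDR+ profiling
 * The ANDROID_CONTROL_ENABLE_ZSL key is only exposed when HDR+ (non-bypass) is enabled.
 */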
10749
Thierry Strudel3d639192016-09-09 11:52:26 -070010750/*===========================================================================
10751 * FUNCTION : getCamInfo
10752 *
10753 * DESCRIPTION: query camera capabilities
10754 *
10755 * PARAMETERS :
10756 * @cameraId : camera Id
10757 * @info : camera info struct to be filled in with camera capabilities
10758 *
10759 * RETURN : int type of status
10760 * NO_ERROR -- success
10761 * non-zero failure code
10762 *==========================================================================*/
10763int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10764 struct camera_info *info)
10765{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010766 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010767 int rc = 0;
10768
10769 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010770
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010771 {
10772 Mutex::Autolock l(gHdrPlusClientLock);
10773 rc = initHdrPlusClientLocked();
10774 if (rc != OK) {
10775 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10776 pthread_mutex_unlock(&gCamLock);
10777 return rc;
10778 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010779 }
10780
Thierry Strudel3d639192016-09-09 11:52:26 -070010781 if (NULL == gCamCapability[cameraId]) {
10782 rc = initCapabilities(cameraId);
10783 if (rc < 0) {
10784 pthread_mutex_unlock(&gCamLock);
10785 return rc;
10786 }
10787 }
10788
10789 if (NULL == gStaticMetadata[cameraId]) {
10790 rc = initStaticMetadata(cameraId);
10791 if (rc < 0) {
10792 pthread_mutex_unlock(&gCamLock);
10793 return rc;
10794 }
10795 }
10796
10797 switch(gCamCapability[cameraId]->position) {
10798 case CAM_POSITION_BACK:
10799 case CAM_POSITION_BACK_AUX:
10800 info->facing = CAMERA_FACING_BACK;
10801 break;
10802
10803 case CAM_POSITION_FRONT:
10804 case CAM_POSITION_FRONT_AUX:
10805 info->facing = CAMERA_FACING_FRONT;
10806 break;
10807
10808 default:
10809 LOGE("Unknown position type %d for camera id:%d",
10810 gCamCapability[cameraId]->position, cameraId);
10811 rc = -1;
10812 break;
10813 }
10814
10815
10816 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010817#ifndef USE_HAL_3_3
10818 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10819#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010820 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010821#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010822 info->static_camera_characteristics = gStaticMetadata[cameraId];
10823
10824 //For now assume both cameras can operate independently.
10825 info->conflicting_devices = NULL;
10826 info->conflicting_devices_length = 0;
10827
10828 //resource cost is 100 * MIN(1.0, m/M),
10829 //where m is throughput requirement with maximum stream configuration
10830 //and M is CPP maximum throughput.
10831 float max_fps = 0.0;
10832 for (uint32_t i = 0;
10833 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10834 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10835 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10836 }
10837 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10838 gCamCapability[cameraId]->active_array_size.width *
10839 gCamCapability[cameraId]->active_array_size.height * max_fps /
10840 gCamCapability[cameraId]->max_pixel_bandwidth;
10841 info->resource_cost = 100 * MIN(1.0, ratio);
10842 LOGI("camera %d resource cost is %d", cameraId,
10843 info->resource_cost);
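    // Worked example (illustrative numbers only): with MAX_PROCESSED_STREAMS = 3,
    // a 4000x3000 active array, max_fps = 30 and max_pixel_bandwidth = 1.2e9,
    // ratio = 3 * 4000 * 3000 * 30 / 1.2e9 = 0.9, so resource_cost = 90.
    // Any ratio >= 1.0 is clamped to the maximum cost of 100.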
10844
10845 pthread_mutex_unlock(&gCamLock);
10846 return rc;
10847}
10848
10849/*===========================================================================
10850 * FUNCTION : translateCapabilityToMetadata
10851 *
10852 * DESCRIPTION: translate the capability into camera_metadata_t
10853 *
10854 * PARAMETERS : type of the request
10855 *
10856 *
10857 * RETURN : success: camera_metadata_t*
10858 * failure: NULL
10859 *
10860 *==========================================================================*/
10861camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10862{
10863 if (mDefaultMetadata[type] != NULL) {
10864 return mDefaultMetadata[type];
10865 }
10866 //first time we are handling this request
10867 //fill up the metadata structure using the wrapper class
10868 CameraMetadata settings;
10869 //translate from cam_capability_t to camera_metadata_tag_t
10870 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10871 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10872 int32_t defaultRequestID = 0;
10873 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10874
10875 /* OIS disable */
10876 char ois_prop[PROPERTY_VALUE_MAX];
10877 memset(ois_prop, 0, sizeof(ois_prop));
10878 property_get("persist.camera.ois.disable", ois_prop, "0");
10879 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10880
10881 /* Force video to use OIS */
10882 char videoOisProp[PROPERTY_VALUE_MAX];
10883 memset(videoOisProp, 0, sizeof(videoOisProp));
10884 property_get("persist.camera.ois.video", videoOisProp, "1");
10885 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010886
10887 // Hybrid AE enable/disable
10888 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10889 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10890 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10891 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10892
Thierry Strudel3d639192016-09-09 11:52:26 -070010893 uint8_t controlIntent = 0;
10894 uint8_t focusMode;
10895 uint8_t vsMode;
10896 uint8_t optStabMode;
10897 uint8_t cacMode;
10898 uint8_t edge_mode;
10899 uint8_t noise_red_mode;
10900 uint8_t tonemap_mode;
10901 bool highQualityModeEntryAvailable = FALSE;
10902 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010903 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010904 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10905 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010906 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010907 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010908 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010909
Thierry Strudel3d639192016-09-09 11:52:26 -070010910 switch (type) {
10911 case CAMERA3_TEMPLATE_PREVIEW:
10912 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10913 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10914 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10915 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10916 edge_mode = ANDROID_EDGE_MODE_FAST;
10917 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10918 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10919 break;
10920 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10921 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10922 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10923 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10924 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10925 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10926 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10927 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10928 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10929 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10930 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10931 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10932 highQualityModeEntryAvailable = TRUE;
10933 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10934 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10935 fastModeEntryAvailable = TRUE;
10936 }
10937 }
10938 if (highQualityModeEntryAvailable) {
10939 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10940 } else if (fastModeEntryAvailable) {
10941 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10942 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010943 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10944 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10945 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010946 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010947 break;
10948 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10949 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10950 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10951 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010952 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10953 edge_mode = ANDROID_EDGE_MODE_FAST;
10954 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10955 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10956 if (forceVideoOis)
10957 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10958 break;
10959 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10960 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10961 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10962 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010963 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10964 edge_mode = ANDROID_EDGE_MODE_FAST;
10965 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10966 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10967 if (forceVideoOis)
10968 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10969 break;
10970 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10971 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10972 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10973 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10974 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10975 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10976 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10977 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10978 break;
10979 case CAMERA3_TEMPLATE_MANUAL:
10980 edge_mode = ANDROID_EDGE_MODE_FAST;
10981 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10982 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10983 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10984 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10985 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10986 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10987 break;
10988 default:
10989 edge_mode = ANDROID_EDGE_MODE_FAST;
10990 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10991 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10992 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10993 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10994 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10995 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10996 break;
10997 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010998 // Set CAC to OFF if underlying device doesn't support
10999 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11000 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11001 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011002 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11003 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11004 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11005 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11006 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11007 }
11008 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011009 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011010 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011011
11012 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11013 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11014 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11015 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11016 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11017 || ois_disable)
11018 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11019 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011020 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011021
11022 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11023 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11024
11025 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11026 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11027
11028 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11029 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11030
11031 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11032 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11033
11034 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11035 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11036
11037 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11038 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11039
11040 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11041 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11042
11043 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11044 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11045
11046 /*flash*/
11047 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11048 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11049
11050 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11051 settings.update(ANDROID_FLASH_FIRING_POWER,
11052 &flashFiringLevel, 1);
11053
11054 /* lens */
11055 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11056 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11057
11058 if (gCamCapability[mCameraId]->filter_densities_count) {
11059 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11060 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11061 gCamCapability[mCameraId]->filter_densities_count);
11062 }
11063
11064 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11065 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11066
Thierry Strudel3d639192016-09-09 11:52:26 -070011067 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11068 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11069
11070 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11071 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11072
11073 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11074 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11075
11076 /* face detection (default to OFF) */
11077 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11078 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11079
Thierry Strudel54dc9782017-02-15 12:12:10 -080011080 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11081 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011082
11083 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11084 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11085
11086 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11087 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11088
Thierry Strudel3d639192016-09-09 11:52:26 -070011089
11090 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11091 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11092
11093 /* Exposure time (update to the min exposure time) */
11094 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11095 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11096
11097 /* frame duration */
11098 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11099 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11100
11101 /* sensitivity */
11102 static const int32_t default_sensitivity = 100;
11103 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011104#ifndef USE_HAL_3_3
11105 static const int32_t default_isp_sensitivity =
11106 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11107 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11108#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011109
11110 /*edge mode*/
11111 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11112
11113 /*noise reduction mode*/
11114 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11115
11116 /*color correction mode*/
11117 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11118 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11119
11120 /*tonemap mode*/
11121 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11122
11123 int32_t scaler_crop_region[4];
11124 scaler_crop_region[0] = 0;
11125 scaler_crop_region[1] = 0;
11126 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11127 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11128 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11129
11130 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11131 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11132
11133 /*focus distance*/
11134 float focus_distance = 0.0;
11135 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11136
11137 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011138 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011139 float max_range = 0.0;
11140 float max_fixed_fps = 0.0;
11141 int32_t fps_range[2] = {0, 0};
11142 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11143 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011144 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11145 TEMPLATE_MAX_PREVIEW_FPS) {
11146 continue;
11147 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011148 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11149 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11150 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11151 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11152 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11153 if (range > max_range) {
11154 fps_range[0] =
11155 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11156 fps_range[1] =
11157 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11158 max_range = range;
11159 }
11160 } else {
11161 if (range < 0.01 && max_fixed_fps <
11162 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11163 fps_range[0] =
11164 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11165 fps_range[1] =
11166 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11167 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11168 }
11169 }
11170 }
11171 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
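    // Illustrative example of the selection above: given eligible ranges
    // [15,15], [7,30] and [30,30], preview/still/ZSL templates pick [7,30]
    // (widest span), while the remaining templates (e.g. video record) pick
    // [30,30], the fixed range (span < 0.01) with the highest max fps.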
11172
11173 /*precapture trigger*/
11174 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11175 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11176
11177 /*af trigger*/
11178 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11179 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11180
11181 /* ae & af regions */
11182 int32_t active_region[] = {
11183 gCamCapability[mCameraId]->active_array_size.left,
11184 gCamCapability[mCameraId]->active_array_size.top,
11185 gCamCapability[mCameraId]->active_array_size.left +
11186 gCamCapability[mCameraId]->active_array_size.width,
11187 gCamCapability[mCameraId]->active_array_size.top +
11188 gCamCapability[mCameraId]->active_array_size.height,
11189 0};
11190 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11191 sizeof(active_region) / sizeof(active_region[0]));
11192 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11193 sizeof(active_region) / sizeof(active_region[0]));
11194
11195 /* black level lock */
11196 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11197 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11198
Thierry Strudel3d639192016-09-09 11:52:26 -070011199 //special defaults for manual template
11200 if (type == CAMERA3_TEMPLATE_MANUAL) {
11201 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11202 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11203
11204 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11205 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11206
11207 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11208 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11209
11210 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11211 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11212
11213 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11214 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11215
11216 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11217 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11218 }
11219
11220
11221 /* TNR
11222 * We'll use this location to determine for which templates TNR will be set.
11223 * We will enable TNR if either the preview or the video stream requires TNR.
11224 * This is not to be confused with per-stream linking; that decision is still
11225 * made on a per-session basis and is handled as part of stream configuration.
11226 */
11227 uint8_t tnr_enable = 0;
11228
11229 if (m_bTnrPreview || m_bTnrVideo) {
11230
11231 switch (type) {
11232 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11233 tnr_enable = 1;
11234 break;
11235
11236 default:
11237 tnr_enable = 0;
11238 break;
11239 }
11240
11241 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11242 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11243 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11244
11245 LOGD("TNR:%d with process plate %d for template:%d",
11246 tnr_enable, tnr_process_type, type);
11247 }
11248
11249 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011250 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011251 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11252
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011253 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011254 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11255
Shuzhen Wang920ea402017-05-03 08:49:39 -070011256 uint8_t related_camera_id = mCameraId;
11257 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011258
11259 /* CDS default */
11260 char prop[PROPERTY_VALUE_MAX];
11261 memset(prop, 0, sizeof(prop));
11262 property_get("persist.camera.CDS", prop, "Auto");
11263 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11264 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11265 if (CAM_CDS_MODE_MAX == cds_mode) {
11266 cds_mode = CAM_CDS_MODE_AUTO;
11267 }
11268
11269 /* Disabling CDS in templates which have TNR enabled*/
11270 if (tnr_enable)
11271 cds_mode = CAM_CDS_MODE_OFF;
11272
11273 int32_t mode = cds_mode;
11274 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011275
Thierry Strudel269c81a2016-10-12 12:13:59 -070011276 /* Manual Convergence AEC Speed is disabled by default*/
11277 float default_aec_speed = 0;
11278 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11279
11280 /* Manual Convergence AWB Speed is disabled by default*/
11281 float default_awb_speed = 0;
11282 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11283
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011284 // Set instant AEC to normal convergence by default
11285 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11286 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11287
Shuzhen Wang19463d72016-03-08 11:09:52 -080011288 /* hybrid ae */
11289 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11290
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011291 if (gExposeEnableZslKey) {
11292 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11293 }
11294
Thierry Strudel3d639192016-09-09 11:52:26 -070011295 mDefaultMetadata[type] = settings.release();
11296
11297 return mDefaultMetadata[type];
11298}
11299
11300/*===========================================================================
11301 * FUNCTION : setFrameParameters
11302 *
11303 * DESCRIPTION: set parameters per frame as requested in the metadata from
11304 * framework
11305 *
11306 * PARAMETERS :
11307 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011308 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011309 * @blob_request: Whether this request is a blob request or not
11310 *
11311 * RETURN : success: NO_ERROR
11312 * failure: non-zero failure code
11313 *==========================================================================*/
11314int QCamera3HardwareInterface::setFrameParameters(
11315 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011316 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011317 int blob_request,
11318 uint32_t snapshotStreamId)
11319{
11320 /*translate from camera_metadata_t type to parm_type_t*/
11321 int rc = 0;
11322 int32_t hal_version = CAM_HAL_V3;
11323
11324 clear_metadata_buffer(mParameters);
11325 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11326 LOGE("Failed to set hal version in the parameters");
11327 return BAD_VALUE;
11328 }
11329
11330 /*we need to update the frame number in the parameters*/
11331 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11332 request->frame_number)) {
11333 LOGE("Failed to set the frame number in the parameters");
11334 return BAD_VALUE;
11335 }
11336
11337 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011338 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011339 LOGE("Failed to set stream type mask in the parameters");
11340 return BAD_VALUE;
11341 }
11342
11343 if (mUpdateDebugLevel) {
11344 uint32_t dummyDebugLevel = 0;
11345 /* The value of dummyDebugLevel is irrelevant. On
11346 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11347 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11348 dummyDebugLevel)) {
11349 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11350 return BAD_VALUE;
11351 }
11352 mUpdateDebugLevel = false;
11353 }
11354
11355 if(request->settings != NULL){
11356 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11357 if (blob_request)
11358 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11359 }
11360
11361 return rc;
11362}
11363
11364/*===========================================================================
11365 * FUNCTION : setReprocParameters
11366 *
11367 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11368 * return it.
11369 *
11370 * PARAMETERS :
11371 * @request : request that needs to be serviced
11372 *
11373 * RETURN : success: NO_ERROR
11374 * failure: non-zero failure code
11375 *==========================================================================*/
11376int32_t QCamera3HardwareInterface::setReprocParameters(
11377 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11378 uint32_t snapshotStreamId)
11379{
11380 /*translate from camera_metadata_t type to parm_type_t*/
11381 int rc = 0;
11382
11383 if (NULL == request->settings){
11384 LOGE("Reprocess settings cannot be NULL");
11385 return BAD_VALUE;
11386 }
11387
11388 if (NULL == reprocParam) {
11389 LOGE("Invalid reprocessing metadata buffer");
11390 return BAD_VALUE;
11391 }
11392 clear_metadata_buffer(reprocParam);
11393
11394 /*we need to update the frame number in the parameters*/
11395 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11396 request->frame_number)) {
11397 LOGE("Failed to set the frame number in the parameters");
11398 return BAD_VALUE;
11399 }
11400
11401 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11402 if (rc < 0) {
11403 LOGE("Failed to translate reproc request");
11404 return rc;
11405 }
11406
11407 CameraMetadata frame_settings;
11408 frame_settings = request->settings;
11409 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11410 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11411 int32_t *crop_count =
11412 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11413 int32_t *crop_data =
11414 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11415 int32_t *roi_map =
11416 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11417 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11418 cam_crop_data_t crop_meta;
11419 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11420 crop_meta.num_of_streams = 1;
11421 crop_meta.crop_info[0].crop.left = crop_data[0];
11422 crop_meta.crop_info[0].crop.top = crop_data[1];
11423 crop_meta.crop_info[0].crop.width = crop_data[2];
11424 crop_meta.crop_info[0].crop.height = crop_data[3];
11425
11426 crop_meta.crop_info[0].roi_map.left =
11427 roi_map[0];
11428 crop_meta.crop_info[0].roi_map.top =
11429 roi_map[1];
11430 crop_meta.crop_info[0].roi_map.width =
11431 roi_map[2];
11432 crop_meta.crop_info[0].roi_map.height =
11433 roi_map[3];
11434
11435 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11436 rc = BAD_VALUE;
11437 }
11438 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11439 request->input_buffer->stream,
11440 crop_meta.crop_info[0].crop.left,
11441 crop_meta.crop_info[0].crop.top,
11442 crop_meta.crop_info[0].crop.width,
11443 crop_meta.crop_info[0].crop.height);
11444 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11445 request->input_buffer->stream,
11446 crop_meta.crop_info[0].roi_map.left,
11447 crop_meta.crop_info[0].roi_map.top,
11448 crop_meta.crop_info[0].roi_map.width,
11449 crop_meta.crop_info[0].roi_map.height);
11450 } else {
11451 LOGE("Invalid reprocess crop count %d!", *crop_count);
11452 }
11453 } else {
11454 LOGE("No crop data from matching output stream");
11455 }
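    /* Illustrative note: QCAMERA3_CROP_REPROCESS and QCAMERA3_CROP_ROI_MAP_REPROCESS
     * are flat int32 rectangles in {left, top, width, height} order for the single
     * reprocess stream, and QCAMERA3_CROP_COUNT_REPROCESS must lie in
     * (0, MAX_NUM_STREAMS) for the crop data to be consumed above. */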
11456
11457 /* These settings are not needed for regular requests, so handle them specially for
11458 reprocess requests; the information is needed for EXIF tags */
11459 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11460 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11461 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11462 if (NAME_NOT_FOUND != val) {
11463 uint32_t flashMode = (uint32_t)val;
11464 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11465 rc = BAD_VALUE;
11466 }
11467 } else {
11468 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11469 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11470 }
11471 } else {
11472 LOGH("No flash mode in reprocess settings");
11473 }
11474
11475 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11476 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11477 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11478 rc = BAD_VALUE;
11479 }
11480 } else {
11481 LOGH("No flash state in reprocess settings");
11482 }
11483
11484 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11485 uint8_t *reprocessFlags =
11486 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11487 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11488 *reprocessFlags)) {
11489 rc = BAD_VALUE;
11490 }
11491 }
11492
Thierry Strudel54dc9782017-02-15 12:12:10 -080011493 // Add exif debug data to internal metadata
11494 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11495 mm_jpeg_debug_exif_params_t *debug_params =
11496 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11497 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11498 // AE
11499 if (debug_params->ae_debug_params_valid == TRUE) {
11500 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11501 debug_params->ae_debug_params);
11502 }
11503 // AWB
11504 if (debug_params->awb_debug_params_valid == TRUE) {
11505 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11506 debug_params->awb_debug_params);
11507 }
11508 // AF
11509 if (debug_params->af_debug_params_valid == TRUE) {
11510 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11511 debug_params->af_debug_params);
11512 }
11513 // ASD
11514 if (debug_params->asd_debug_params_valid == TRUE) {
11515 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11516 debug_params->asd_debug_params);
11517 }
11518 // Stats
11519 if (debug_params->stats_debug_params_valid == TRUE) {
11520 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11521 debug_params->stats_debug_params);
11522 }
11523 // BE Stats
11524 if (debug_params->bestats_debug_params_valid == TRUE) {
11525 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11526 debug_params->bestats_debug_params);
11527 }
11528 // BHIST
11529 if (debug_params->bhist_debug_params_valid == TRUE) {
11530 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11531 debug_params->bhist_debug_params);
11532 }
11533 // 3A Tuning
11534 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11535 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11536 debug_params->q3a_tuning_debug_params);
11537 }
11538 }
11539
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011540 // Add metadata which reprocess needs
11541 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11542 cam_reprocess_info_t *repro_info =
11543 (cam_reprocess_info_t *)frame_settings.find
11544 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011545 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011546 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011547 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011548 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011549 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011550 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011551 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011552 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011553 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011554 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011555 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011556 repro_info->pipeline_flip);
11557 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11558 repro_info->af_roi);
11559 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11560 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011561 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11562 CAM_INTF_PARM_ROTATION metadata has already been added in
11563 translateToHalMetadata and HAL needs to keep this new rotation
11564 metadata. Otherwise, the old rotation info saved in the vendor tag
11565 is used. */
11566 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11567 CAM_INTF_PARM_ROTATION, reprocParam) {
11568 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11569 } else {
11570 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011571 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011572 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011573 }
11574
11575 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11576 to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding;
11577 roi.width and roi.height are the final JPEG size.
11578 For now, HAL only checks this for reprocess requests. */
11579 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11580 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11581 uint8_t *enable =
11582 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11583 if (*enable == TRUE) {
11584 int32_t *crop_data =
11585 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11586 cam_stream_crop_info_t crop_meta;
11587 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11588 crop_meta.stream_id = 0;
11589 crop_meta.crop.left = crop_data[0];
11590 crop_meta.crop.top = crop_data[1];
11591 crop_meta.crop.width = crop_data[2];
11592 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011593 // The JPEG crop roi should match cpp output size
11594 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11595 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11596 crop_meta.roi_map.left = 0;
11597 crop_meta.roi_map.top = 0;
11598 crop_meta.roi_map.width = cpp_crop->crop.width;
11599 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011600 }
11601 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11602 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011603 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011604 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011605 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11606 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011607 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011608 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11609
11610 // Add JPEG scale information
11611 cam_dimension_t scale_dim;
11612 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11613 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11614 int32_t *roi =
11615 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11616 scale_dim.width = roi[2];
11617 scale_dim.height = roi[3];
11618 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11619 scale_dim);
11620 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11621 scale_dim.width, scale_dim.height, mCameraId);
11622 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011623 }
11624 }
11625
11626 return rc;
11627}
11628
11629/*===========================================================================
11630 * FUNCTION : saveRequestSettings
11631 *
11632 * DESCRIPTION: Add any settings that might have changed to the request settings
11633 * and save the settings to be applied on the frame
11634 *
11635 * PARAMETERS :
11636 * @jpegMetadata : the extracted and/or modified jpeg metadata
11637 * @request : request with initial settings
11638 *
11639 * RETURN :
11640 * camera_metadata_t* : pointer to the saved request settings
11641 *==========================================================================*/
11642camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11643 const CameraMetadata &jpegMetadata,
11644 camera3_capture_request_t *request)
11645{
11646 camera_metadata_t *resultMetadata;
11647 CameraMetadata camMetadata;
11648 camMetadata = request->settings;
11649
11650 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11651 int32_t thumbnail_size[2];
11652 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11653 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11654 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11655 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11656 }
11657
11658 if (request->input_buffer != NULL) {
11659 uint8_t reprocessFlags = 1;
11660 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11661 (uint8_t*)&reprocessFlags,
11662 sizeof(reprocessFlags));
11663 }
11664
11665 resultMetadata = camMetadata.release();
11666 return resultMetadata;
11667}
11668
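/*===========================================================================
 * EXAMPLE (editorial sketch, not part of the original HAL source):
 * saveRequestSettings() above clones the request settings into a standalone
 * camera_metadata_t owned by the caller. This sketch shows the same
 * clone-and-release pattern; the cleanup via free_camera_metadata() is an
 * assumption based on the standard camera_metadata C API.
 *==========================================================================*/
static camera_metadata_t *exampleCloneRequestSettings(
        const camera3_capture_request_t *request)
{
    CameraMetadata cloned;
    cloned = request->settings;              // deep copy of the framework buffer
    camera_metadata_t *saved = cloned.release();
    // ... keep "saved" alive while the frame is in flight, then:
    // free_camera_metadata(saved);          // caller owns the returned buffer
    return saved;
}
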
11669/*===========================================================================
11670 * FUNCTION : setHalFpsRange
11671 *
11672 * DESCRIPTION: set FPS range parameter
11673 *
11674 *
11675 * PARAMETERS :
11676 * @settings : Metadata from framework
11677 * @hal_metadata: Metadata buffer
11678 *
11679 *
11680 * RETURN : success: NO_ERROR
11681 *              failure: BAD_VALUE
11682 *==========================================================================*/
11683int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11684 metadata_buffer_t *hal_metadata)
11685{
11686 int32_t rc = NO_ERROR;
11687 cam_fps_range_t fps_range;
11688 fps_range.min_fps = (float)
11689 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11690 fps_range.max_fps = (float)
11691 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11692 fps_range.video_min_fps = fps_range.min_fps;
11693 fps_range.video_max_fps = fps_range.max_fps;
11694
11695 LOGD("aeTargetFpsRange fps: [%f %f]",
11696 fps_range.min_fps, fps_range.max_fps);
11697 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11698 * follows:
11699 * ---------------------------------------------------------------|
11700 * Video stream is absent in configure_streams |
11701 * (Camcorder preview before the first video record) |
11702 * ---------------------------------------------------------------|
11703 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11704 * | | | vid_min/max_fps|
11705 * ---------------------------------------------------------------|
11706 * NO | [ 30, 240] | 240 | [240, 240] |
11707 * |-------------|-------------|----------------|
11708 * | [240, 240] | 240 | [240, 240] |
11709 * ---------------------------------------------------------------|
11710 * Video stream is present in configure_streams |
11711 * ---------------------------------------------------------------|
11712 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11713 * | | | vid_min/max_fps|
11714 * ---------------------------------------------------------------|
11715 * NO | [ 30, 240] | 240 | [240, 240] |
11716 * (camcorder prev |-------------|-------------|----------------|
11717 * after video rec | [240, 240] | 240 | [240, 240] |
11718 * is stopped) | | | |
11719 * ---------------------------------------------------------------|
11720 * YES | [ 30, 240] | 240 | [240, 240] |
11721 * |-------------|-------------|----------------|
11722 * | [240, 240] | 240 | [240, 240] |
11723 * ---------------------------------------------------------------|
11724 * When Video stream is absent in configure_streams,
11725 * preview fps = sensor_fps / batchsize
11726 * Eg: for 240fps at batchSize 4, preview = 60fps
11727 * for 120fps at batchSize 4, preview = 30fps
11728 *
11729 * When video stream is present in configure_streams, preview fps is as per
11730 * the ratio of preview buffers to video buffers requested in process
11731 * capture request
11732 */
11733 mBatchSize = 0;
11734 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11735 fps_range.min_fps = fps_range.video_max_fps;
11736 fps_range.video_min_fps = fps_range.video_max_fps;
11737 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11738 fps_range.max_fps);
11739 if (NAME_NOT_FOUND != val) {
11740 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11741 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11742 return BAD_VALUE;
11743 }
11744
11745 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11746 /* If batchmode is currently in progress and the fps changes,
11747 * set the flag to restart the sensor */
11748 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11749 (mHFRVideoFps != fps_range.max_fps)) {
11750 mNeedSensorRestart = true;
11751 }
11752 mHFRVideoFps = fps_range.max_fps;
11753 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11754 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11755 mBatchSize = MAX_HFR_BATCH_SIZE;
11756 }
11757 }
11758 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11759
11760 }
11761 } else {
11762        /* HFR mode is a session parameter in the backend/ISP. It should be reset
11763         * when not in HFR mode. */
11764 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11765 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11766 return BAD_VALUE;
11767 }
11768 }
11769 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11770 return BAD_VALUE;
11771 }
11772 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11773 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11774 return rc;
11775}
11776
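/*===========================================================================
 * EXAMPLE (editorial sketch, not part of the original HAL source):
 * Worked batch-size derivation for the constrained-HFR path above. The helper
 * takes the FPS constants as parameters, so no particular value of
 * PREVIEW_FPS_FOR_HFR or MAX_HFR_BATCH_SIZE is asserted; the sample numbers
 * follow the "240fps at batchSize 4, preview = 60fps" example in the comment
 * above.
 *==========================================================================*/
static uint32_t exampleHfrBatchSize(float maxFps, float previewFpsForHfr,
        uint32_t maxBatchSize)
{
    // e.g. maxFps = 240, previewFpsForHfr = 60 -> batch = 4,
    // so preview effectively runs at 240 / 4 = 60fps.
    uint32_t batch = (uint32_t)(maxFps / previewFpsForHfr);
    return (batch > maxBatchSize) ? maxBatchSize : batch;
}
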
11777/*===========================================================================
11778 * FUNCTION : translateToHalMetadata
11779 *
11780 * DESCRIPTION: translate framework camera_metadata_t settings into HAL parm_type_t entries
11781 *
11782 *
11783 * PARAMETERS :
11784 *   @request : request sent from framework
 *   @hal_metadata : HAL metadata buffer to be populated
 *   @snapshotStreamId : stream ID of the snapshot stream
11785 *
11786 *
11787 * RETURN : success: NO_ERROR
11788 *              failure: BAD_VALUE
11789 *==========================================================================*/
11790int QCamera3HardwareInterface::translateToHalMetadata
11791 (const camera3_capture_request_t *request,
11792 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011793 uint32_t snapshotStreamId) {
11794 if (request == nullptr || hal_metadata == nullptr) {
11795 return BAD_VALUE;
11796 }
11797
11798 int64_t minFrameDuration = getMinFrameDuration(request);
11799
11800 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11801 minFrameDuration);
11802}
11803
11804int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11805 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11806 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11807
Thierry Strudel3d639192016-09-09 11:52:26 -070011808 int rc = 0;
11809 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011810 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011811
11812 /* Do not change the order of the following list unless you know what you are
11813 * doing.
11814 * The order is laid out in such a way that parameters in the front of the table
11815 * may be used to override the parameters later in the table. Examples are:
11816 * 1. META_MODE should precede AEC/AWB/AF MODE
11817 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11818 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11819 * 4. Any mode should precede its corresponding settings
11820 */
11821 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11822 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11823 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11824 rc = BAD_VALUE;
11825 }
11826 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11827 if (rc != NO_ERROR) {
11828 LOGE("extractSceneMode failed");
11829 }
11830 }
11831
11832 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11833 uint8_t fwk_aeMode =
11834 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11835 uint8_t aeMode;
11836 int32_t redeye;
11837
11838 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11839 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011840 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11841 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011842 } else {
11843 aeMode = CAM_AE_MODE_ON;
11844 }
11845 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11846 redeye = 1;
11847 } else {
11848 redeye = 0;
11849 }
11850
11851 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11852 fwk_aeMode);
11853 if (NAME_NOT_FOUND != val) {
11854 int32_t flashMode = (int32_t)val;
11855 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11856 }
11857
11858 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11859 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11860 rc = BAD_VALUE;
11861 }
11862 }
11863
11864 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11865 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11866 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11867 fwk_whiteLevel);
11868 if (NAME_NOT_FOUND != val) {
11869 uint8_t whiteLevel = (uint8_t)val;
11870 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11871 rc = BAD_VALUE;
11872 }
11873 }
11874 }
11875
11876 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11877 uint8_t fwk_cacMode =
11878 frame_settings.find(
11879 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11880 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11881 fwk_cacMode);
11882 if (NAME_NOT_FOUND != val) {
11883 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11884 bool entryAvailable = FALSE;
11885            // Check whether the framework-requested CAC mode is supported by the device
11886 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11887 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11888 entryAvailable = TRUE;
11889 break;
11890 }
11891 }
11892 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11893            // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.,
11894            // Only HW ISP CAC + no SW CAC : advertise all 3, with HIGH_QUALITY behaving the same as FAST in the ISP
11895            // No HW ISP CAC + only SW CAC : advertise all 3, with FAST behaving the same as OFF
11896 if (entryAvailable == FALSE) {
11897 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11898 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11899 } else {
11900 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11901                        // HIGH_QUALITY is not supported, so set FAST, as the spec says the underlying
11902                        // device implementation can be the same for both modes.
11903 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11904 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11905                        // FAST is not supported, so we cannot set HIGH or FAST; choose OFF
11906                        // to avoid the fps drop due to high-quality processing
11907 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11908 } else {
11909 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11910 }
11911 }
11912 }
11913 LOGD("Final cacMode is %d", cacMode);
11914 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11915 rc = BAD_VALUE;
11916 }
11917 } else {
11918 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11919 }
11920 }
11921
Thierry Strudel2896d122017-02-23 19:18:03 -080011922 char af_value[PROPERTY_VALUE_MAX];
11923 property_get("persist.camera.af.infinity", af_value, "0");
11924
Jason Lee84ae9972017-02-24 13:24:24 -080011925 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011926 if (atoi(af_value) == 0) {
11927 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011928 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011929 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11930 fwk_focusMode);
11931 if (NAME_NOT_FOUND != val) {
11932 uint8_t focusMode = (uint8_t)val;
11933 LOGD("set focus mode %d", focusMode);
11934 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11935 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11936 rc = BAD_VALUE;
11937 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011938 }
11939 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011940 } else {
11941 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11942 LOGE("Focus forced to infinity %d", focusMode);
11943 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11944 rc = BAD_VALUE;
11945 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011946 }
11947
Jason Lee84ae9972017-02-24 13:24:24 -080011948 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11949 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011950 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11951 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11952 focalDistance)) {
11953 rc = BAD_VALUE;
11954 }
11955 }
11956
11957 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11958 uint8_t fwk_antibandingMode =
11959 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11960 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11961 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11962 if (NAME_NOT_FOUND != val) {
11963 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011964 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11965 if (m60HzZone) {
11966 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11967 } else {
11968 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11969 }
11970 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011971 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11972 hal_antibandingMode)) {
11973 rc = BAD_VALUE;
11974 }
11975 }
11976 }
11977
11978 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11979 int32_t expCompensation = frame_settings.find(
11980 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11981 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11982 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11983 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11984 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011985 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011986 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11987 expCompensation)) {
11988 rc = BAD_VALUE;
11989 }
11990 }
11991
11992 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11993 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11994 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11995 rc = BAD_VALUE;
11996 }
11997 }
11998 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11999 rc = setHalFpsRange(frame_settings, hal_metadata);
12000 if (rc != NO_ERROR) {
12001 LOGE("setHalFpsRange failed");
12002 }
12003 }
12004
12005 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12006 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12007 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12008 rc = BAD_VALUE;
12009 }
12010 }
12011
12012 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12013 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12014 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12015 fwk_effectMode);
12016 if (NAME_NOT_FOUND != val) {
12017 uint8_t effectMode = (uint8_t)val;
12018 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12019 rc = BAD_VALUE;
12020 }
12021 }
12022 }
12023
12024 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12025 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12026 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12027 colorCorrectMode)) {
12028 rc = BAD_VALUE;
12029 }
12030 }
12031
12032 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12033 cam_color_correct_gains_t colorCorrectGains;
12034 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12035 colorCorrectGains.gains[i] =
12036 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12037 }
12038 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12039 colorCorrectGains)) {
12040 rc = BAD_VALUE;
12041 }
12042 }
12043
12044 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12045 cam_color_correct_matrix_t colorCorrectTransform;
12046 cam_rational_type_t transform_elem;
12047 size_t num = 0;
12048 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12049 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12050 transform_elem.numerator =
12051 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12052 transform_elem.denominator =
12053 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12054 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12055 num++;
12056 }
12057 }
12058 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12059 colorCorrectTransform)) {
12060 rc = BAD_VALUE;
12061 }
12062 }
12063
12064 cam_trigger_t aecTrigger;
12065 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12066 aecTrigger.trigger_id = -1;
12067 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12068 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12069 aecTrigger.trigger =
12070 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12071 aecTrigger.trigger_id =
12072 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12073 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12074 aecTrigger)) {
12075 rc = BAD_VALUE;
12076 }
12077 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12078 aecTrigger.trigger, aecTrigger.trigger_id);
12079 }
12080
12081 /*af_trigger must come with a trigger id*/
12082 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12083 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12084 cam_trigger_t af_trigger;
12085 af_trigger.trigger =
12086 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12087 af_trigger.trigger_id =
12088 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12089 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12090 rc = BAD_VALUE;
12091 }
12092 LOGD("AfTrigger: %d AfTriggerID: %d",
12093 af_trigger.trigger, af_trigger.trigger_id);
12094 }
12095
12096 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12097 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12098 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12099 rc = BAD_VALUE;
12100 }
12101 }
12102 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12103 cam_edge_application_t edge_application;
12104 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012105
Thierry Strudel3d639192016-09-09 11:52:26 -070012106 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12107 edge_application.sharpness = 0;
12108 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012109 edge_application.sharpness =
12110 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12111 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12112 int32_t sharpness =
12113 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12114 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12115 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12116 LOGD("Setting edge mode sharpness %d", sharpness);
12117 edge_application.sharpness = sharpness;
12118 }
12119 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012120 }
12121 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12122 rc = BAD_VALUE;
12123 }
12124 }
12125
12126 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12127 int32_t respectFlashMode = 1;
12128 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12129 uint8_t fwk_aeMode =
12130 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012131 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12132 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12133 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012134 respectFlashMode = 0;
12135 LOGH("AE Mode controls flash, ignore android.flash.mode");
12136 }
12137 }
12138 if (respectFlashMode) {
12139 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12140 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12141 LOGH("flash mode after mapping %d", val);
12142 // To check: CAM_INTF_META_FLASH_MODE usage
12143 if (NAME_NOT_FOUND != val) {
12144 uint8_t flashMode = (uint8_t)val;
12145 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12146 rc = BAD_VALUE;
12147 }
12148 }
12149 }
12150 }
12151
12152 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12153 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12154 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12155 rc = BAD_VALUE;
12156 }
12157 }
12158
12159 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12160 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12161 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12162 flashFiringTime)) {
12163 rc = BAD_VALUE;
12164 }
12165 }
12166
12167 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12168 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12169 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12170 hotPixelMode)) {
12171 rc = BAD_VALUE;
12172 }
12173 }
12174
12175 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12176 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12177 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12178 lensAperture)) {
12179 rc = BAD_VALUE;
12180 }
12181 }
12182
12183 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12184 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12185 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12186 filterDensity)) {
12187 rc = BAD_VALUE;
12188 }
12189 }
12190
12191 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12192 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12193 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12194 focalLength)) {
12195 rc = BAD_VALUE;
12196 }
12197 }
12198
12199 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12200 uint8_t optStabMode =
12201 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12202 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12203 optStabMode)) {
12204 rc = BAD_VALUE;
12205 }
12206 }
12207
12208 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12209 uint8_t videoStabMode =
12210 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12211 LOGD("videoStabMode from APP = %d", videoStabMode);
12212 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12213 videoStabMode)) {
12214 rc = BAD_VALUE;
12215 }
12216 }
12217
12218
12219 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12220 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12221 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12222 noiseRedMode)) {
12223 rc = BAD_VALUE;
12224 }
12225 }
12226
12227 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12228 float reprocessEffectiveExposureFactor =
12229 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12230 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12231 reprocessEffectiveExposureFactor)) {
12232 rc = BAD_VALUE;
12233 }
12234 }
12235
12236 cam_crop_region_t scalerCropRegion;
12237 bool scalerCropSet = false;
12238 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12239 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12240 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12241 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12242 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12243
12244 // Map coordinate system from active array to sensor output.
12245 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12246 scalerCropRegion.width, scalerCropRegion.height);
12247
12248 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12249 scalerCropRegion)) {
12250 rc = BAD_VALUE;
12251 }
12252 scalerCropSet = true;
12253 }
12254
12255 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12256 int64_t sensorExpTime =
12257 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12258 LOGD("setting sensorExpTime %lld", sensorExpTime);
12259 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12260 sensorExpTime)) {
12261 rc = BAD_VALUE;
12262 }
12263 }
12264
12265 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12266 int64_t sensorFrameDuration =
12267 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012268 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12269 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12270 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12271 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12272 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12273 sensorFrameDuration)) {
12274 rc = BAD_VALUE;
12275 }
12276 }
12277
12278 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12279 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12280 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12281 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12282 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12283 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12284 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12285 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12286 sensorSensitivity)) {
12287 rc = BAD_VALUE;
12288 }
12289 }
12290
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012291#ifndef USE_HAL_3_3
12292 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12293 int32_t ispSensitivity =
12294 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12295 if (ispSensitivity <
12296 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12297 ispSensitivity =
12298 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12299 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12300 }
12301 if (ispSensitivity >
12302 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12303 ispSensitivity =
12304 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12305 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12306 }
12307 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12308 ispSensitivity)) {
12309 rc = BAD_VALUE;
12310 }
12311 }
12312#endif
12313
Thierry Strudel3d639192016-09-09 11:52:26 -070012314 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12315 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12316 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12317 rc = BAD_VALUE;
12318 }
12319 }
12320
12321 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12322 uint8_t fwk_facedetectMode =
12323 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12324
12325 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12326 fwk_facedetectMode);
12327
12328 if (NAME_NOT_FOUND != val) {
12329 uint8_t facedetectMode = (uint8_t)val;
12330 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12331 facedetectMode)) {
12332 rc = BAD_VALUE;
12333 }
12334 }
12335 }
12336
Thierry Strudel54dc9782017-02-15 12:12:10 -080012337 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012338 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012339 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012340 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12341 histogramMode)) {
12342 rc = BAD_VALUE;
12343 }
12344 }
12345
12346 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12347 uint8_t sharpnessMapMode =
12348 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12349 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12350 sharpnessMapMode)) {
12351 rc = BAD_VALUE;
12352 }
12353 }
12354
12355 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12356 uint8_t tonemapMode =
12357 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12358 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12359 rc = BAD_VALUE;
12360 }
12361 }
12362    /* Tonemap curve channels: ch 0 = G, ch 1 = B, ch 2 = R */
12363    /* All tonemap channels have the same number of points */
12364 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12365 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12366 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12367 cam_rgb_tonemap_curves tonemapCurves;
12368 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12369 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12370 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12371 tonemapCurves.tonemap_points_cnt,
12372 CAM_MAX_TONEMAP_CURVE_SIZE);
12373 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12374 }
12375
12376 /* ch0 = G*/
12377 size_t point = 0;
12378 cam_tonemap_curve_t tonemapCurveGreen;
12379 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12380 for (size_t j = 0; j < 2; j++) {
12381 tonemapCurveGreen.tonemap_points[i][j] =
12382 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12383 point++;
12384 }
12385 }
12386 tonemapCurves.curves[0] = tonemapCurveGreen;
12387
12388 /* ch 1 = B */
12389 point = 0;
12390 cam_tonemap_curve_t tonemapCurveBlue;
12391 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12392 for (size_t j = 0; j < 2; j++) {
12393 tonemapCurveBlue.tonemap_points[i][j] =
12394 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12395 point++;
12396 }
12397 }
12398 tonemapCurves.curves[1] = tonemapCurveBlue;
12399
12400 /* ch 2 = R */
12401 point = 0;
12402 cam_tonemap_curve_t tonemapCurveRed;
12403 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12404 for (size_t j = 0; j < 2; j++) {
12405 tonemapCurveRed.tonemap_points[i][j] =
12406 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12407 point++;
12408 }
12409 }
12410 tonemapCurves.curves[2] = tonemapCurveRed;
12411
12412 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12413 tonemapCurves)) {
12414 rc = BAD_VALUE;
12415 }
12416 }
12417
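    /* EXAMPLE (editorial note, not part of the original HAL source):
     * Each ANDROID_TONEMAP_CURVE_* entry is a flat float array of (Pin, Pout)
     * pairs, which is why tonemap_points_cnt above is count/2. An identity
     * curve with two control points set from the client side could look like
     * the following (illustrative only; "settings" is a CameraMetadata):
     *
     *     float curve[] = {0.0f, 0.0f, 1.0f, 1.0f};   // (Pin,Pout),(Pin,Pout)
     *     settings.update(ANDROID_TONEMAP_CURVE_RED,   curve, 4);
     *     settings.update(ANDROID_TONEMAP_CURVE_GREEN, curve, 4);
     *     settings.update(ANDROID_TONEMAP_CURVE_BLUE,  curve, 4);
     */
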
12418 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12419 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12420 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12421 captureIntent)) {
12422 rc = BAD_VALUE;
12423 }
12424 }
12425
12426 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12427 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12428 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12429 blackLevelLock)) {
12430 rc = BAD_VALUE;
12431 }
12432 }
12433
12434 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12435 uint8_t lensShadingMapMode =
12436 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12437 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12438 lensShadingMapMode)) {
12439 rc = BAD_VALUE;
12440 }
12441 }
12442
12443 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12444 cam_area_t roi;
12445 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012446 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012447
12448 // Map coordinate system from active array to sensor output.
12449 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12450 roi.rect.height);
12451
12452 if (scalerCropSet) {
12453 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12454 }
12455 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12456 rc = BAD_VALUE;
12457 }
12458 }
12459
12460 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12461 cam_area_t roi;
12462 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012463 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012464
12465 // Map coordinate system from active array to sensor output.
12466 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12467 roi.rect.height);
12468
12469 if (scalerCropSet) {
12470 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12471 }
12472 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12473 rc = BAD_VALUE;
12474 }
12475 }
12476
12477 // CDS for non-HFR non-video mode
12478 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12479 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12480 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12481 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12482 LOGE("Invalid CDS mode %d!", *fwk_cds);
12483 } else {
12484 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12485 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12486 rc = BAD_VALUE;
12487 }
12488 }
12489 }
12490
Thierry Strudel04e026f2016-10-10 11:27:36 -070012491 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012492 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012493 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012494 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12495 }
12496 if (m_bVideoHdrEnabled)
12497 vhdr = CAM_VIDEO_HDR_MODE_ON;
12498
Thierry Strudel54dc9782017-02-15 12:12:10 -080012499 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12500
12501 if(vhdr != curr_hdr_state)
12502 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12503
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012504 rc = setVideoHdrMode(mParameters, vhdr);
12505 if (rc != NO_ERROR) {
12506 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012507 }
12508
12509 //IR
12510 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12511 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12512 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012513 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12514 uint8_t isIRon = 0;
12515
12516        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012517 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12518 LOGE("Invalid IR mode %d!", fwk_ir);
12519 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012520 if(isIRon != curr_ir_state )
12521 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12522
Thierry Strudel04e026f2016-10-10 11:27:36 -070012523 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12524 CAM_INTF_META_IR_MODE, fwk_ir)) {
12525 rc = BAD_VALUE;
12526 }
12527 }
12528 }
12529
Thierry Strudel54dc9782017-02-15 12:12:10 -080012530 //Binning Correction Mode
12531 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12532 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12533 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12534 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12535 || (0 > fwk_binning_correction)) {
12536 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12537 } else {
12538 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12539 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12540 rc = BAD_VALUE;
12541 }
12542 }
12543 }
12544
Thierry Strudel269c81a2016-10-12 12:13:59 -070012545 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12546 float aec_speed;
12547 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12548 LOGD("AEC Speed :%f", aec_speed);
12549 if ( aec_speed < 0 ) {
12550 LOGE("Invalid AEC mode %f!", aec_speed);
12551 } else {
12552 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12553 aec_speed)) {
12554 rc = BAD_VALUE;
12555 }
12556 }
12557 }
12558
12559 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12560 float awb_speed;
12561 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12562 LOGD("AWB Speed :%f", awb_speed);
12563 if ( awb_speed < 0 ) {
12564 LOGE("Invalid AWB mode %f!", awb_speed);
12565 } else {
12566 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12567 awb_speed)) {
12568 rc = BAD_VALUE;
12569 }
12570 }
12571 }
12572
Thierry Strudel3d639192016-09-09 11:52:26 -070012573 // TNR
12574 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12575 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12576 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012577 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012578 cam_denoise_param_t tnr;
12579 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12580 tnr.process_plates =
12581 (cam_denoise_process_type_t)frame_settings.find(
12582 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12583 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012584
12585 if(b_TnrRequested != curr_tnr_state)
12586 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12587
Thierry Strudel3d639192016-09-09 11:52:26 -070012588 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12589 rc = BAD_VALUE;
12590 }
12591 }
12592
Thierry Strudel54dc9782017-02-15 12:12:10 -080012593 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012594 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012595 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012596 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12597 *exposure_metering_mode)) {
12598 rc = BAD_VALUE;
12599 }
12600 }
12601
Thierry Strudel3d639192016-09-09 11:52:26 -070012602 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12603 int32_t fwk_testPatternMode =
12604 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12605 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12606 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12607
12608 if (NAME_NOT_FOUND != testPatternMode) {
12609 cam_test_pattern_data_t testPatternData;
12610 memset(&testPatternData, 0, sizeof(testPatternData));
12611 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12612 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12613 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12614 int32_t *fwk_testPatternData =
12615 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12616 testPatternData.r = fwk_testPatternData[0];
12617 testPatternData.b = fwk_testPatternData[3];
12618 switch (gCamCapability[mCameraId]->color_arrangement) {
12619 case CAM_FILTER_ARRANGEMENT_RGGB:
12620 case CAM_FILTER_ARRANGEMENT_GRBG:
12621 testPatternData.gr = fwk_testPatternData[1];
12622 testPatternData.gb = fwk_testPatternData[2];
12623 break;
12624 case CAM_FILTER_ARRANGEMENT_GBRG:
12625 case CAM_FILTER_ARRANGEMENT_BGGR:
12626 testPatternData.gr = fwk_testPatternData[2];
12627 testPatternData.gb = fwk_testPatternData[1];
12628 break;
12629 default:
12630 LOGE("color arrangement %d is not supported",
12631 gCamCapability[mCameraId]->color_arrangement);
12632 break;
12633 }
12634 }
12635 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12636 testPatternData)) {
12637 rc = BAD_VALUE;
12638 }
12639 } else {
12640 LOGE("Invalid framework sensor test pattern mode %d",
12641 fwk_testPatternMode);
12642 }
12643 }
12644
12645 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12646 size_t count = 0;
12647 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12648 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12649 gps_coords.data.d, gps_coords.count, count);
12650 if (gps_coords.count != count) {
12651 rc = BAD_VALUE;
12652 }
12653 }
12654
12655 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12656 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12657 size_t count = 0;
12658 const char *gps_methods_src = (const char *)
12659 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12660 memset(gps_methods, '\0', sizeof(gps_methods));
12661 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12662 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12663 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12664 if (GPS_PROCESSING_METHOD_SIZE != count) {
12665 rc = BAD_VALUE;
12666 }
12667 }
12668
12669 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12670 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12671 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12672 gps_timestamp)) {
12673 rc = BAD_VALUE;
12674 }
12675 }
12676
12677 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12678 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12679 cam_rotation_info_t rotation_info;
12680 if (orientation == 0) {
12681 rotation_info.rotation = ROTATE_0;
12682 } else if (orientation == 90) {
12683 rotation_info.rotation = ROTATE_90;
12684 } else if (orientation == 180) {
12685 rotation_info.rotation = ROTATE_180;
12686 } else if (orientation == 270) {
12687 rotation_info.rotation = ROTATE_270;
12688        } else {
            rotation_info.rotation = ROTATE_0; // defensive default; JPEG orientation is expected to be 0/90/180/270
        }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012689 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012690 rotation_info.streamId = snapshotStreamId;
12691 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12692 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12693 rc = BAD_VALUE;
12694 }
12695 }
12696
12697 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12698 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12699 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12700 rc = BAD_VALUE;
12701 }
12702 }
12703
12704 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12705 uint32_t thumb_quality = (uint32_t)
12706 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12707 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12708 thumb_quality)) {
12709 rc = BAD_VALUE;
12710 }
12711 }
12712
12713 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12714 cam_dimension_t dim;
12715 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12716 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12717 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12718 rc = BAD_VALUE;
12719 }
12720 }
12721
12722 // Internal metadata
12723 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12724 size_t count = 0;
12725 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12726 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12727 privatedata.data.i32, privatedata.count, count);
12728 if (privatedata.count != count) {
12729 rc = BAD_VALUE;
12730 }
12731 }
12732
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012733 // ISO/Exposure Priority
12734 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12735 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12736 cam_priority_mode_t mode =
12737 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12738 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12739 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12740 use_iso_exp_pty.previewOnly = FALSE;
12741 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12742 use_iso_exp_pty.value = *ptr;
12743
12744 if(CAM_ISO_PRIORITY == mode) {
12745 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12746 use_iso_exp_pty)) {
12747 rc = BAD_VALUE;
12748 }
12749 }
12750 else {
12751 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12752 use_iso_exp_pty)) {
12753 rc = BAD_VALUE;
12754 }
12755 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012756
12757 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12758 rc = BAD_VALUE;
12759 }
12760 }
12761 } else {
12762 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12763 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012764 }
12765 }
12766
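    /* EXAMPLE (editorial note, not part of the original HAL source):
     * The ISO/exposure priority handling above is driven by two vendor tags:
     * QCAMERA3_SELECT_PRIORITY picks the priority mode and
     * QCAMERA3_USE_ISO_EXP_PRIORITY carries the manual value as an int64
     * (ISO value or exposure time). Illustrative client-side usage; the
     * priority value shown is assumed to correspond to CAM_ISO_PRIORITY and
     * should be taken from cam_priority_mode_t:
     *
     *     int32_t priority = 0;    // assumed CAM_ISO_PRIORITY
     *     int64_t isoValue = 800;  // manual ISO
     *     settings.update(QCAMERA3_SELECT_PRIORITY, &priority, 1);
     *     settings.update(QCAMERA3_USE_ISO_EXP_PRIORITY, &isoValue, 1);
     */
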
12767 // Saturation
12768 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12769 int32_t* use_saturation =
12770 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12771 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12772 rc = BAD_VALUE;
12773 }
12774 }
12775
Thierry Strudel3d639192016-09-09 11:52:26 -070012776 // EV step
12777 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12778 gCamCapability[mCameraId]->exp_compensation_step)) {
12779 rc = BAD_VALUE;
12780 }
12781
12782 // CDS info
12783 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12784 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12785 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12786
12787 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12788 CAM_INTF_META_CDS_DATA, *cdsData)) {
12789 rc = BAD_VALUE;
12790 }
12791 }
12792
Shuzhen Wang19463d72016-03-08 11:09:52 -080012793 // Hybrid AE
12794 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12795 uint8_t *hybrid_ae = (uint8_t *)
12796 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12797
12798 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12799 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12800 rc = BAD_VALUE;
12801 }
12802 }
12803
Shuzhen Wang14415f52016-11-16 18:26:18 -080012804 // Histogram
12805 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12806 uint8_t histogramMode =
12807 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12808 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12809 histogramMode)) {
12810 rc = BAD_VALUE;
12811 }
12812 }
12813
12814 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12815 int32_t histogramBins =
12816 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12817 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12818 histogramBins)) {
12819 rc = BAD_VALUE;
12820 }
12821 }
12822
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012823 // Tracking AF
12824 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12825 uint8_t trackingAfTrigger =
12826 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12827 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12828 trackingAfTrigger)) {
12829 rc = BAD_VALUE;
12830 }
12831 }
12832
Thierry Strudel3d639192016-09-09 11:52:26 -070012833 return rc;
12834}
12835
12836/*===========================================================================
12837 * FUNCTION : captureResultCb
12838 *
12839 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12840 *
12841 * PARAMETERS :
12842 * @frame : frame information from mm-camera-interface
12843 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12844 * @userdata: userdata
12845 *
12846 * RETURN : NONE
12847 *==========================================================================*/
12848void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12849 camera3_stream_buffer_t *buffer,
12850 uint32_t frame_number, bool isInputBuffer, void *userdata)
12851{
12852 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12853 if (hw == NULL) {
12854 LOGE("Invalid hw %p", hw);
12855 return;
12856 }
12857
12858 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12859 return;
12860}
12861
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012862/*===========================================================================
12863 * FUNCTION : setBufferErrorStatus
12864 *
12865 * DESCRIPTION: Callback handler for channels to report any buffer errors
12866 *
12867 * PARAMETERS :
12868 * @ch : Channel on which buffer error is reported from
12869 * @frame_number : frame number on which buffer error is reported on
12870 * @buffer_status : buffer error status
12871 * @userdata: userdata
12872 *
12873 * RETURN : NONE
12874 *==========================================================================*/
12875void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12876 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12877{
12878 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12879 if (hw == NULL) {
12880 LOGE("Invalid hw %p", hw);
12881 return;
12882 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012883
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012884 hw->setBufferErrorStatus(ch, frame_number, err);
12885 return;
12886}
12887
12888void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12889 uint32_t frameNumber, camera3_buffer_status_t err)
12890{
12891 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12892 pthread_mutex_lock(&mMutex);
12893
12894 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12895 if (req.frame_number != frameNumber)
12896 continue;
12897 for (auto& k : req.mPendingBufferList) {
12898 if(k.stream->priv == ch) {
12899 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12900 }
12901 }
12902 }
12903
12904 pthread_mutex_unlock(&mMutex);
12905 return;
12906}
Thierry Strudel3d639192016-09-09 11:52:26 -070012907/*===========================================================================
12908 * FUNCTION : initialize
12909 *
12910 * DESCRIPTION: Pass framework callback pointers to HAL
12911 *
12912 * PARAMETERS :
12913 *
12914 *
12915 * RETURN : Success : 0
12916 * Failure: -ENODEV
12917 *==========================================================================*/
12918
12919int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12920 const camera3_callback_ops_t *callback_ops)
12921{
12922 LOGD("E");
12923 QCamera3HardwareInterface *hw =
12924 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12925 if (!hw) {
12926 LOGE("NULL camera device");
12927 return -ENODEV;
12928 }
12929
12930 int rc = hw->initialize(callback_ops);
12931 LOGD("X");
12932 return rc;
12933}
12934
12935/*===========================================================================
12936 * FUNCTION : configure_streams
12937 *
12938 * DESCRIPTION: Static wrapper that forwards the stream configuration to the HAL instance
12939 *
12940 * PARAMETERS :
12941 *
12942 *
12943 * RETURN : Success: 0
12944 * Failure: -EINVAL (if stream configuration is invalid)
12945 * -ENODEV (fatal error)
12946 *==========================================================================*/
12947
12948int QCamera3HardwareInterface::configure_streams(
12949 const struct camera3_device *device,
12950 camera3_stream_configuration_t *stream_list)
12951{
12952 LOGD("E");
12953 QCamera3HardwareInterface *hw =
12954 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12955 if (!hw) {
12956 LOGE("NULL camera device");
12957 return -ENODEV;
12958 }
12959 int rc = hw->configureStreams(stream_list);
12960 LOGD("X");
12961 return rc;
12962}
12963
12964/*===========================================================================
12965 * FUNCTION : construct_default_request_settings
12966 *
12967 * DESCRIPTION: Configure a settings buffer to meet the required use case
12968 *
12969 * PARAMETERS :
12970 *
12971 *
12972 * RETURN : Success: Return valid metadata
12973 * Failure: Return NULL
12974 *==========================================================================*/
12975const camera_metadata_t* QCamera3HardwareInterface::
12976 construct_default_request_settings(const struct camera3_device *device,
12977 int type)
12978{
12979
12980 LOGD("E");
12981 camera_metadata_t* fwk_metadata = NULL;
12982 QCamera3HardwareInterface *hw =
12983 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12984 if (!hw) {
12985 LOGE("NULL camera device");
12986 return NULL;
12987 }
12988
12989 fwk_metadata = hw->translateCapabilityToMetadata(type);
12990
12991 LOGD("X");
12992 return fwk_metadata;
12993}
12994
12995/*===========================================================================
12996 * FUNCTION : process_capture_request
12997 *
12998 * DESCRIPTION: Static wrapper that forwards a capture request to the HAL instance for orchestration
12999 *
13000 * PARAMETERS :
13001 *
13002 *
13003 * RETURN :
13004 *==========================================================================*/
13005int QCamera3HardwareInterface::process_capture_request(
13006 const struct camera3_device *device,
13007 camera3_capture_request_t *request)
13008{
13009 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013010 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013011 QCamera3HardwareInterface *hw =
13012 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13013 if (!hw) {
13014 LOGE("NULL camera device");
13015 return -EINVAL;
13016 }
13017
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013018 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013019 LOGD("X");
13020 return rc;
13021}
13022
13023/*===========================================================================
13024 * FUNCTION : dump
13025 *
13026 * DESCRIPTION: Dump HAL state to the given file descriptor (invoked via dumpsys)
13027 *
13028 * PARAMETERS :
13029 *
13030 *
13031 * RETURN :
13032 *==========================================================================*/
13033
13034void QCamera3HardwareInterface::dump(
13035 const struct camera3_device *device, int fd)
13036{
13037 /* Log level property is read when "adb shell dumpsys media.camera" is
13038 called so that the log level can be controlled without restarting
13039 the media server */
13040 getLogLevel();
13041
13042 LOGD("E");
13043 QCamera3HardwareInterface *hw =
13044 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13045 if (!hw) {
13046 LOGE("NULL camera device");
13047 return;
13048 }
13049
13050 hw->dump(fd);
13051 LOGD("X");
13052 return;
13053}
13054
13055/*===========================================================================
13056 * FUNCTION : flush
13057 *
13058 * DESCRIPTION: Flush all in-flight captures and return their buffers to the framework
13059 *
13060 * PARAMETERS :
13061 *   @device : camera3 device handle
13062 *
13063 * RETURN : 0 on success, -EINVAL or -ENODEV on failure
13064 *==========================================================================*/
13065
13066int QCamera3HardwareInterface::flush(
13067 const struct camera3_device *device)
13068{
13069 int rc;
13070 LOGD("E");
13071 QCamera3HardwareInterface *hw =
13072 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13073 if (!hw) {
13074 LOGE("NULL camera device");
13075 return -EINVAL;
13076 }
13077
13078 pthread_mutex_lock(&hw->mMutex);
13079 // Validate current state
13080 switch (hw->mState) {
13081 case STARTED:
13082 /* valid state */
13083 break;
13084
13085 case ERROR:
13086 pthread_mutex_unlock(&hw->mMutex);
13087 hw->handleCameraDeviceError();
13088 return -ENODEV;
13089
13090 default:
13091 LOGI("Flush returned during state %d", hw->mState);
13092 pthread_mutex_unlock(&hw->mMutex);
13093 return 0;
13094 }
13095 pthread_mutex_unlock(&hw->mMutex);
13096
13097 rc = hw->flush(true /* restart channels */ );
13098 LOGD("X");
13099 return rc;
13100}
13101
13102/*===========================================================================
13103 * FUNCTION : close_camera_device
13104 *
13105 * DESCRIPTION: Close the camera device and release all associated resources
13106 *
13107 * PARAMETERS :
13108 *   @device : hw_device_t handle of the camera to be closed
13109 *
13110 * RETURN : NO_ERROR on success, BAD_VALUE if the device handle is invalid
13111 *==========================================================================*/
13112int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13113{
13114 int ret = NO_ERROR;
13115 QCamera3HardwareInterface *hw =
13116 reinterpret_cast<QCamera3HardwareInterface *>(
13117 reinterpret_cast<camera3_device_t *>(device)->priv);
13118 if (!hw) {
13119 LOGE("NULL camera device");
13120 return BAD_VALUE;
13121 }
13122
13123 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13124 delete hw;
13125 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013126 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013127 return ret;
13128}
13129
13130/*===========================================================================
13131 * FUNCTION : getWaveletDenoiseProcessPlate
13132 *
13133 * DESCRIPTION: query wavelet denoise process plate
13134 *
13135 * PARAMETERS : None
13136 *
13137 * RETURN : WNR process plate value
13138 *==========================================================================*/
13139cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13140{
13141 char prop[PROPERTY_VALUE_MAX];
13142 memset(prop, 0, sizeof(prop));
13143 property_get("persist.denoise.process.plates", prop, "0");
13144 int processPlate = atoi(prop);
13145 switch(processPlate) {
13146 case 0:
13147 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13148 case 1:
13149 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13150 case 2:
13151 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13152 case 3:
13153 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13154 default:
13155 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13156 }
13157}
13158
13159
13160/*===========================================================================
13161 * FUNCTION : getTemporalDenoiseProcessPlate
13162 *
13163 * DESCRIPTION: query temporal denoise process plate
13164 *
13165 * PARAMETERS : None
13166 *
13167 * RETURN : TNR process plate value
13168 *==========================================================================*/
13169cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13170{
13171 char prop[PROPERTY_VALUE_MAX];
13172 memset(prop, 0, sizeof(prop));
13173 property_get("persist.tnr.process.plates", prop, "0");
13174 int processPlate = atoi(prop);
13175 switch(processPlate) {
13176 case 0:
13177 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13178 case 1:
13179 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13180 case 2:
13181 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13182 case 3:
13183 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13184 default:
13185 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13186 }
13187}
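// Editorial note: both plate getters above read a persist property and fall back
// to CAM_WAVELET_DENOISE_STREAMLINE_YCBCR for out-of-range values. A minimal
// bring-up sketch, assuming only the property names used above (the shell
// commands and the "hw" pointer are illustrative, not part of this file):
//
//   adb shell setprop persist.denoise.process.plates 2   // WNR -> STREAMLINE_YCBCR
//   adb shell setprop persist.tnr.process.plates 1       // TNR -> CBCR_ONLY
//
//   cam_denoise_process_type_t wnrPlate = hw->getWaveletDenoiseProcessPlate();
//   cam_denoise_process_type_t tnrPlate = hw->getTemporalDenoiseProcessPlate();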
13188
13189
13190/*===========================================================================
13191 * FUNCTION : extractSceneMode
13192 *
13193 * DESCRIPTION: Extract scene mode from the framework's settings metadata
13194 *
13195 * PARAMETERS :
13196 * @frame_settings: CameraMetadata reference
13197 * @metaMode: ANDROID_CONTROL_MODE value set by the framework
13198 * @hal_metadata: hal metadata structure
13199 *
13200 * RETURN : NO_ERROR on success, error code otherwise
13201 *==========================================================================*/
13202int32_t QCamera3HardwareInterface::extractSceneMode(
13203 const CameraMetadata &frame_settings, uint8_t metaMode,
13204 metadata_buffer_t *hal_metadata)
13205{
13206 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013207 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13208
13209 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13210 LOGD("Ignoring control mode OFF_KEEP_STATE");
13211 return NO_ERROR;
13212 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013213
13214 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13215 camera_metadata_ro_entry entry =
13216 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13217 if (0 == entry.count)
13218 return rc;
13219
13220 uint8_t fwk_sceneMode = entry.data.u8[0];
13221
13222 int val = lookupHalName(SCENE_MODES_MAP,
13223 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13224 fwk_sceneMode);
13225 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013226 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013227 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013228 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013229 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013230
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013231 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13232 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13233 }
13234
13235 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13236 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013237 cam_hdr_param_t hdr_params;
13238 hdr_params.hdr_enable = 1;
13239 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13240 hdr_params.hdr_need_1x = false;
13241 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13242 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13243 rc = BAD_VALUE;
13244 }
13245 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013246
Thierry Strudel3d639192016-09-09 11:52:26 -070013247 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13248 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13249 rc = BAD_VALUE;
13250 }
13251 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013252
13253 if (mForceHdrSnapshot) {
13254 cam_hdr_param_t hdr_params;
13255 hdr_params.hdr_enable = 1;
13256 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13257 hdr_params.hdr_need_1x = false;
13258 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13259 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13260 rc = BAD_VALUE;
13261 }
13262 }
13263
Thierry Strudel3d639192016-09-09 11:52:26 -070013264 return rc;
13265}
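// Editorial note: extractSceneMode() only maps a scene mode when the framework
// sets ANDROID_CONTROL_MODE to USE_SCENE_MODE. A minimal sketch of the
// framework-side settings that reach this path (illustrative only, not part of
// the HAL):
//
//   CameraMetadata settings;
//   uint8_t controlMode = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
//   uint8_t sceneMode   = ANDROID_CONTROL_SCENE_MODE_HDR;
//   settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
//   settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
//   // The HAL maps sceneMode through SCENE_MODES_MAP via lookupHalName() and
//   // programs CAM_INTF_PARM_BESTSHOT_MODE / HDR bracketing as shown above.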
13266
13267/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013268 * FUNCTION : setVideoHdrMode
13269 *
13270 * DESCRIPTION: Set video HDR mode from the framework's settings metadata
13271 *
13272 * PARAMETERS :
13273 * @hal_metadata: hal metadata structure
13274 * @vhdr: requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE vendor tag value)
13275 *
13276 * RETURN : NO_ERROR on success, BAD_VALUE for an invalid mode
13277 *==========================================================================*/
13278int32_t QCamera3HardwareInterface::setVideoHdrMode(
13279 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13280{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013281 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13282 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13283 }
13284
13285 LOGE("Invalid Video HDR mode %d!", vhdr);
13286 return BAD_VALUE;
13287}
13288
13289/*===========================================================================
13290 * FUNCTION : setSensorHDR
13291 *
13292 * DESCRIPTION: Enable/disable sensor HDR.
13293 *
13294 * PARAMETERS :
13295 * @hal_metadata: hal metadata structure
13296 * @enable: boolean whether to enable/disable sensor HDR
13297 * @isVideoHdrEnable: true when the request originates from video HDR control
13298 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13299 *==========================================================================*/
13300int32_t QCamera3HardwareInterface::setSensorHDR(
13301 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13302{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013303 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013304 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13305
13306 if (enable) {
13307 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13308 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13309 #ifdef _LE_CAMERA_
13310 //Default to staggered HDR for IOT
13311 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13312 #else
13313 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13314 #endif
13315 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13316 }
13317
13318 bool isSupported = false;
13319 switch (sensor_hdr) {
13320 case CAM_SENSOR_HDR_IN_SENSOR:
13321 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13322 CAM_QCOM_FEATURE_SENSOR_HDR) {
13323 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013324 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013325 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013326 break;
13327 case CAM_SENSOR_HDR_ZIGZAG:
13328 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13329 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13330 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013331 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013332 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013333 break;
13334 case CAM_SENSOR_HDR_STAGGERED:
13335 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13336 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13337 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013338 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013339 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013340 break;
13341 case CAM_SENSOR_HDR_OFF:
13342 isSupported = true;
13343 LOGD("Turning off sensor HDR");
13344 break;
13345 default:
13346 LOGE("HDR mode %d not supported", sensor_hdr);
13347 rc = BAD_VALUE;
13348 break;
13349 }
13350
13351 if(isSupported) {
13352 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13353 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13354 rc = BAD_VALUE;
13355 } else {
13356 if(!isVideoHdrEnable)
13357 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013358 }
13359 }
13360 return rc;
13361}
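// Editorial note: when HDR is requested, the actual sensor HDR flavor comes from
// the persist.camera.sensor.hdr property and is validated against the sensor's
// qcom_supported_feature_mask. From the defaults above, "3" selects staggered
// HDR (the IOT default); the assumption here is that the remaining values map
// onto cam_sensor_hdr_type_t in enum order. Illustrative bring-up usage only:
//
//   adb shell setprop persist.camera.sensor.hdr 3
//   // setSensorHDR(hal_metadata, true) will then attempt CAM_SENSOR_HDR_STAGGERED,
//   // provided CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR is advertised by the sensor.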
13362
13363/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013364 * FUNCTION : needRotationReprocess
13365 *
13366 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13367 *
13368 * PARAMETERS : none
13369 *
13370 * RETURN : true: needed
13371 * false: no need
13372 *==========================================================================*/
13373bool QCamera3HardwareInterface::needRotationReprocess()
13374{
13375 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13376 // current rotation is not zero, and pp has the capability to process rotation
13377 LOGH("need do reprocess for rotation");
13378 return true;
13379 }
13380
13381 return false;
13382}
13383
13384/*===========================================================================
13385 * FUNCTION : needReprocess
13386 *
13387 * DESCRIPTION: if reprocess is needed
13388 *
13389 * PARAMETERS : none
13390 *
13391 * RETURN : true: needed
13392 * false: no need
13393 *==========================================================================*/
13394bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13395{
13396 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13397 // TODO: add for ZSL HDR later
13398 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13399 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13400 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13401 return true;
13402 } else {
13403 LOGH("already post processed frame");
13404 return false;
13405 }
13406 }
13407 return needRotationReprocess();
13408}
13409
13410/*===========================================================================
13411 * FUNCTION : needJpegExifRotation
13412 *
13413 * DESCRIPTION: if rotation needs to be handled via the JPEG EXIF orientation tag
13414 *
13415 * PARAMETERS : none
13416 *
13417 * RETURN : true: needed
13418 * false: no need
13419 *==========================================================================*/
13420bool QCamera3HardwareInterface::needJpegExifRotation()
13421{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013422 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013423 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13424 LOGD("Need use Jpeg EXIF Rotation");
13425 return true;
13426 }
13427 return false;
13428}
13429
13430/*===========================================================================
13431 * FUNCTION : addOfflineReprocChannel
13432 *
13433 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13434 * coming from input channel
13435 *
13436 * PARAMETERS :
13437 * @config : reprocess configuration
13438 * @inputChHandle : pointer to the input (source) channel
13439 *
13440 *
13441 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13442 *==========================================================================*/
13443QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13444 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13445{
13446 int32_t rc = NO_ERROR;
13447 QCamera3ReprocessChannel *pChannel = NULL;
13448
13449 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013450 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13451 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013452 if (NULL == pChannel) {
13453 LOGE("no mem for reprocess channel");
13454 return NULL;
13455 }
13456
13457 rc = pChannel->initialize(IS_TYPE_NONE);
13458 if (rc != NO_ERROR) {
13459 LOGE("init reprocess channel failed, ret = %d", rc);
13460 delete pChannel;
13461 return NULL;
13462 }
13463
13464 // pp feature config
13465 cam_pp_feature_config_t pp_config;
13466 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13467
13468 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13469 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13470 & CAM_QCOM_FEATURE_DSDN) {
13471 //Use CPP CDS in case h/w supports it.
13472 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13473 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13474 }
13475 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13476 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13477 }
13478
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013479 if (config.hdr_param.hdr_enable) {
13480 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13481 pp_config.hdr_param = config.hdr_param;
13482 }
13483
13484 if (mForceHdrSnapshot) {
13485 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13486 pp_config.hdr_param.hdr_enable = 1;
13487 pp_config.hdr_param.hdr_need_1x = 0;
13488 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13489 }
13490
Thierry Strudel3d639192016-09-09 11:52:26 -070013491 rc = pChannel->addReprocStreamsFromSource(pp_config,
13492 config,
13493 IS_TYPE_NONE,
13494 mMetadataChannel);
13495
13496 if (rc != NO_ERROR) {
13497 delete pChannel;
13498 return NULL;
13499 }
13500 return pChannel;
13501}
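// Editorial note: a minimal usage sketch for the factory above (illustrative
// only; "reprocCfg" stands for a reprocess_config_t already populated by the
// caller, e.g. with padding info and an optional hdr_param as consumed above):
//
//   QCamera3ReprocessChannel *reprocCh =
//           addOfflineReprocChannel(reprocCfg, mPictureChannel);
//   if (reprocCh == NULL) {
//       LOGE("Offline reprocess channel creation failed");
//   }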
13502
13503/*===========================================================================
13504 * FUNCTION : getMobicatMask
13505 *
13506 * DESCRIPTION: returns mobicat mask
13507 *
13508 * PARAMETERS : none
13509 *
13510 * RETURN : mobicat mask
13511 *
13512 *==========================================================================*/
13513uint8_t QCamera3HardwareInterface::getMobicatMask()
13514{
13515 return m_MobicatMask;
13516}
13517
13518/*===========================================================================
13519 * FUNCTION : setMobicat
13520 *
13521 * DESCRIPTION: set Mobicat on/off.
13522 *
13523 * PARAMETERS :
13524 * @params : none
13525 *
13526 * RETURN : int32_t type of status
13527 * NO_ERROR -- success
13528 * non-zero failure code
13529 *==========================================================================*/
13530int32_t QCamera3HardwareInterface::setMobicat()
13531{
13532 char value [PROPERTY_VALUE_MAX];
13533 property_get("persist.camera.mobicat", value, "0");
13534 int32_t ret = NO_ERROR;
13535 uint8_t enableMobi = (uint8_t)atoi(value);
13536
13537 if (enableMobi) {
13538 tune_cmd_t tune_cmd;
13539 tune_cmd.type = SET_RELOAD_CHROMATIX;
13540 tune_cmd.module = MODULE_ALL;
13541 tune_cmd.value = TRUE;
13542 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13543 CAM_INTF_PARM_SET_VFE_COMMAND,
13544 tune_cmd);
13545
13546 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13547 CAM_INTF_PARM_SET_PP_COMMAND,
13548 tune_cmd);
13549 }
13550 m_MobicatMask = enableMobi;
13551
13552 return ret;
13553}
13554
13555/*===========================================================================
13556* FUNCTION : getLogLevel
13557*
13558* DESCRIPTION: Reads the log level property into a variable
13559*
13560* PARAMETERS :
13561* None
13562*
13563* RETURN :
13564* None
13565*==========================================================================*/
13566void QCamera3HardwareInterface::getLogLevel()
13567{
13568 char prop[PROPERTY_VALUE_MAX];
13569 uint32_t globalLogLevel = 0;
13570
13571 property_get("persist.camera.hal.debug", prop, "0");
13572 int val = atoi(prop);
13573 if (0 <= val) {
13574 gCamHal3LogLevel = (uint32_t)val;
13575 }
13576
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013577 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013578 gKpiDebugLevel = atoi(prop);
13579
13580 property_get("persist.camera.global.debug", prop, "0");
13581 val = atoi(prop);
13582 if (0 <= val) {
13583 globalLogLevel = (uint32_t)val;
13584 }
13585
13586 /* Highest log level among hal.logs and global.logs is selected */
13587 if (gCamHal3LogLevel < globalLogLevel)
13588 gCamHal3LogLevel = globalLogLevel;
13589
13590 return;
13591}
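// Editorial note: the debug levels are re-read on every dumpsys call (see dump()
// above), so they can be raised at runtime without restarting the media server.
// Illustrative usage of the properties referenced above:
//
//   adb shell setprop persist.camera.hal.debug 3
//   adb shell setprop persist.camera.global.debug 2
//   adb shell dumpsys media.camera   // triggers getLogLevel(); HAL keeps max(3, 2)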
13592
13593/*===========================================================================
13594 * FUNCTION : validateStreamRotations
13595 *
13596 * DESCRIPTION: Check if the rotations requested are supported
13597 *
13598 * PARAMETERS :
13599 * @stream_list : streams to be configured
13600 *
13601 * RETURN : NO_ERROR on success
13602 * -EINVAL on failure
13603 *
13604 *==========================================================================*/
13605int QCamera3HardwareInterface::validateStreamRotations(
13606 camera3_stream_configuration_t *streamList)
13607{
13608 int rc = NO_ERROR;
13609
13610 /*
13611 * Loop through all streams requested in configuration
13612 * Check if unsupported rotations have been requested on any of them
13613 */
13614 for (size_t j = 0; j < streamList->num_streams; j++){
13615 camera3_stream_t *newStream = streamList->streams[j];
13616
13617 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13618 bool isImplDef = (newStream->format ==
13619 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13620 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13621 isImplDef);
13622
13623 if (isRotated && (!isImplDef || isZsl)) {
13624 LOGE("Error: Unsupported rotation of %d requested for stream "
13625 "type:%d and stream format:%d",
13626 newStream->rotation, newStream->stream_type,
13627 newStream->format);
13628 rc = -EINVAL;
13629 break;
13630 }
13631 }
13632
13633 return rc;
13634}
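// Editorial note: only implementation-defined, non-ZSL streams may carry a
// non-zero rotation. A configuration that the check above would reject
// (illustrative sketch only):
//
//   camera3_stream_t badStream = {};
//   badStream.stream_type = CAMERA3_STREAM_OUTPUT;
//   badStream.format      = HAL_PIXEL_FORMAT_YCbCr_420_888;   // not impl-defined
//   badStream.rotation    = CAMERA3_STREAM_ROTATION_90;       // rotation requested
//   // validateStreamRotations() returns -EINVAL for a stream_list containing it.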
13635
13636/*===========================================================================
13637* FUNCTION : getFlashInfo
13638*
13639* DESCRIPTION: Retrieve information about whether the device has a flash.
13640*
13641* PARAMETERS :
13642* @cameraId : Camera id to query
13643* @hasFlash : Boolean indicating whether there is a flash device
13644* associated with given camera
13645* @flashNode : If a flash device exists, this will be its device node.
13646*
13647* RETURN :
13648* None
13649*==========================================================================*/
13650void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13651 bool& hasFlash,
13652 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13653{
13654 cam_capability_t* camCapability = gCamCapability[cameraId];
13655 if (NULL == camCapability) {
13656 hasFlash = false;
13657 flashNode[0] = '\0';
13658 } else {
13659 hasFlash = camCapability->flash_available;
13660 strlcpy(flashNode,
13661 (char*)camCapability->flash_dev_name,
13662 QCAMERA_MAX_FILEPATH_LENGTH);
13663 }
13664}
13665
13666/*===========================================================================
13667* FUNCTION : getEepromVersionInfo
13668*
13669* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13670*
13671* PARAMETERS : None
13672*
13673* RETURN : string describing EEPROM version
13674* "\0" if no such info available
13675*==========================================================================*/
13676const char *QCamera3HardwareInterface::getEepromVersionInfo()
13677{
13678 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13679}
13680
13681/*===========================================================================
13682* FUNCTION : getLdafCalib
13683*
13684* DESCRIPTION: Retrieve Laser AF calibration data
13685*
13686* PARAMETERS : None
13687*
13688* RETURN : Two uint32_t describing laser AF calibration data
13689* NULL if none is available.
13690*==========================================================================*/
13691const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13692{
13693 if (mLdafCalibExist) {
13694 return &mLdafCalib[0];
13695 } else {
13696 return NULL;
13697 }
13698}
13699
13700/*===========================================================================
13701 * FUNCTION : dynamicUpdateMetaStreamInfo
13702 *
13703 * DESCRIPTION: This function:
13704 * (1) stops all the channels
13705 * (2) returns error on pending requests and buffers
13706 * (3) sends metastream_info in setparams
13707 * (4) starts all channels
13708 * This is useful when sensor has to be restarted to apply any
13709 * settings such as frame rate from a different sensor mode
13710 *
13711 * PARAMETERS : None
13712 *
13713 * RETURN : NO_ERROR on success
13714 * Error codes on failure
13715 *
13716 *==========================================================================*/
13717int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13718{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013719 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013720 int rc = NO_ERROR;
13721
13722 LOGD("E");
13723
13724 rc = stopAllChannels();
13725 if (rc < 0) {
13726 LOGE("stopAllChannels failed");
13727 return rc;
13728 }
13729
13730 rc = notifyErrorForPendingRequests();
13731 if (rc < 0) {
13732 LOGE("notifyErrorForPendingRequests failed");
13733 return rc;
13734 }
13735
13736 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13737 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13738 " Format:%d",
13739 mStreamConfigInfo.type[i],
13740 mStreamConfigInfo.stream_sizes[i].width,
13741 mStreamConfigInfo.stream_sizes[i].height,
13742 mStreamConfigInfo.postprocess_mask[i],
13743 mStreamConfigInfo.format[i]);
13744 }
13745
13746 /* Send meta stream info once again so that ISP can start */
13747 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13748 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13749 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13750 mParameters);
13751 if (rc < 0) {
13752 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13753 }
13754
13755 rc = startAllChannels();
13756 if (rc < 0) {
13757 LOGE("startAllChannels failed");
13758 return rc;
13759 }
13760
13761 LOGD("X");
13762 return rc;
13763}
13764
13765/*===========================================================================
13766 * FUNCTION : stopAllChannels
13767 *
13768 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13769 *
13770 * PARAMETERS : None
13771 *
13772 * RETURN : NO_ERROR on success
13773 * Error codes on failure
13774 *
13775 *==========================================================================*/
13776int32_t QCamera3HardwareInterface::stopAllChannels()
13777{
13778 int32_t rc = NO_ERROR;
13779
13780 LOGD("Stopping all channels");
13781 // Stop the Streams/Channels
13782 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13783 it != mStreamInfo.end(); it++) {
13784 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13785 if (channel) {
13786 channel->stop();
13787 }
13788 (*it)->status = INVALID;
13789 }
13790
13791 if (mSupportChannel) {
13792 mSupportChannel->stop();
13793 }
13794 if (mAnalysisChannel) {
13795 mAnalysisChannel->stop();
13796 }
13797 if (mRawDumpChannel) {
13798 mRawDumpChannel->stop();
13799 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013800 if (mHdrPlusRawSrcChannel) {
13801 mHdrPlusRawSrcChannel->stop();
13802 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013803 if (mMetadataChannel) {
13804 /* If content of mStreamInfo is not 0, there is metadata stream */
13805 mMetadataChannel->stop();
13806 }
13807
13808 LOGD("All channels stopped");
13809 return rc;
13810}
13811
13812/*===========================================================================
13813 * FUNCTION : startAllChannels
13814 *
13815 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13816 *
13817 * PARAMETERS : None
13818 *
13819 * RETURN : NO_ERROR on success
13820 * Error codes on failure
13821 *
13822 *==========================================================================*/
13823int32_t QCamera3HardwareInterface::startAllChannels()
13824{
13825 int32_t rc = NO_ERROR;
13826
13827 LOGD("Start all channels ");
13828 // Start the Streams/Channels
13829 if (mMetadataChannel) {
13830 /* If content of mStreamInfo is not 0, there is metadata stream */
13831 rc = mMetadataChannel->start();
13832 if (rc < 0) {
13833 LOGE("META channel start failed");
13834 return rc;
13835 }
13836 }
13837 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13838 it != mStreamInfo.end(); it++) {
13839 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13840 if (channel) {
13841 rc = channel->start();
13842 if (rc < 0) {
13843 LOGE("channel start failed");
13844 return rc;
13845 }
13846 }
13847 }
13848 if (mAnalysisChannel) {
13849 mAnalysisChannel->start();
13850 }
13851 if (mSupportChannel) {
13852 rc = mSupportChannel->start();
13853 if (rc < 0) {
13854 LOGE("Support channel start failed");
13855 return rc;
13856 }
13857 }
13858 if (mRawDumpChannel) {
13859 rc = mRawDumpChannel->start();
13860 if (rc < 0) {
13861 LOGE("RAW dump channel start failed");
13862 return rc;
13863 }
13864 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013865 if (mHdrPlusRawSrcChannel) {
13866 rc = mHdrPlusRawSrcChannel->start();
13867 if (rc < 0) {
13868 LOGE("HDR+ RAW channel start failed");
13869 return rc;
13870 }
13871 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013872
13873 LOGD("All channels started");
13874 return rc;
13875}
13876
13877/*===========================================================================
13878 * FUNCTION : notifyErrorForPendingRequests
13879 *
13880 * DESCRIPTION: This function sends error for all the pending requests/buffers
13881 *
13882 * PARAMETERS : None
13883 *
13884 * RETURN : Error codes
13885 * NO_ERROR on success
13886 *
13887 *==========================================================================*/
13888int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13889{
Emilian Peev7650c122017-01-19 08:24:33 -080013890 notifyErrorFoPendingDepthData(mDepthChannel);
13891
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013892 auto pendingRequest = mPendingRequestsList.begin();
13893 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070013894
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013895 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
13896 // buffers (for which buffers aren't sent yet).
13897 while (pendingRequest != mPendingRequestsList.end() ||
13898 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
13899 if (pendingRequest == mPendingRequestsList.end() ||
13900 pendingBuffer->frame_number < pendingRequest->frame_number) {
13901 // If metadata for this frame was sent, notify about a buffer error and returns buffers
13902 // with error.
13903 for (auto &info : pendingBuffer->mPendingBufferList) {
13904 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070013905 camera3_notify_msg_t notify_msg;
13906 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13907 notify_msg.type = CAMERA3_MSG_ERROR;
13908 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013909 notify_msg.message.error.error_stream = info.stream;
13910 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013911 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013912
13913 camera3_stream_buffer_t buffer = {};
13914 buffer.acquire_fence = -1;
13915 buffer.release_fence = -1;
13916 buffer.buffer = info.buffer;
13917 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
13918 buffer.stream = info.stream;
13919 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070013920 }
13921
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013922 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
13923 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
13924 pendingBuffer->frame_number > pendingRequest->frame_number) {
13925 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070013926 camera3_notify_msg_t notify_msg;
13927 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13928 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013929 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
13930 notify_msg.message.error.error_stream = nullptr;
13931 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013932 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013933
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013934 if (pendingRequest->input_buffer != nullptr) {
13935 camera3_capture_result result = {};
13936 result.frame_number = pendingRequest->frame_number;
13937 result.result = nullptr;
13938 result.input_buffer = pendingRequest->input_buffer;
13939 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013940 }
13941
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013942 mShutterDispatcher.clear(pendingRequest->frame_number);
13943 pendingRequest = mPendingRequestsList.erase(pendingRequest);
13944 } else {
13945 // If both buffers and result metadata weren't sent yet, notify about a request error
13946 // and return buffers with error.
13947 for (auto &info : pendingBuffer->mPendingBufferList) {
13948 camera3_notify_msg_t notify_msg;
13949 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13950 notify_msg.type = CAMERA3_MSG_ERROR;
13951 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13952 notify_msg.message.error.error_stream = info.stream;
13953 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
13954 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013955
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013956 camera3_stream_buffer_t buffer = {};
13957 buffer.acquire_fence = -1;
13958 buffer.release_fence = -1;
13959 buffer.buffer = info.buffer;
13960 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
13961 buffer.stream = info.stream;
13962 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
13963 }
13964
13965 if (pendingRequest->input_buffer != nullptr) {
13966 camera3_capture_result result = {};
13967 result.frame_number = pendingRequest->frame_number;
13968 result.result = nullptr;
13969 result.input_buffer = pendingRequest->input_buffer;
13970 orchestrateResult(&result);
13971 }
13972
13973 mShutterDispatcher.clear(pendingRequest->frame_number);
13974 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
13975 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070013976 }
13977 }
13978
13979 /* Reset pending frame Drop list and requests list */
13980 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013981 mShutterDispatcher.clear();
13982 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070013983 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013984 LOGH("Cleared all the pending buffers ");
13985
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013986 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013987}
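// Editorial note: the loop above walks the pending-request and pending-buffer
// lists in frame-number order and picks one of three error paths per frame:
//   - result metadata already sent, buffers pending -> CAMERA3_MSG_ERROR_BUFFER per buffer
//   - buffers already sent, metadata pending        -> CAMERA3_MSG_ERROR_RESULT
//   - neither sent yet                              -> CAMERA3_MSG_ERROR_REQUEST plus buffers
// In every case the outstanding buffers are returned with
// CAMERA3_BUFFER_STATUS_ERROR through mOutputBufferDispatcher, and the shutter
// dispatcher is cleared so no one waits on flushed frames.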
13988
13989bool QCamera3HardwareInterface::isOnEncoder(
13990 const cam_dimension_t max_viewfinder_size,
13991 uint32_t width, uint32_t height)
13992{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013993 return ((width > (uint32_t)max_viewfinder_size.width) ||
13994 (height > (uint32_t)max_viewfinder_size.height) ||
13995 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13996 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013997}
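// Editorial note: a stream is considered "on the encoder path" if it exceeds the
// maximum viewfinder size in either dimension, or exceeds 4K in either
// dimension. Worked example, assuming a max_viewfinder_size of 1920x1080:
// a 1280x720 stream -> false; a 3840x2160 stream -> true (exceeds 1920x1080).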
13998
13999/*===========================================================================
14000 * FUNCTION : setBundleInfo
14001 *
14002 * DESCRIPTION: Set bundle info for all streams that are bundle.
14003 *
14004 * PARAMETERS : None
14005 *
14006 * RETURN : NO_ERROR on success
14007 * Error codes on failure
14008 *==========================================================================*/
14009int32_t QCamera3HardwareInterface::setBundleInfo()
14010{
14011 int32_t rc = NO_ERROR;
14012
14013 if (mChannelHandle) {
14014 cam_bundle_config_t bundleInfo;
14015 memset(&bundleInfo, 0, sizeof(bundleInfo));
14016 rc = mCameraHandle->ops->get_bundle_info(
14017 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14018 if (rc != NO_ERROR) {
14019 LOGE("get_bundle_info failed");
14020 return rc;
14021 }
14022 if (mAnalysisChannel) {
14023 mAnalysisChannel->setBundleInfo(bundleInfo);
14024 }
14025 if (mSupportChannel) {
14026 mSupportChannel->setBundleInfo(bundleInfo);
14027 }
14028 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14029 it != mStreamInfo.end(); it++) {
14030 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14031 channel->setBundleInfo(bundleInfo);
14032 }
14033 if (mRawDumpChannel) {
14034 mRawDumpChannel->setBundleInfo(bundleInfo);
14035 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014036 if (mHdrPlusRawSrcChannel) {
14037 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14038 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014039 }
14040
14041 return rc;
14042}
14043
14044/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014045 * FUNCTION : setInstantAEC
14046 *
14047 * DESCRIPTION: Set Instant AEC related params.
14048 *
14049 * PARAMETERS :
14050 * @meta: CameraMetadata reference
14051 *
14052 * RETURN : NO_ERROR on success
14053 * Error codes on failure
14054 *==========================================================================*/
14055int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14056{
14057 int32_t rc = NO_ERROR;
14058 uint8_t val = 0;
14059 char prop[PROPERTY_VALUE_MAX];
14060
14061 // First try to configure instant AEC from framework metadata
14062 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14063 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14064 }
14065
14066 // If framework did not set this value, try to read from set prop.
14067 if (val == 0) {
14068 memset(prop, 0, sizeof(prop));
14069 property_get("persist.camera.instant.aec", prop, "0");
14070 val = (uint8_t)atoi(prop);
14071 }
14072
14073 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14074 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14075 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14076 mInstantAEC = val;
14077 mInstantAECSettledFrameNumber = 0;
14078 mInstantAecFrameIdxCount = 0;
14079 LOGH("instantAEC value set %d",val);
14080 if (mInstantAEC) {
14081 memset(prop, 0, sizeof(prop));
14082 property_get("persist.camera.ae.instant.bound", prop, "10");
14083 int32_t aec_frame_skip_cnt = atoi(prop);
14084 if (aec_frame_skip_cnt >= 0) {
14085 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14086 } else {
14087 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14088 rc = BAD_VALUE;
14089 }
14090 }
14091 } else {
14092 LOGE("Bad instant aec value set %d", val);
14093 rc = BAD_VALUE;
14094 }
14095 return rc;
14096}
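// Editorial note: instant AEC can be requested either through the
// QCAMERA3_INSTANT_AEC_MODE vendor tag in the request settings or via the
// persist.camera.instant.aec property; the display frame-skip bound comes from
// persist.camera.ae.instant.bound. Illustrative bring-up usage only (the value
// must map into cam_aec_convergence_type, as validated above):
//
//   adb shell setprop persist.camera.instant.aec 1
//   adb shell setprop persist.camera.ae.instant.bound 10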
14097
14098/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014099 * FUNCTION : get_num_overall_buffers
14100 *
14101 * DESCRIPTION: Estimate number of pending buffers across all requests.
14102 *
14103 * PARAMETERS : None
14104 *
14105 * RETURN : Number of overall pending buffers
14106 *
14107 *==========================================================================*/
14108uint32_t PendingBuffersMap::get_num_overall_buffers()
14109{
14110 uint32_t sum_buffers = 0;
14111 for (auto &req : mPendingBuffersInRequest) {
14112 sum_buffers += req.mPendingBufferList.size();
14113 }
14114 return sum_buffers;
14115}
14116
14117/*===========================================================================
14118 * FUNCTION : removeBuf
14119 *
14120 * DESCRIPTION: Remove a matching buffer from tracker.
14121 *
14122 * PARAMETERS : @buffer: image buffer for the callback
14123 *
14124 * RETURN : None
14125 *
14126 *==========================================================================*/
14127void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14128{
14129 bool buffer_found = false;
14130 for (auto req = mPendingBuffersInRequest.begin();
14131 req != mPendingBuffersInRequest.end(); req++) {
14132 for (auto k = req->mPendingBufferList.begin();
14133 k != req->mPendingBufferList.end(); k++ ) {
14134 if (k->buffer == buffer) {
14135 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14136 req->frame_number, buffer);
14137 k = req->mPendingBufferList.erase(k);
14138 if (req->mPendingBufferList.empty()) {
14139 // Remove this request from Map
14140 req = mPendingBuffersInRequest.erase(req);
14141 }
14142 buffer_found = true;
14143 break;
14144 }
14145 }
14146 if (buffer_found) {
14147 break;
14148 }
14149 }
14150 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14151 get_num_overall_buffers());
14152}
14153
14154/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014155 * FUNCTION : getBufErrStatus
14156 *
14157 * DESCRIPTION: get buffer error status
14158 *
14159 * PARAMETERS : @buffer: buffer handle
14160 *
14161 * RETURN : Error status
14162 *
14163 *==========================================================================*/
14164int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14165{
14166 for (auto& req : mPendingBuffersInRequest) {
14167 for (auto& k : req.mPendingBufferList) {
14168 if (k.buffer == buffer)
14169 return k.bufStatus;
14170 }
14171 }
14172 return CAMERA3_BUFFER_STATUS_OK;
14173}
14174
14175/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014176 * FUNCTION : setPAAFSupport
14177 *
14178 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14179 * feature mask according to stream type and filter
14180 * arrangement
14181 *
14182 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14183 * @stream_type: stream type
14184 * @filter_arrangement: filter arrangement
14185 *
14186 * RETURN : None
14187 *==========================================================================*/
14188void QCamera3HardwareInterface::setPAAFSupport(
14189 cam_feature_mask_t& feature_mask,
14190 cam_stream_type_t stream_type,
14191 cam_color_filter_arrangement_t filter_arrangement)
14192{
Thierry Strudel3d639192016-09-09 11:52:26 -070014193 switch (filter_arrangement) {
14194 case CAM_FILTER_ARRANGEMENT_RGGB:
14195 case CAM_FILTER_ARRANGEMENT_GRBG:
14196 case CAM_FILTER_ARRANGEMENT_GBRG:
14197 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014198 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14199 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014200 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014201 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14202 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014203 }
14204 break;
14205 case CAM_FILTER_ARRANGEMENT_Y:
14206 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14207 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14208 }
14209 break;
14210 default:
14211 break;
14212 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014213 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14214 feature_mask, stream_type, filter_arrangement);
14215
14216
Thierry Strudel3d639192016-09-09 11:52:26 -070014217}
14218
14219/*===========================================================================
14220* FUNCTION : getSensorMountAngle
14221*
14222* DESCRIPTION: Retrieve sensor mount angle
14223*
14224* PARAMETERS : None
14225*
14226* RETURN : sensor mount angle in uint32_t
14227*==========================================================================*/
14228uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14229{
14230 return gCamCapability[mCameraId]->sensor_mount_angle;
14231}
14232
14233/*===========================================================================
14234* FUNCTION : getRelatedCalibrationData
14235*
14236* DESCRIPTION: Retrieve related system calibration data
14237*
14238* PARAMETERS : None
14239*
14240* RETURN : Pointer of related system calibration data
14241*==========================================================================*/
14242const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14243{
14244 return (const cam_related_system_calibration_data_t *)
14245 &(gCamCapability[mCameraId]->related_cam_calibration);
14246}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014247
14248/*===========================================================================
14249 * FUNCTION : is60HzZone
14250 *
14251 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
14252 *
14253 * PARAMETERS : None
14254 *
14255 * RETURN : True if in 60Hz zone, False otherwise
14256 *==========================================================================*/
14257bool QCamera3HardwareInterface::is60HzZone()
14258{
14259 time_t t = time(NULL);
14260 struct tm lt;
14261
14262 struct tm* r = localtime_r(&t, &lt);
14263
14264 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14265 return true;
14266 else
14267 return false;
14268}
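// Editorial note: the check above uses the local GMT offset as a rough proxy for
// mains frequency: offsets at or west of GMT-2 (the Americas) and at or east of
// GMT+8 are treated as 60Hz regions, everything in between defaults to 50Hz, and
// a failed localtime_r() also returns true. Worked example: GMT-5
// (tm_gmtoff = -18000) -> true; GMT+1 (tm_gmtoff = 3600) -> false.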
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014269
14270/*===========================================================================
14271 * FUNCTION : adjustBlackLevelForCFA
14272 *
14273 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14274 * of bayer CFA (Color Filter Array).
14275 *
14276 * PARAMETERS : @input: black level pattern in the order of RGGB
14277 * @output: black level pattern in the order of CFA
14278 * @color_arrangement: CFA color arrangement
14279 *
14280 * RETURN : None
14281 *==========================================================================*/
14282template<typename T>
14283void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14284 T input[BLACK_LEVEL_PATTERN_CNT],
14285 T output[BLACK_LEVEL_PATTERN_CNT],
14286 cam_color_filter_arrangement_t color_arrangement)
14287{
14288 switch (color_arrangement) {
14289 case CAM_FILTER_ARRANGEMENT_GRBG:
14290 output[0] = input[1];
14291 output[1] = input[0];
14292 output[2] = input[3];
14293 output[3] = input[2];
14294 break;
14295 case CAM_FILTER_ARRANGEMENT_GBRG:
14296 output[0] = input[2];
14297 output[1] = input[3];
14298 output[2] = input[0];
14299 output[3] = input[1];
14300 break;
14301 case CAM_FILTER_ARRANGEMENT_BGGR:
14302 output[0] = input[3];
14303 output[1] = input[2];
14304 output[2] = input[1];
14305 output[3] = input[0];
14306 break;
14307 case CAM_FILTER_ARRANGEMENT_RGGB:
14308 output[0] = input[0];
14309 output[1] = input[1];
14310 output[2] = input[2];
14311 output[3] = input[3];
14312 break;
14313 default:
14314 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14315 break;
14316 }
14317}
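// Editorial note: a minimal usage sketch of the template above (the numeric
// values are illustrative). For a GRBG sensor the R and Gr entries of an
// RGGB-ordered pattern swap positions, as do Gb and B:
//
//   float rggb[BLACK_LEVEL_PATTERN_CNT] = {64.0f, 64.5f, 64.5f, 65.0f}; // R, Gr, Gb, B
//   float cfa[BLACK_LEVEL_PATTERN_CNT];
//   adjustBlackLevelForCFA(rggb, cfa, CAM_FILTER_ARRANGEMENT_GRBG);
//   // cfa is now in CFA order {Gr, R, B, Gb} = {64.5f, 64.0f, 65.0f, 64.5f}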
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014318
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014319void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14320 CameraMetadata &resultMetadata,
14321 std::shared_ptr<metadata_buffer_t> settings)
14322{
14323 if (settings == nullptr) {
14324 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14325 return;
14326 }
14327
14328 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14329 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14330 }
14331
14332 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14333 String8 str((const char *)gps_methods);
14334 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14335 }
14336
14337 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14338 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14339 }
14340
14341 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14342 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14343 }
14344
14345 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14346 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14347 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14348 }
14349
14350 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14351 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14352 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14353 }
14354
14355 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14356 int32_t fwk_thumb_size[2];
14357 fwk_thumb_size[0] = thumb_size->width;
14358 fwk_thumb_size[1] = thumb_size->height;
14359 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14360 }
14361
14362 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14363 uint8_t fwk_intent = intent[0];
14364 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14365 }
14366}
14367
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014368bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14369 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14370 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014371{
14372 if (hdrPlusRequest == nullptr) return false;
14373
14374 // Check noise reduction mode is high quality.
14375 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14376 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14377 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014378 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14379 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014380 return false;
14381 }
14382
14383 // Check edge mode is high quality.
14384 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14385 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14386 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14387 return false;
14388 }
14389
14390 if (request.num_output_buffers != 1 ||
14391 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14392 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014393 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14394 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14395 request.output_buffers[0].stream->width,
14396 request.output_buffers[0].stream->height,
14397 request.output_buffers[0].stream->format);
14398 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014399 return false;
14400 }
14401
14402 // Get a YUV buffer from pic channel.
14403 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14404 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14405 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14406 if (res != OK) {
14407 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14408 __FUNCTION__, strerror(-res), res);
14409 return false;
14410 }
14411
14412 pbcamera::StreamBuffer buffer;
14413 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014414 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014415 buffer.data = yuvBuffer->buffer;
14416 buffer.dataSize = yuvBuffer->frame_len;
14417
14418 pbcamera::CaptureRequest pbRequest;
14419 pbRequest.id = request.frame_number;
14420 pbRequest.outputBuffers.push_back(buffer);
14421
14422 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014423 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014424 if (res != OK) {
14425 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14426 strerror(-res), res);
14427 return false;
14428 }
14429
14430 hdrPlusRequest->yuvBuffer = yuvBuffer;
14431 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14432
14433 return true;
14434}
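// Editorial note: a capture request is routed to HDR+ only if all of the checks
// above pass: noise reduction mode is HIGH_QUALITY, edge mode is HIGH_QUALITY,
// and the request has exactly one output buffer whose stream format is
// HAL_PIXEL_FORMAT_BLOB (JPEG). The YUV intermediate is obtained from the pic
// channel and submitted to the HDR+ service as kPbYuvOutputStreamId, while the
// framework's JPEG buffer is kept in frameworkOutputBuffers for later completion.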
14435
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014436status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14437{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014438 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14439 return OK;
14440 }
14441
14442 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14443 if (res != OK) {
14444 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14445 strerror(-res), res);
14446 return res;
14447 }
14448 gHdrPlusClientOpening = true;
14449
14450 return OK;
14451}
14452
Chien-Yu Chenee335912017-02-09 17:53:20 -080014453status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14454{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014455 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014456
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014457 if (mHdrPlusModeEnabled) {
14458 return OK;
14459 }
14460
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014461 // Check if gHdrPlusClient is opened or being opened.
14462 if (gHdrPlusClient == nullptr) {
14463 if (gHdrPlusClientOpening) {
14464 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14465 return OK;
14466 }
14467
14468 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014469 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014470 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14471 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014472 return res;
14473 }
14474
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014475 // When opening HDR+ client completes, HDR+ mode will be enabled.
14476 return OK;
14477
Chien-Yu Chenee335912017-02-09 17:53:20 -080014478 }
14479
14480 // Configure stream for HDR+.
14481 res = configureHdrPlusStreamsLocked();
14482 if (res != OK) {
14483 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014484 return res;
14485 }
14486
14487 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14488 res = gHdrPlusClient->setZslHdrPlusMode(true);
14489 if (res != OK) {
14490 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014491 return res;
14492 }
14493
14494 mHdrPlusModeEnabled = true;
14495 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14496
14497 return OK;
14498}
14499
14500void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14501{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014502 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014503 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014504 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14505 if (res != OK) {
14506 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14507 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014508
14509 // Close HDR+ client so Easel can enter low power mode.
14510 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14511 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014512 }
14513
14514 mHdrPlusModeEnabled = false;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014515 gHdrPlusClientOpening = false;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014516 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14517}
14518
14519status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014520{
14521 pbcamera::InputConfiguration inputConfig;
14522 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14523 status_t res = OK;
14524
14525 // Configure HDR+ client streams.
14526 // Get input config.
14527 if (mHdrPlusRawSrcChannel) {
14528 // HDR+ input buffers will be provided by HAL.
14529 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14530 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14531 if (res != OK) {
14532 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14533 __FUNCTION__, strerror(-res), res);
14534 return res;
14535 }
14536
14537 inputConfig.isSensorInput = false;
14538 } else {
14539 // Sensor MIPI will send data to Easel.
14540 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014541 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014542 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14543 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14544 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14545 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14546 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
Yin-Chia Yeheeb10422017-05-23 11:37:46 -070014547 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014548 if (mSensorModeInfo.num_raw_bits != 10) {
14549 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14550 mSensorModeInfo.num_raw_bits);
14551 return BAD_VALUE;
14552 }
14553
14554 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014555 }
14556
14557 // Get output configurations.
14558 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014559 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014560
14561 // Easel may need to output YUV output buffers if mPictureChannel was created.
14562 pbcamera::StreamConfiguration yuvOutputConfig;
14563 if (mPictureChannel != nullptr) {
14564 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14565 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14566 if (res != OK) {
14567 LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14568 __FUNCTION__, strerror(-res), res);
14569
14570 return res;
14571 }
14572
14573 outputStreamConfigs.push_back(yuvOutputConfig);
14574 }
14575
14576 // TODO: consider other channels for YUV output buffers.
14577
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014578 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014579 if (res != OK) {
14580 LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14581 strerror(-res), res);
14582 return res;
14583 }
14584
14585 return OK;
14586}
14587
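// Callback for the asynchronous HDR+ client open started in openHdrPlusClientAsyncLocked():
// take ownership of the client, push the camera's static metadata to it, and enable HDR+
// mode. Bails out if HDR+ was disabled while the open was still in flight.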
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014588void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
14589{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014590 if (client == nullptr) {
14591 ALOGE("%s: Opened client is null.", __FUNCTION__);
14592 return;
14593 }
14594
Chien-Yu Chene96475e2017-04-11 11:53:26 -070014595 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014596 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14597
14598 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014599 if (!gHdrPlusClientOpening) {
14600 ALOGW("%s: HDR+ was disabled while the HDR+ client was being opened.", __FUNCTION__);
14601 return;
14602 }
14603
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014604 gHdrPlusClient = std::move(client);
14605 gHdrPlusClientOpening = false;
14606
14607 // Set static metadata.
14608 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14609 if (res != OK) {
14610 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14611 __FUNCTION__, strerror(-res), res);
14612 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14613 gHdrPlusClient = nullptr;
14614 return;
14615 }
14616
14617 // Enable HDR+ mode.
14618 res = enableHdrPlusModeLocked();
14619 if (res != OK) {
14620 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14621 }
14622}
14623
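// Callback for a failed asynchronous HDR+ client open: log the error and clear the opening
// flag so a later attempt can try again.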
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014624void QCamera3HardwareInterface::onOpenFailed(status_t err)
14625{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014626 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14627 Mutex::Autolock l(gHdrPlusClientLock);
14628 gHdrPlusClientOpening = false;
14629}
14630
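// Callback for an unrecoverable HDR+ client error: move the HAL into the ERROR state and
// notify the framework through handleCameraDeviceError().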
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014631void QCamera3HardwareInterface::onFatalError()
14632{
14633 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
14634
14635 // Set HAL state to error.
14636 pthread_mutex_lock(&mMutex);
14637 mState = ERROR;
14638 pthread_mutex_unlock(&mMutex);
14639
14640 handleCameraDeviceError();
14641}
14642
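// Callback delivering a completed HDR+ capture. The single YUV output buffer is optionally
// dumped, then returned to the pic channel for JPEG encoding together with HAL metadata
// translated from the updated result metadata; the shutter and result metadata are dispatched
// to the framework and the pending HDR+ request is removed.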
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014643void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014644 const camera_metadata_t &resultMetadata)
14645{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014646 if (result != nullptr) {
14647 if (result->outputBuffers.size() != 1) {
14648 ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
14649 result->outputBuffers.size());
14650 return;
14651 }
14652
14653 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14654 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14655 result->outputBuffers[0].streamId);
14656 return;
14657 }
14658
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014659 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014660 HdrPlusPendingRequest pendingRequest;
14661 {
14662 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14663 auto req = mHdrPlusPendingRequests.find(result->requestId);
14664 pendingRequest = req->second;
14665 }
14666
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014667 // Update the result metadata with the settings of the HDR+ still capture request because
14668 // the result metadata belongs to a ZSL buffer.
14669 CameraMetadata metadata;
14670 metadata = &resultMetadata;
14671 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14672 camera_metadata_t* updatedResultMetadata = metadata.release();
14673
14674 QCamera3PicChannel *picChannel =
14675 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14676
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014677 // Check if dumping HDR+ YUV output is enabled.
14678 char prop[PROPERTY_VALUE_MAX];
14679 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14680 bool dumpYuvOutput = atoi(prop);
14681
14682 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014683 // Dump yuv buffer to a ppm file.
14684 pbcamera::StreamConfiguration outputConfig;
14685 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14686 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14687 if (rc == OK) {
14688 char buf[FILENAME_MAX] = {};
14689 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14690 result->requestId, result->outputBuffers[0].streamId,
14691 outputConfig.image.width, outputConfig.image.height);
14692
14693 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14694 } else {
14695 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14696 __FUNCTION__, strerror(-rc), rc);
14697 }
14698 }
14699
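    // Prepare an empty HAL metadata buffer; translateFwkMetadataToHalMetadata() below fills
    // it in for the pic channel's stream so the YUV buffer can be JPEG encoded.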
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014700 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14701 auto halMetadata = std::make_shared<metadata_buffer_t>();
14702 clear_metadata_buffer(halMetadata.get());
14703
14704 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14705 // encoding.
14706 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14707 halStreamId, /*minFrameDuration*/0);
14708 if (res == OK) {
14709 // Return the buffer to pic channel for encoding.
14710 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14711 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14712 halMetadata);
14713 } else {
14714 // Return the buffer without encoding.
14715 // TODO: This should not happen but we may want to report an error buffer to camera
14716 // service.
14717 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14718 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14719 strerror(-res), res);
14720 }
14721
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014722 // Find the sensor timestamp in the result metadata and mark the shutter ready for this frame.
14723 camera_metadata_ro_entry_t entry;
14724 res = find_camera_metadata_ro_entry(updatedResultMetadata,
14725 ANDROID_SENSOR_TIMESTAMP, &entry);
14726 if (res != OK) {
14727 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
14728 __FUNCTION__, result->requestId, strerror(-res), res);
14729 } else {
14730 mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
14731 }
14732
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014733 // Send HDR+ metadata to framework.
14734 {
14735 pthread_mutex_lock(&mMutex);
14736
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014737 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
14738 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014739 pthread_mutex_unlock(&mMutex);
14740 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014741
14742 // Remove the HDR+ pending request.
14743 {
14744 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14745 auto req = mHdrPlusPendingRequests.find(result->requestId);
14746 mHdrPlusPendingRequests.erase(req);
14747 }
14748 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014749}
14750
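// Callback for a failed HDR+ capture: return the YUV buffer to the pic channel, drop the
// pending HDR+ request, and report CAMERA3_MSG_ERROR_BUFFER results for all framework
// buffers still pending for that frame number.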
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014751void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
14752{
14753 if (failedResult == nullptr) {
14754 ALOGE("%s: Got a null failed result.", __FUNCTION__);
14755 return;
14756 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014757
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014758 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014759
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014760 // Remove the pending HDR+ request.
14761 {
14762 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14763 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14764
14765 // Return the buffer to pic channel.
14766 QCamera3PicChannel *picChannel =
14767 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14768 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14769
14770 mHdrPlusPendingRequests.erase(pendingRequest);
14771 }
14772
14773 pthread_mutex_lock(&mMutex);
14774
14775 // Find the pending buffers.
14776 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
14777 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14778 if (pendingBuffers->frame_number == failedResult->requestId) {
14779 break;
14780 }
14781 pendingBuffers++;
14782 }
14783
14784 // Send out buffer errors for the pending buffers.
14785 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14786 std::vector<camera3_stream_buffer_t> streamBuffers;
14787 for (auto &buffer : pendingBuffers->mPendingBufferList) {
14788 // Prepare a stream buffer.
14789 camera3_stream_buffer_t streamBuffer = {};
14790 streamBuffer.stream = buffer.stream;
14791 streamBuffer.buffer = buffer.buffer;
14792 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14793 streamBuffer.acquire_fence = -1;
14794 streamBuffer.release_fence = -1;
14795
14796 streamBuffers.push_back(streamBuffer);
14797
14798 // Send out error buffer event.
14799 camera3_notify_msg_t notify_msg = {};
14800 notify_msg.type = CAMERA3_MSG_ERROR;
14801 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
14802 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
14803 notify_msg.message.error.error_stream = buffer.stream;
14804
14805 orchestrateNotify(&notify_msg);
14806 }
14807
14808 camera3_capture_result_t result = {};
14809 result.frame_number = pendingBuffers->frame_number;
14810 result.num_output_buffers = streamBuffers.size();
14811 result.output_buffers = &streamBuffers[0];
14812
14813 // Send out result with buffer errors.
14814 orchestrateResult(&result);
14815
14816 // Remove pending buffers.
14817 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
14818 }
14819
14820 // Remove pending request.
14821 auto halRequest = mPendingRequestsList.begin();
14822 while (halRequest != mPendingRequestsList.end()) {
14823 if (halRequest->frame_number == failedResult->requestId) {
14824 mPendingRequestsList.erase(halRequest);
14825 break;
14826 }
14827 halRequest++;
14828 }
14829
14830 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014831}
14832
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014833
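// ShutterDispatcher keeps shutter notifications ordered by frame number: expectShutter()
// registers a frame, markShutterReady() records its sensor timestamp, and ready shutters are
// sent as CAMERA3_MSG_SHUTTER messages in order, holding back any shutter whose predecessors
// are not ready yet.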
14834ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
14835 mParent(parent) {}
14836
14837void ShutterDispatcher::expectShutter(uint32_t frameNumber)
14838{
14839 std::lock_guard<std::mutex> lock(mLock);
14840 mShutters.emplace(frameNumber, Shutter());
14841}
14842
14843void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
14844{
14845 std::lock_guard<std::mutex> lock(mLock);
14846
14847 // Make this frame's shutter ready.
14848 auto shutter = mShutters.find(frameNumber);
14849 if (shutter == mShutters.end()) {
14850 // Shutter was already sent.
14851 return;
14852 }
14853
14854 shutter->second.ready = true;
14855 shutter->second.timestamp = timestamp;
14856
14857 // Iterate through the shutters and send them out until reaching one that's not ready yet.
14858 shutter = mShutters.begin();
14859 while (shutter != mShutters.end()) {
14860 if (!shutter->second.ready) {
14861 // If this shutter is not ready, the following shutters can't be sent.
14862 break;
14863 }
14864
14865 camera3_notify_msg_t msg = {};
14866 msg.type = CAMERA3_MSG_SHUTTER;
14867 msg.message.shutter.frame_number = shutter->first;
14868 msg.message.shutter.timestamp = shutter->second.timestamp;
14869 mParent->orchestrateNotify(&msg);
14870
14871 shutter = mShutters.erase(shutter);
14872 }
14873}
14874
14875void ShutterDispatcher::clear(uint32_t frameNumber)
14876{
14877 std::lock_guard<std::mutex> lock(mLock);
14878 mShutters.erase(frameNumber);
14879}
14880
14881void ShutterDispatcher::clear()
14882{
14883 std::lock_guard<std::mutex> lock(mLock);
14884
14885 // Log errors for stale shutters.
14886 for (auto &shutter : mShutters) {
14887 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRIu64,
14888 __FUNCTION__, shutter.first, shutter.second.ready,
14889 shutter.second.timestamp);
14890 }
14891 mShutters.clear();
14892}
14893
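// OutputBufferDispatcher does the same for output buffers on a per-stream basis:
// configureStreams() creates a "frame-number -> buffer" map for each configured stream,
// expectBuffer() registers a pending buffer, and markBufferReady() returns completed buffers
// to the framework in frame-number order within each stream.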
14894OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
14895 mParent(parent) {}
14896
14897status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
14898{
14899 std::lock_guard<std::mutex> lock(mLock);
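    // Drop any per-stream maps left over from a previous stream configuration.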
14900 mStreamBuffers.clear();
14901 if (!streamList) {
14902 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
14903 return -EINVAL;
14904 }
14905
14906 // Create a "frame-number -> buffer" map for each stream.
14907 for (uint32_t i = 0; i < streamList->num_streams; i++) {
14908 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
14909 }
14910
14911 return OK;
14912}
14913
14914status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
14915{
14916 std::lock_guard<std::mutex> lock(mLock);
14917
14918 // Find the "frame-number -> buffer" map for the stream.
14919 auto buffers = mStreamBuffers.find(stream);
14920 if (buffers == mStreamBuffers.end()) {
14921 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
14922 return -EINVAL;
14923 }
14924
14925 // Create an unready buffer for this frame number.
14926 buffers->second.emplace(frameNumber, Buffer());
14927 return OK;
14928}
14929
14930void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
14931 const camera3_stream_buffer_t &buffer)
14932{
14933 std::lock_guard<std::mutex> lock(mLock);
14934
14935 // Find the "frame-number -> buffer" map for the stream.
14936 auto buffers = mStreamBuffers.find(buffer.stream);
14937 if (buffers == mStreamBuffers.end()) {
14938 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
14939 return;
14940 }
14941
14942 // Find the unready buffer for this frame number and mark it ready.
14943 auto pendingBuffer = buffers->second.find(frameNumber);
14944 if (pendingBuffer == buffers->second.end()) {
14945 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
14946 return;
14947 }
14948
14949 pendingBuffer->second.ready = true;
14950 pendingBuffer->second.buffer = buffer;
14951
14952 // Iterate through the buffers and send them out until reaching one that's not ready yet.
14953 pendingBuffer = buffers->second.begin();
14954 while (pendingBuffer != buffers->second.end()) {
14955 if (!pendingBuffer->second.ready) {
14956 // If this buffer is not ready, the following buffers can't be sent.
14957 break;
14958 }
14959
14960 camera3_capture_result_t result = {};
14961 result.frame_number = pendingBuffer->first;
14962 result.num_output_buffers = 1;
14963 result.output_buffers = &pendingBuffer->second.buffer;
14964
14965 // Send out the result containing this ready output buffer.
14966 mParent->orchestrateResult(&result);
14967
14968 pendingBuffer = buffers->second.erase(pendingBuffer);
14969 }
14970}
14971
14972void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
14973{
14974 std::lock_guard<std::mutex> lock(mLock);
14975
14976 // Log errors for stale buffers.
14977 for (auto &buffers : mStreamBuffers) {
14978 for (auto &buffer : buffers.second) {
14979 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
14980 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
14981 }
14982 buffers.second.clear();
14983 }
14984
14985 if (clearConfiguredStreams) {
14986 mStreamBuffers.clear();
14987 }
14988}
14989
Thierry Strudel3d639192016-09-09 11:52:26 -070014990}; //end namespace qcamera